# -*- coding: utf-8 -*-
"""
pygments.lexers.shell
~~~~~~~~~~~~~~~~~~~~~
Lexers for various shells.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
include, default, this, using, words
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches
__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'MSDOSSessionLexer', 'PowerShellLexer',
'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer']
line_re = re.compile('.*?\n')
class BashLexer(RegexLexer):
"""
Lexer for (ba|k|z|)sh shell scripts.
.. versionadded:: 0.6
"""
name = 'Bash'
aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'*.exheres-0', '*.exlib', '*.zsh',
'.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript']
tokens = {
'root': [
include('basic'),
(r'`', String.Backtick, 'backticks'),
include('data'),
include('interp'),
],
'interp': [
(r'\$\(\(', Keyword, 'math'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', String.Interpol, 'curly'),
(r'\$[a-zA-Z_]\w*', Name.Variable), # user variable
(r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
(r'\$', Text),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
r'select|continue|until|esac|elif)(\s*)\b',
bygroups(Keyword, Text)),
(r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
Name.Builtin),
(r'\A#!.+\n', Comment.Hashbang),
(r'#.*\n', Comment.Single),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]', Operator),
(r'<<<', Operator), # here-string
(r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r'&&|\|\|', Operator),
],
'data': [
(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
(r'"', String.Double, 'string'),
(r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r"(?s)'.*?'", String.Single),
(r';', Punctuation),
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
(r'\d+\b', Number),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'<', Text),
],
'string': [
(r'"', String.Double, '#pop'),
(r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
include('interp'),
],
'curly': [
(r'\}', String.Interpol, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
(r'[^}:"\'`$\\]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'math': [
(r'\)\)', Keyword, '#pop'),
(r'[-+*/%^|&]|\*\*|\|\|', Operator),
(r'\d+#\d+', Number),
(r'\d+#(?! )', Number),
(r'\d+', Number),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
def analyse_text(text):
if shebang_matches(text, r'(ba|z|)sh'):
return 1
if text.startswith('$ '):
return 0.2
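# A minimal usage sketch, assuming Pygments is installed; the one-line script
# below is illustrative. Every lexer exposes get_tokens_unprocessed(), which
# yields (position, token-type, value) triples.
from pygments.lexers.shell import BashLexer
for pos, token, value in BashLexer().get_tokens_unprocessed('echo "$HOME" | wc -c\n'):
    print(pos, token, value)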
class ShellSessionBaseLexer(Lexer):
"""
Base lexer for simplistic shell sessions.
.. versionadded:: 2.1
"""
def get_tokens_unprocessed(self, text):
innerlexer = self._innerLexerCls(**self.options)
pos = 0
curcode = ''
insertions = []
backslash_continuation = False
for match in line_re.finditer(text):
line = match.group()
m = re.match(self._ps1rgx, line)
if backslash_continuation:
curcode += line
backslash_continuation = curcode.endswith('\\\n')
elif m:
# To support output lexers (say diff output), the output
# needs to be broken by prompts whenever the output lexer
# changes.
if not insertions:
pos = match.start()
insertions.append((len(curcode),
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
backslash_continuation = curcode.endswith('\\\n')
elif line.startswith(self._ps2):
insertions.append((len(curcode),
[(0, Generic.Prompt, line[:len(self._ps2)])]))
curcode += line[len(self._ps2):]
backslash_continuation = curcode.endswith('\\\n')
else:
if insertions:
toks = innerlexer.get_tokens_unprocessed(curcode)
for i, t, v in do_insertions(insertions, toks):
yield pos+i, t, v
yield match.start(), Generic.Output, line
insertions = []
curcode = ''
if insertions:
for i, t, v in do_insertions(insertions,
innerlexer.get_tokens_unprocessed(curcode)):
yield pos+i, t, v
class BashSessionLexer(ShellSessionBaseLexer):
"""
Lexer for simplistic shell sessions.
.. versionadded:: 1.1
"""
name = 'Bash Session'
aliases = ['console', 'shell-session']
filenames = ['*.sh-session', '*.shell-session']
mimetypes = ['application/x-shell-session', 'application/x-sh-session']
_innerLexerCls = BashLexer
_ps1rgx = \
r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)'
_ps2 = '>'
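# A minimal sketch of rendering a transcript with this lexer; the session text
# is illustrative. Lines matching _ps1rgx become Generic.Prompt plus Bash
# tokens, unmatched lines become Generic.Output.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.shell import BashSessionLexer
print(highlight('$ echo hello\nhello\n', BashSessionLexer(), TerminalFormatter()))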
class BatchLexer(RegexLexer):
"""
Lexer for the DOS/Windows Batch file format.
.. versionadded:: 0.7
"""
name = 'Batchfile'
aliases = ['bat', 'batch', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
flags = re.MULTILINE | re.IGNORECASE
_nl = r'\n\x1a'
_punct = r'&<>|'
_ws = r'\t\v\f\r ,;=\xa0'
_space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
_keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
(_nl, _ws, _nl, _punct))
_token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
_start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
_label = r'(?:(?:[^%s%s%s+:^]|\^[%s]?[\w\W])*)' % (_nl, _punct, _ws, _nl)
_label_compound = (r'(?:(?:[^%s%s%s+:^)]|\^[%s]?[^)])*)' %
(_nl, _punct, _ws, _nl))
_number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
_opword = r'(?:equ|geq|gtr|leq|lss|neq)'
_string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
_variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
(_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
_core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s])+)' % (_nl, _nl, _punct, _ws)
_core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s)])+)' % (_nl, _nl,
_punct, _ws)
_token = r'(?:[%s]+|%s)' % (_punct, _core_token)
_token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
_stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
(_punct, _string, _variable, _core_token))
def _make_begin_state(compound, _core_token=_core_token,
_core_token_compound=_core_token_compound,
_keyword_terminator=_keyword_terminator,
_nl=_nl, _punct=_punct, _string=_string,
_space=_space, _start_label=_start_label,
_stoken=_stoken, _token_terminator=_token_terminator,
_variable=_variable, _ws=_ws):
rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
')' if compound else '')
rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
suffix = ''
if compound:
_keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
_token_terminator = r'(?:(?=\))|%s)' % _token_terminator
suffix = '/compound'
return [
((r'\)', Punctuation, '#pop') if compound else
(r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
Comment.Single)),
(r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
(_space, using(this, state='text')),
include('redirect%s' % suffix),
(r'[%s]+' % _nl, Text),
(r'\(', Punctuation, 'root/compound'),
(r'@+', Punctuation),
(r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
(_nl, _token_terminator, _space,
_core_token_compound if compound else _core_token, _nl, _nl),
bygroups(Keyword, using(this, state='text')),
'follow%s' % suffix),
(r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
(_keyword_terminator, rest, _nl, _nl, rest),
bygroups(Keyword, using(this, state='text')),
'follow%s' % suffix),
(words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
'title', 'type', 'ver', 'verify', 'vol'),
suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
(r'(call)(%s?)(:)' % _space,
bygroups(Keyword, using(this, state='text'), Punctuation),
'call%s' % suffix),
(r'call%s' % _keyword_terminator, Keyword),
(r'(for%s(?!\^))(%s)(/f%s)' %
(_token_terminator, _space, _token_terminator),
bygroups(Keyword, using(this, state='text'), Keyword),
('for/f', 'for')),
(r'(for%s(?!\^))(%s)(/l%s)' %
(_token_terminator, _space, _token_terminator),
bygroups(Keyword, using(this, state='text'), Keyword),
('for/l', 'for')),
(r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
(r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
bygroups(Keyword, using(this, state='text'), Punctuation),
'label%s' % suffix),
(r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
(_token_terminator, _space, _token_terminator, _space,
_token_terminator, _space),
bygroups(Keyword, using(this, state='text'), Keyword,
using(this, state='text'), Keyword,
using(this, state='text')), ('(?', 'if')),
(r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
(_token_terminator, _space, _stoken, _keyword_terminator,
rest_of_line_compound if compound else rest_of_line),
Comment.Single, 'follow%s' % suffix),
(r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
bygroups(Keyword, using(this, state='text'), Keyword),
'arithmetic%s' % suffix),
(r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
(_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
')' if compound else '', _nl, _nl),
bygroups(Keyword, using(this, state='text'), Keyword,
using(this, state='text'), using(this, state='variable'),
Punctuation),
'follow%s' % suffix),
default('follow%s' % suffix)
]
def _make_follow_state(compound, _label=_label,
_label_compound=_label_compound, _nl=_nl,
_space=_space, _start_label=_start_label,
_token=_token, _token_compound=_token_compound,
_ws=_ws):
suffix = '/compound' if compound else ''
state = []
if compound:
state.append((r'(?=\))', Text, '#pop'))
state += [
(r'%s([%s]*)(%s)(.*)' %
(_start_label, _ws, _label_compound if compound else _label),
bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
include('redirect%s' % suffix),
(r'(?=[%s])' % _nl, Text, '#pop'),
(r'\|\|?|&&?', Punctuation, '#pop'),
include('text')
]
return state
def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
_string=_string, _variable=_variable, _ws=_ws):
op = r'=+\-*/!~'
state = []
if compound:
state.append((r'(?=\))', Text, '#pop'))
state += [
(r'0[0-7]+', Number.Oct),
(r'0x[\da-f]+', Number.Hex),
(r'\d+', Number.Integer),
(r'[(),]+', Punctuation),
(r'([%s]|%%|\^\^)+' % op, Operator),
(r'(%s|%s|(\^[%s]?)?[^()%s%%^"%s%s%s]|\^[%s%s]?%s)+' %
(_string, _variable, _nl, op, _nl, _punct, _ws, _nl, _ws,
r'[^)]' if compound else r'[\w\W]'),
using(this, state='variable')),
(r'(?=[\x00|&])', Text, '#pop'),
include('follow')
]
return state
def _make_call_state(compound, _label=_label,
_label_compound=_label_compound):
state = []
if compound:
state.append((r'(?=\))', Text, '#pop'))
state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
bygroups(Punctuation, Name.Label), '#pop'))
return state
def _make_label_state(compound, _label=_label,
_label_compound=_label_compound, _nl=_nl,
_punct=_punct, _string=_string, _variable=_variable):
state = []
if compound:
state.append((r'(?=\))', Text, '#pop'))
state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
(_label_compound if compound else _label, _string,
_variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
_punct, r')' if compound else ''),
bygroups(Name.Label, Comment.Single), '#pop'))
return state
def _make_redirect_state(compound,
_core_token_compound=_core_token_compound,
_nl=_nl, _punct=_punct, _stoken=_stoken,
_string=_string, _space=_space,
_variable=_variable, _ws=_ws):
stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
(_punct, _string, _variable, _core_token_compound))
return [
(r'((?:(?<=[%s%s])\d)?)(>>?&|<&)([%s%s]*)(\d)' %
(_nl, _ws, _nl, _ws),
bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
(r'((?:(?<=[%s%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
(_nl, _ws, _nl, _space, stoken_compound if compound else _stoken),
bygroups(Number.Integer, Punctuation, using(this, state='text')))
]
tokens = {
'root': _make_begin_state(False),
'follow': _make_follow_state(False),
'arithmetic': _make_arithmetic_state(False),
'call': _make_call_state(False),
'label': _make_label_state(False),
'redirect': _make_redirect_state(False),
'root/compound': _make_begin_state(True),
'follow/compound': _make_follow_state(True),
'arithmetic/compound': _make_arithmetic_state(True),
'call/compound': _make_call_state(True),
'label/compound': _make_label_state(True),
'redirect/compound': _make_redirect_state(True),
'variable-or-escape': [
(_variable, Name.Variable),
(r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
],
'string': [
(r'"', String.Double, '#pop'),
(_variable, Name.Variable),
(r'\^!|%%', String.Escape),
(r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
default('#pop')
],
'sqstring': [
include('variable-or-escape'),
(r'[^%]+|%', String.Single)
],
'bqstring': [
include('variable-or-escape'),
(r'[^%]+|%', String.Backtick)
],
'text': [
(r'"', String.Double, 'string'),
include('variable-or-escape'),
(r'[^"%%^%s%s%s\d)]+|.' % (_nl, _punct, _ws), Text)
],
'variable': [
(r'"', String.Double, 'string'),
include('variable-or-escape'),
(r'[^"%%^%s]+|.' % _nl, Name.Variable)
],
'for': [
(r'(%s)(in)(%s)(\()' % (_space, _space),
bygroups(using(this, state='text'), Keyword,
using(this, state='text'), Punctuation), '#pop'),
include('follow')
],
'for2': [
(r'\)', Punctuation),
(r'(%s)(do%s)' % (_space, _token_terminator),
bygroups(using(this, state='text'), Keyword), '#pop'),
(r'[%s]+' % _nl, Text),
include('follow')
],
'for/f': [
(r'(")((?:%s|[^"])*?")([%s%s]*)(\))' % (_variable, _nl, _ws),
bygroups(String.Double, using(this, state='string'), Text,
Punctuation)),
(r'"', String.Double, ('#pop', 'for2', 'string')),
(r"('(?:%%%%|%s|[\w\W])*?')([%s%s]*)(\))" % (_variable, _nl, _ws),
bygroups(using(this, state='sqstring'), Text, Punctuation)),
(r'(`(?:%%%%|%s|[\w\W])*?`)([%s%s]*)(\))' % (_variable, _nl, _ws),
bygroups(using(this, state='bqstring'), Text, Punctuation)),
include('for2')
],
'for/l': [
(r'-?\d+', Number.Integer),
include('for2')
],
'if': [
(r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
(_token_terminator, _space),
bygroups(Keyword, using(this, state='text'),
Number.Integer), '#pop'),
(r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
bygroups(Keyword, using(this, state='text'),
using(this, state='variable')), '#pop'),
(r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
bygroups(Keyword, using(this, state='text')), '#pop'),
(r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
bygroups(using(this, state='arithmetic'), Operator.Word,
using(this, state='arithmetic')), '#pop'),
(_stoken, using(this, state='text'), ('#pop', 'if2')),
],
'if2': [
(r'(%s?)(==)(%s?%s)' % (_space, _space, _stoken),
bygroups(using(this, state='text'), Operator,
using(this, state='text')), '#pop'),
(r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
bygroups(using(this, state='text'), Operator.Word,
using(this, state='text')), '#pop')
],
'(?': [
(_space, using(this, state='text')),
(r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
default('#pop')
],
'else?': [
(_space, using(this, state='text')),
(r'else%s' % _token_terminator, Keyword, '#pop'),
default('#pop')
]
}
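# A minimal sketch, with an illustrative snippet: the parenthesized blocks are
# handled by the '*/compound' states built above with compound=True, each of
# which gains a lookahead rule that pops on the closing ')'.
from pygments.lexers.shell import BatchLexer
snippet = '@echo off\nif exist out.txt (echo found) else (echo missing)\n'
for pos, token, value in BatchLexer().get_tokens_unprocessed(snippet):
    print(pos, token, repr(value))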
class MSDOSSessionLexer(ShellSessionBaseLexer):
"""
Lexer for simplistic MSDOS sessions.
.. versionadded:: 2.1
"""
name = 'MSDOS Session'
aliases = ['doscon']
filenames = []
mimetypes = []
_innerLexerCls = BatchLexer
_ps1rgx = r'^([^>]+>)(.*\n?)'
_ps2 = 'More? '
class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
.. versionadded:: 0.10
"""
name = 'Tcsh'
aliases = ['tcsh', 'csh']
filenames = ['*.tcsh', '*.csh']
mimetypes = ['application/x-csh']
tokens = {
'root': [
include('basic'),
(r'\$\(', Keyword, 'paren'),
(r'\$\{#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
],
'basic': [
(r'\b(if|endif|else|while|then|foreach|case|default|'
r'continue|goto|breaksw|end|switch|endsw)\s*\b',
Keyword),
(r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
r'source|stop|suspend|source|suspend|telltc|time|'
r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
r'ver|wait|warp|watchlog|where|which)\s*\b',
Name.Builtin),
(r'#.*', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]{}()=]+', Operator),
(r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
(r';', Punctuation),
],
'data': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Text),
(r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
],
'curly': [
(r'\}', Keyword, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
(r'[^}:"\'`$]+', Punctuation),
(r':', Punctuation),
include('root'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'backticks': [
(r'`', String.Backtick, '#pop'),
include('root'),
],
}
class TcshSessionLexer(ShellSessionBaseLexer):
"""
Lexer for Tcsh sessions.
.. versionadded:: 2.1
"""
name = 'Tcsh Session'
aliases = ['tcshcon']
filenames = []
mimetypes = []
_innerLexerCls = TcshLexer
_ps1rgx = r'^([^>]+>)(.*\n?)'
_ps2 = '? '
class PowerShellLexer(RegexLexer):
"""
For Windows PowerShell code.
.. versionadded:: 1.5
"""
name = 'PowerShell'
aliases = ['powershell', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1', '*.psm1']
mimetypes = ['text/x-powershell']
flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
keywords = (
'while validateset validaterange validatepattern validatelength '
'validatecount until trap switch return ref process param parameter in '
'if global: function foreach for finally filter end elseif else '
'dynamicparam do default continue cmdletbinding break begin alias \\? '
'% #script #private #local #global mandatory parametersetname position '
'valuefrompipeline valuefrompipelinebypropertyname '
'valuefromremainingarguments helpmessage try catch throw').split()
operators = (
'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
'lt match ne not notcontains notlike notmatch or regex replace '
'wildcard').split()
verbs = (
'write where watch wait use update unregister unpublish unprotect '
'unlock uninstall undo unblock trace test tee take sync switch '
'suspend submit stop step start split sort skip show set send select '
'search scroll save revoke resume restore restart resolve resize '
'reset request repair rename remove register redo receive read push '
'publish protect pop ping out optimize open new move mount merge '
'measure lock limit join invoke install initialize import hide group '
'grant get format foreach find export expand exit enter enable edit '
'dismount disconnect disable deny debug cxnew copy convertto '
'convertfrom convert connect confirm compress complete compare close '
'clear checkpoint block backup assert approve aggregate add').split()
aliases = (
'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
'trcm type wget where wjb write').split()
commenthelp = (
'component description example externalhelp forwardhelpcategory '
'forwardhelptargetname functionality inputs link '
'notes outputs parameter remotehelprunspace role synopsis').split()
tokens = {
'root': [
# we need to count pairs of parentheses for correct highlight
# of '$(...)' blocks in strings
(r'\(', Punctuation, 'child'),
(r'\s+', Text),
(r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
bygroups(Comment, String.Doc, Comment)),
(r'#[^\n]*?$', Comment),
(r'(&lt;|<)#', Comment.Multiline, 'multline'),
(r'@"\n', String.Heredoc, 'heredoc-double'),
(r"@'\n.*?\n'@", String.Heredoc),
# escaped syntax
(r'`[\'"$@-]', Punctuation),
(r'"', String.Double, 'string'),
(r"'([^']|'')*'", String.Single),
(r'(\$|@@|@)((global|script|private|env):)?\w+',
Name.Variable),
(r'(%s)\b' % '|'.join(keywords), Keyword),
(r'-(%s)\b' % '|'.join(operators), Operator),
(r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
(r'(%s)\s' % '|'.join(aliases), Name.Builtin),
(r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_]\w*', Name),
(r'\w+', Name),
(r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
],
'child': [
(r'\)', Punctuation, '#pop'),
include('root'),
],
'multline': [
(r'[^#&.]+', Comment.Multiline),
(r'#(&gt;|>)', Comment.Multiline, '#pop'),
(r'\.(%s)' % '|'.join(commenthelp), String.Doc),
(r'[#&.]', Comment.Multiline),
],
'string': [
(r"`[0abfnrtv'\"$`]", String.Escape),
(r'[^$`"]+', String.Double),
(r'\$\(', Punctuation, 'child'),
(r'""', String.Double),
(r'[`$]', String.Double),
(r'"', String.Double, '#pop'),
],
'heredoc-double': [
(r'\n"@', String.Heredoc, '#pop'),
(r'\$\(', Punctuation, 'child'),
(r'[^@\n]+"]', String.Heredoc),
(r".", String.Heredoc),
]
}
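# A minimal sketch with an illustrative script: the 'child' state above counts
# parentheses so that a '$(...)' subexpression inside a double-quoted string is
# lexed as code rather than as string content.
from pygments.lexers.shell import PowerShellLexer
for pos, token, value in PowerShellLexer().get_tokens_unprocessed('Write-Host "Now: $(Get-Date)"\n'):
    print(pos, token, value)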
class PowerShellSessionLexer(ShellSessionBaseLexer):
"""
Lexer for simplistic Windows PowerShell sessions.
.. versionadded:: 2.1
"""
name = 'PowerShell Session'
aliases = ['ps1con']
filenames = []
mimetypes = []
_innerLexerCls = PowerShellLexer
_ps1rgx = r'^(PS [^>]+> )(.*\n?)'
_ps2 = '>> '
class FishShellLexer(RegexLexer):
"""
Lexer for Fish shell scripts.
.. versionadded:: 2.1
"""
name = 'Fish'
aliases = ['fish', 'fishshell']
filenames = ['*.fish', '*.load']
mimetypes = ['application/x-fish']
tokens = {
'root': [
include('basic'),
include('data'),
include('interp'),
],
'interp': [
(r'\$\(\(', Keyword, 'math'),
(r'\(', Keyword, 'paren'),
(r'\$#?(\w+|.)', Name.Variable),
],
'basic': [
(r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
r'cd|count|test)(\s*)\b',
bygroups(Keyword, Text)),
(r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
r'fish_update_completions|fishd|funced|funcsave|functions|help|'
r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
Name.Builtin),
(r'#.*\n', Comment),
(r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
(r'[\[\]()=]', Operator),
(r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
],
'data': [
(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
(r'"', String.Double, 'string'),
(r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r"(?s)'.*?'", String.Single),
(r';', Punctuation),
(r'&|\||\^|<|>', Operator),
(r'\s+', Text),
(r'\d+(?= |\Z)', Number),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
],
'string': [
(r'"', String.Double, '#pop'),
(r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
include('interp'),
],
'paren': [
(r'\)', Keyword, '#pop'),
include('root'),
],
'math': [
(r'\)\)', Keyword, '#pop'),
(r'[-+*/%^|&]|\*\*|\|\|', Operator),
(r'\d+#\d+', Number),
(r'\d+#(?! )', Number),
(r'\d+', Number),
include('root'),
],
}
# ==== above file: lmregus/Portfolio :: python/design_patterns/env/lib/python3.7/site-packages/pygments/lexers/shell.py (Python, mit license, 32,583 bytes) ====
import random
from django import template
from django.conf import settings
from django.contrib.sites.models import Site
from friends.models import Friendship, FriendshipInvitation
from socialregistration.models import FacebookProfile
register = template.Library()
@register.inclusion_tag('social/inclusion_tags/twitter_connect_form.html', takes_context=True)
def twitter_connect_form(context, form_id):
context.update({
'form_id': form_id,
})
return context
@register.inclusion_tag('social/inclusion_tags/facebook_connect_form.html', takes_context=True)
def facebook_connect_form(context, form_id):
context.update({
'form_id': form_id,
})
return context
@register.inclusion_tag('social/inclusion_tags/twitter_connect_button.html')
def twitter_connect_button(form_id, media_path):
return {
'form_id': form_id,
'media_path': media_path,
}
@register.inclusion_tag('social/inclusion_tags/facebook_connect_button.html')
def facebook_connect_button(form_id, media_path):
return {
'form_id': form_id,
'media_path': media_path,
}
@register.inclusion_tag('social/inclusion_tags/friendship_setup_button.html', takes_context=True)
def friendship_setup_button(context, user, include_template_name='social/inclusion_tags/friendship_setup_button_include.html'):
"""
Renders either an 'add friend', 'remove friend', 'awaiting confirmation' or 'friendship declined' button based on current friendship state.
Also includes JavaScript to request or remove a friend.
"""
if not user:
return {}
# Render add friend template by default.
active_class = "add_friend"
requesting_user = context['request'].user
if requesting_user.is_authenticated():
# If users are friends already render remove friend template.
are_friends = Friendship.objects.are_friends(requesting_user, user)
if are_friends:
active_class = "remove_friend"
else:
# If users are not friends but an invitation exists, render awaiting confirmation or declined template.
status = FriendshipInvitation.objects.invitation_status(user1=requesting_user, user2=user)
if status == 2:
active_class = "awaiting_friend_confirmation"
if status == 6:
active_class = "request_declined"
return {
'include_template_name': include_template_name,
'object': user,
'active_class': active_class,
'random': random.randint(0, 100000000)
}
@register.inclusion_tag('social/inclusion_tags/facebook_invite_friends.html', takes_context=True)
def facebook_invite_friends(context, user):
"""
Renders Facebook friends invite form.
"""
current_site = Site.objects.get(id=settings.SITE_ID)
# exclude IDs of Facebook users that are already using the app
fb_profiles = FacebookProfile.objects.all()
exclude_ids = ",".join([fb_profile.uid for fb_profile in fb_profiles])
return {
'exclude_ids': exclude_ids,
'site_name': current_site.name,
'site_domain': current_site.domain,
'next': context['next'],
}
# ==== above file: praekelt/jmbo-social :: social/templatetags/social_inclusion_tags.py (Python, bsd-3-clause license, 3,195 bytes) ====
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner jbryner@mozilla.com
import logging
import random
import sys
from datetime import datetime
from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from pymongo import MongoClient
import os
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../lib'))
from utilities.toUTC import toUTC
from elasticsearch_client import ElasticsearchClient
from query_models import SearchQuery, TermMatch, QueryStringMatch
import re
userre = re.compile(r'''Accepted publickey for (.*?) from''', re.IGNORECASE)
logger = logging.getLogger(sys.argv[0])
def loggerTimeStamp(self, record, datefmt=None):
return toUTC(datetime.now()).isoformat()
def initLogger():
logger.level = logging.INFO
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
formatter.formatTime = loggerTimeStamp
if options.output == 'syslog':
logger.addHandler(
SysLogHandler(
address=(options.sysloghostname, options.syslogport)))
else:
sh = logging.StreamHandler(sys.stderr)
sh.setFormatter(formatter)
logger.addHandler(sh)
def genMeteorID():
return('%024x' % random.randrange(16**24))
def searchForSSHKeys(es):
search_query = SearchQuery(minutes=5)
search_query.add_must([
TermMatch('_type', 'event'),
TermMatch('details.program', 'sshd'),
QueryStringMatch('summary:found matching key accepted publickey')
])
results = search_query.execute(es)
return results
def correlateSSHKeys(esResults):
# correlate ssh key to userid by hostname and processid
# dict to populate hits we find
# will be a dict with hostname:processid as the key, and sshkey and username as the dict items.
correlations = {}
# a list for the final dicts containing keys: username and key
uniqueCorrelations = []
# first find the keys
for r in esResults['hits']:
if 'found matching' in r['_source']['summary'].lower():
hostname = r['_source']['details']['hostname']
processid = r['_source']['details']['processid']
sshkey = r['_source']['summary'].split('key:')[1].strip()
if '{0}:{1}'.format(hostname, processid) not in correlations.keys():
correlations['{0}:{1}'.format(hostname, processid)] = dict(sshkey=sshkey)
# find the users and match on host:processid
for r in esResults['hits']:
if 'accepted publickey' in r['_source']['summary'].lower():
hostname = r['_source']['details']['hostname']
processid = r['_source']['details']['processid']
username = userre.split(r['_source']['summary'])[1]
if '{0}:{1}'.format(hostname, processid) in correlations.keys() and 'username' not in correlations['{0}:{1}'.format(hostname, processid)].keys():
correlations['{0}:{1}'.format(hostname, processid)]['username'] = username
for c in correlations:
if 'username' in correlations[c].keys():
if correlations[c] not in uniqueCorrelations:
uniqueCorrelations.append(correlations[c])
return uniqueCorrelations
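# A hedged sketch of the result shape this function consumes; the hostnames,
# process ids and key fingerprint below are made up, but the keys mirror what
# the loops above read (_source.summary and _source.details.hostname/processid).
_example_results = {'hits': [
    {'_source': {'summary': 'Found matching RSA key: aa:bb:cc:dd',
                 'details': {'hostname': 'bastion1', 'processid': '4242'}}},
    {'_source': {'summary': 'Accepted publickey for alice from 10.0.0.5',
                 'details': {'hostname': 'bastion1', 'processid': '4242'}}},
]}
# correlateSSHKeys(_example_results) -> [{'sshkey': 'aa:bb:cc:dd', 'username': 'alice'}]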
def updateMongo(mozdefdb, correlations):
sshkeys = mozdefdb['sshkeys']
for c in correlations:
keyrecord = sshkeys.find_one({'sshkey': c['sshkey']})
if keyrecord is None:
# new record
# generate a meteor-compatible ID
c['_id'] = genMeteorID()
c['utctimestamp'] = toUTC(datetime.now()).isoformat()
logger.debug(c)
sshkeys.insert(c)
def main():
logger.debug('starting')
logger.debug(options)
try:
es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
client = MongoClient(options.mongohost, options.mongoport)
# use meteor db
mozdefdb = client.meteor
esResults = searchForSSHKeys(es)
correlations = correlateSSHKeys(esResults)
if len(correlations) > 0:
updateMongo(mozdefdb, correlations)
except Exception as e:
logger.error("Exception %r sending health to mongo" % e)
def initConfig():
# output our log to stdout or syslog
options.output = getConfig('output', 'stdout', options.configfile)
# syslog hostname
options.sysloghostname = getConfig('sysloghostname',
'localhost',
options.configfile)
# syslog port
options.syslogport = getConfig('syslogport', 514, options.configfile)
# elastic search server settings
options.esservers = list(getConfig('esservers',
'http://localhost:9200',
options.configfile).split(','))
options.mongohost = getConfig('mongohost', 'localhost', options.configfile)
options.mongoport = getConfig('mongoport', 3001, options.configfile)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option(
"-c",
dest='configfile',
default=sys.argv[0].replace('.py', '.conf'),
help="configuration file to use")
(options, args) = parser.parse_args()
initConfig()
initLogger()
main()
# ==== above file: ameihm0912/MozDef :: cron/collectSSHFingerprints.py (Python, mpl-2.0 license, 5,571 bytes) ====
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Manage hosts in the current zone.
"""
import collections
import UserDict
from oslo.config import cfg
from nova.compute import task_states
from nova.compute import vm_states
from nova import db
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova.pci import pci_request
from nova.pci import pci_stats
from nova.scheduler import filters
from nova.scheduler import weights
host_manager_opts = [
cfg.MultiStrOpt('scheduler_available_filters',
default=['nova.scheduler.filters.all_filters'],
help='Filter classes available to the scheduler which may '
'be specified more than once. An entry of '
'"nova.scheduler.filters.standard_filters" '
'maps to all filters included with nova.'),
cfg.ListOpt('scheduler_default_filters',
default=[
'RetryFilter',
'AvailabilityZoneFilter',
'RamFilter',
'ComputeFilter',
'ComputeCapabilitiesFilter',
'ImagePropertiesFilter'
],
help='Which filter class names to use for filtering hosts '
'when not specified in the request.'),
cfg.ListOpt('scheduler_weight_classes',
default=['nova.scheduler.weights.all_weighers'],
help='Which weight class names to use for weighing hosts'),
]
CONF = cfg.CONF
CONF.register_opts(host_manager_opts)
LOG = logging.getLogger(__name__)
class ReadOnlyDict(UserDict.IterableUserDict):
"""A read-only dict."""
def __init__(self, source=None):
self.data = {}
self.update(source)
def __setitem__(self, key, item):
raise TypeError()
def __delitem__(self, key):
raise TypeError()
def clear(self):
raise TypeError()
def pop(self, key, *args):
raise TypeError()
def popitem(self):
raise TypeError()
def update(self, source=None):
if source is None:
return
elif isinstance(source, UserDict.UserDict):
self.data = source.data
elif isinstance(source, type({})):
self.data = source
else:
raise TypeError()
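# A minimal sketch of the read-only behaviour (dict contents illustrative):
# lookups work like a normal dict, every mutator raises TypeError.
caps = ReadOnlyDict({'free_ram_mb': 2048})
assert caps['free_ram_mb'] == 2048
try:
    caps['free_ram_mb'] = 0
except TypeError:
    pass  # rejected, as intended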
# Representation of a single metric value from a compute node.
MetricItem = collections.namedtuple(
'MetricItem', ['value', 'timestamp', 'source'])
class HostState(object):
"""Mutable and immutable information tracked for a host.
This is an attempt to remove the ad-hoc data structures
previously used and lock down access.
"""
def __init__(self, host, node, capabilities=None, service=None):
self.host = host
self.nodename = node
self.update_capabilities(capabilities, service)
# Mutable available resources.
# These will change as resources are virtually "consumed".
self.total_usable_disk_gb = 0
self.disk_mb_used = 0
self.free_ram_mb = 0
self.free_disk_mb = 0
self.vcpus_total = 0
self.vcpus_used = 0
# Additional host information from the compute node stats:
self.vm_states = {}
self.task_states = {}
self.num_instances = 0
self.num_instances_by_project = {}
self.num_instances_by_os_type = {}
self.num_io_ops = 0
# Other information
self.host_ip = None
self.hypervisor_type = None
self.hypervisor_version = None
self.hypervisor_hostname = None
self.cpu_info = None
self.supported_instances = None
# Resource oversubscription values for the compute host:
self.limits = {}
# Generic metrics from compute nodes
self.metrics = {}
self.updated = None
def update_capabilities(self, capabilities=None, service=None):
# Read-only capability dicts
if capabilities is None:
capabilities = {}
self.capabilities = ReadOnlyDict(capabilities)
if service is None:
service = {}
self.service = ReadOnlyDict(service)
def _update_metrics_from_compute_node(self, compute):
#NOTE(llu): The 'or []' is to avoid json decode failure of None
# returned from compute.get, because DB schema allows
# NULL in the metrics column
metrics = compute.get('metrics', []) or []
if metrics:
metrics = jsonutils.loads(metrics)
for metric in metrics:
# 'name', 'value', 'timestamp' and 'source' are all required
# to be valid keys, just let KeyError happen if any one of
# them is missing. But we also require 'name' to be True.
name = metric['name']
item = MetricItem(value=metric['value'],
timestamp=metric['timestamp'],
source=metric['source'])
if name:
self.metrics[name] = item
else:
LOG.warn(_("Metric name unknown of %r") % item)
def update_from_compute_node(self, compute):
"""Update information about a host from its compute_node info."""
if (self.updated and compute['updated_at']
and self.updated > compute['updated_at']):
return
all_ram_mb = compute['memory_mb']
# Assume virtual size is all consumed by instances if use qcow2 disk.
least = compute.get('disk_available_least')
free_disk_mb = least if least is not None else compute['free_disk_gb']
free_disk_mb *= 1024
self.disk_mb_used = compute['local_gb_used'] * 1024
#NOTE(jogo) free_ram_mb can be negative
self.free_ram_mb = compute['free_ram_mb']
self.total_usable_ram_mb = all_ram_mb
self.total_usable_disk_gb = compute['local_gb']
self.free_disk_mb = free_disk_mb
self.vcpus_total = compute['vcpus']
self.vcpus_used = compute['vcpus_used']
self.updated = compute['updated_at']
if 'pci_stats' in compute:
self.pci_stats = pci_stats.PciDeviceStats(compute['pci_stats'])
else:
self.pci_stats = None
# All virt drivers report host_ip
self.host_ip = compute['host_ip']
self.hypervisor_type = compute.get('hypervisor_type')
self.hypervisor_version = compute.get('hypervisor_version')
self.hypervisor_hostname = compute.get('hypervisor_hostname')
self.cpu_info = compute.get('cpu_info')
if compute.get('supported_instances'):
self.supported_instances = jsonutils.loads(
compute.get('supported_instances'))
# Don't store stats directly in host_state to make sure these don't
# overwrite any values, or get overwritten themselves. Store in self so
# filters can schedule with them.
self.stats = self._statmap(compute.get('stats', []))
self.hypervisor_version = compute['hypervisor_version']
# Track number of instances on host
self.num_instances = int(self.stats.get('num_instances', 0))
# Track number of instances by project_id
project_id_keys = [k for k in self.stats.keys() if
k.startswith("num_proj_")]
for key in project_id_keys:
project_id = key[9:]
self.num_instances_by_project[project_id] = int(self.stats[key])
# Track number of instances in certain vm_states
vm_state_keys = [k for k in self.stats.keys() if
k.startswith("num_vm_")]
for key in vm_state_keys:
vm_state = key[7:]
self.vm_states[vm_state] = int(self.stats[key])
# Track number of instances in certain task_states
task_state_keys = [k for k in self.stats.keys() if
k.startswith("num_task_")]
for key in task_state_keys:
task_state = key[9:]
self.task_states[task_state] = int(self.stats[key])
# Track number of instances by host_type
os_keys = [k for k in self.stats.keys() if
k.startswith("num_os_type_")]
for key in os_keys:
os = key[12:]
self.num_instances_by_os_type[os] = int(self.stats[key])
self.num_io_ops = int(self.stats.get('io_workload', 0))
# update metrics
self._update_metrics_from_compute_node(compute)
def consume_from_instance(self, instance):
"""Incrementally update host state from an instance."""
disk_mb = (instance['root_gb'] + instance['ephemeral_gb']) * 1024
ram_mb = instance['memory_mb']
vcpus = instance['vcpus']
self.free_ram_mb -= ram_mb
self.free_disk_mb -= disk_mb
self.vcpus_used += vcpus
self.updated = timeutils.utcnow()
# Track number of instances on host
self.num_instances += 1
# Track number of instances by project_id
project_id = instance.get('project_id')
if project_id not in self.num_instances_by_project:
self.num_instances_by_project[project_id] = 0
self.num_instances_by_project[project_id] += 1
# Track number of instances in certain vm_states
vm_state = instance.get('vm_state', vm_states.BUILDING)
if vm_state not in self.vm_states:
self.vm_states[vm_state] = 0
self.vm_states[vm_state] += 1
# Track number of instances in certain task_states
task_state = instance.get('task_state')
if task_state not in self.task_states:
self.task_states[task_state] = 0
self.task_states[task_state] += 1
# Track number of instances by host_type
os_type = instance.get('os_type')
if os_type not in self.num_instances_by_os_type:
self.num_instances_by_os_type[os_type] = 0
self.num_instances_by_os_type[os_type] += 1
pci_requests = pci_request.get_instance_pci_requests(instance)
if pci_requests and self.pci_stats:
self.pci_stats.apply_requests(pci_requests)
vm_state = instance.get('vm_state', vm_states.BUILDING)
task_state = instance.get('task_state')
if vm_state == vm_states.BUILDING or task_state in [
task_states.RESIZE_MIGRATING, task_states.REBUILDING,
task_states.RESIZE_PREP, task_states.IMAGE_SNAPSHOT,
task_states.IMAGE_BACKUP]:
self.num_io_ops += 1
def _statmap(self, stats):
return dict((st['key'], st['value']) for st in stats)
def __repr__(self):
return ("(%s, %s) ram:%s disk:%s io_ops:%s instances:%s" %
(self.host, self.nodename, self.free_ram_mb, self.free_disk_mb,
self.num_io_ops, self.num_instances))
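# A hedged sketch of the bookkeeping consume_from_instance performs, using a
# plain dict in place of a real instance record (all numbers illustrative):
_inst = {'memory_mb': 512, 'root_gb': 10, 'ephemeral_gb': 0, 'vcpus': 1}
_free_ram_mb, _free_disk_mb, _vcpus_used = 2048, 40960, 2
_free_ram_mb -= _inst['memory_mb']                                  # 1536
_free_disk_mb -= (_inst['root_gb'] + _inst['ephemeral_gb']) * 1024  # 30720
_vcpus_used += _inst['vcpus']                                       # 3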
class HostManager(object):
"""Base HostManager class."""
# Can be overridden in a subclass
host_state_cls = HostState
def __init__(self):
# { (host, hypervisor_hostname) : { <service> : { cap k : v }}}
self.service_states = {}
self.host_state_map = {}
self.filter_handler = filters.HostFilterHandler()
self.filter_classes = self.filter_handler.get_matching_classes(
CONF.scheduler_available_filters)
self.weight_handler = weights.HostWeightHandler()
self.weight_classes = self.weight_handler.get_matching_classes(
CONF.scheduler_weight_classes)
def _choose_host_filters(self, filter_cls_names):
"""Since the caller may specify which filters to use we need
to have an authoritative list of what is permissible. This
function checks the filter names against a predefined set
of acceptable filters.
"""
if filter_cls_names is None:
filter_cls_names = CONF.scheduler_default_filters
if not isinstance(filter_cls_names, (list, tuple)):
filter_cls_names = [filter_cls_names]
cls_map = dict((cls.__name__, cls) for cls in self.filter_classes)
good_filters = []
bad_filters = []
for filter_name in filter_cls_names:
if filter_name not in cls_map:
bad_filters.append(filter_name)
continue
good_filters.append(cls_map[filter_name])
if bad_filters:
msg = ", ".join(bad_filters)
raise exception.SchedulerHostFilterNotFound(filter_name=msg)
return good_filters
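# A hedged sketch of the validation above, with made-up filter names: known
# names map to classes, unknown names are collected and reported together.
_cls_map = {'RamFilter': object, 'ComputeFilter': object}
_requested = ['RamFilter', 'BogusFilter']
_good = [_cls_map[n] for n in _requested if n in _cls_map]
_bad = [n for n in _requested if n not in _cls_map]
assert _bad == ['BogusFilter']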
def get_filtered_hosts(self, hosts, filter_properties,
filter_class_names=None, index=0):
"""Filter hosts and return only ones passing all filters."""
def _strip_ignore_hosts(host_map, hosts_to_ignore):
ignored_hosts = []
for host in hosts_to_ignore:
for (hostname, nodename) in host_map.keys():
if host == hostname:
del host_map[(hostname, nodename)]
ignored_hosts.append(host)
ignored_hosts_str = ', '.join(ignored_hosts)
msg = _('Host filter ignoring hosts: %s')
LOG.audit(msg % ignored_hosts_str)
def _match_forced_hosts(host_map, hosts_to_force):
forced_hosts = []
for (hostname, nodename) in host_map.keys():
if hostname not in hosts_to_force:
del host_map[(hostname, nodename)]
else:
forced_hosts.append(hostname)
if host_map:
forced_hosts_str = ', '.join(forced_hosts)
msg = _('Host filter forcing available hosts to %s')
else:
forced_hosts_str = ', '.join(hosts_to_force)
msg = _("No hosts matched due to not matching "
"'force_hosts' value of '%s'")
LOG.audit(msg % forced_hosts_str)
def _match_forced_nodes(host_map, nodes_to_force):
forced_nodes = []
for (hostname, nodename) in host_map.keys():
if nodename not in nodes_to_force:
del host_map[(hostname, nodename)]
else:
forced_nodes.append(nodename)
if host_map:
forced_nodes_str = ', '.join(forced_nodes)
msg = _('Host filter forcing available nodes to %s')
else:
forced_nodes_str = ', '.join(nodes_to_force)
msg = _("No nodes matched due to not matching "
"'force_nodes' value of '%s'")
LOG.audit(msg % forced_nodes_str)
filter_classes = self._choose_host_filters(filter_class_names)
ignore_hosts = filter_properties.get('ignore_hosts', [])
force_hosts = filter_properties.get('force_hosts', [])
force_nodes = filter_properties.get('force_nodes', [])
if ignore_hosts or force_hosts or force_nodes:
# NOTE(deva): we can't assume "host" is unique because
# one host may have many nodes.
name_to_cls_map = dict([((x.host, x.nodename), x) for x in hosts])
if ignore_hosts:
_strip_ignore_hosts(name_to_cls_map, ignore_hosts)
if not name_to_cls_map:
return []
# NOTE(deva): allow force_hosts and force_nodes independently
if force_hosts:
_match_forced_hosts(name_to_cls_map, force_hosts)
if force_nodes:
_match_forced_nodes(name_to_cls_map, force_nodes)
if force_hosts or force_nodes:
# NOTE(deva): Skip filters when forcing host or node
if name_to_cls_map:
return name_to_cls_map.values()
hosts = name_to_cls_map.itervalues()
return self.filter_handler.get_filtered_objects(filter_classes,
hosts, filter_properties, index)
def get_weighed_hosts(self, hosts, weight_properties):
"""Weigh the hosts."""
return self.weight_handler.get_weighed_objects(self.weight_classes,
hosts, weight_properties)
def get_all_host_states(self, context):
"""Returns a list of HostStates that represents all the hosts
the HostManager knows about. Also, each of the consumable resources
in HostState are pre-populated and adjusted based on data in the db.
"""
# Get resource usage across the available compute nodes:
compute_nodes = db.compute_node_get_all(context)
seen_nodes = set()
for compute in compute_nodes:
service = compute['service']
if not service:
LOG.warn(_("No service for compute ID %s") % compute['id'])
continue
host = service['host']
node = compute.get('hypervisor_hostname')
state_key = (host, node)
capabilities = self.service_states.get(state_key, None)
host_state = self.host_state_map.get(state_key)
if host_state:
host_state.update_capabilities(capabilities,
dict(service.iteritems()))
else:
host_state = self.host_state_cls(host, node,
capabilities=capabilities,
service=dict(service.iteritems()))
self.host_state_map[state_key] = host_state
host_state.update_from_compute_node(compute)
seen_nodes.add(state_key)
# remove compute nodes from host_state_map if they are not active
dead_nodes = set(self.host_state_map.keys()) - seen_nodes
for state_key in dead_nodes:
host, node = state_key
LOG.info(_("Removing dead compute node %(host)s:%(node)s "
"from scheduler") % {'host': host, 'node': node})
del self.host_state_map[state_key]
return self.host_state_map.itervalues()
# ==== above file: OpenAcademy-OpenStack/nova-scheduler :: nova/scheduler/host_manager.py (Python, apache-2.0 license, 18,788 bytes) ====
#!/usr/bin/env python
"""eventfd: maintain an atomic counter inside a file descriptor"""
from cffi import FFI
import errno
ffi = FFI()
ffi.cdef("""
#define EFD_CLOEXEC ...
#define EFD_NONBLOCK ...
#define EFD_SEMAPHORE ...
int eventfd(unsigned int initval, int flags);
""")
C = ffi.verify("""
#include <sys/eventfd.h>
#include <stdint.h> /* Definition of uint64_t */
""", libraries=[])
def eventfd(initial_value=0, flags=0):
"""Create a new eventfd
Arguments
----------
:param int initial_value: The initial value to set the eventfd to
:param int flags: Flags to specify extra options
Flags
------
EFD_CLOEXEC: Close the eventfd when executing a new program
EFD_NONBLOCK: Open the socket in non-blocking mode
EFD_SEMAPHORE: Provide semaphore like semantics for read operations
Returns
--------
:return: The file descriptor representing the eventfd
:rtype: int
Exceptions
-----------
:raises ValueError: Invalid value in flags
:raises OSError: Max per process FD limit reached
:raises OSError: Max system FD limit reached
:raises OSError: Could not mount (internal) anonymous inode device
:raises MemoryError: Insufficient kernel memory
"""
fd = C.eventfd(initial_value, flags)
if fd < 0:
err = ffi.errno
if err == errno.EINVAL:
raise ValueError("Invalid value in flags")
elif err == errno.EMFILE:
raise OSError("Max per process FD limit reached")
elif err == errno.ENFILE:
raise OSError("Max system FD limit reached")
elif err == errno.ENODEV:
raise OSError("Could not mount (internal) anonymous inode device")
elif err == errno.ENOMEM:
raise MemoryError("Insufficent kernel memory available")
else:
# If you are here, it's a bug; send us the traceback
raise ValueError("Unknown Error: {}".format(err))
return fd
def str_to_events(data):
value = ffi.new('uint64_t[1]')
ffi.buffer(value, 8)[0:8] = data
return [value[0]]  # this may seem redundant but the original
# container is not actually a list
def event_to_str(event):
# We use ffi rather than the array module as
# python2.7 does not have an unsigned 64 bit in type
event = ffi.new('uint64_t[1]', (event,))
packed_event = ffi.buffer(event)[:]
return packed_event
EFD_CLOEXEC = C.EFD_CLOEXEC
EFD_NONBLOCK = C.EFD_NONBLOCK
EFD_SEMAPHORE = C.EFD_SEMAPHORE
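# A minimal usage sketch, assuming a Linux host (eventfd is Linux-only): the
# counter travels as a packed 64-bit value, so reads and writes go through
# event_to_str / str_to_events.
import os
_fd = eventfd(0)
os.write(_fd, event_to_str(3))             # add 3 to the counter
print(str_to_events(os.read(_fd, 8))[0])   # read resets the counter; prints 3
os.close(_fd)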
# ==== above file: arkaitzj/python-butter :: butter/_eventfd.py (Python, bsd-3-clause license, 2,534 bytes) ====
from __future__ import unicode_literals
import json
from moto.core.responses import BaseResponse
from .models import iot_backends
class IoTResponse(BaseResponse):
SERVICE_NAME = 'iot'
@property
def iot_backend(self):
return iot_backends[self.region]
def create_thing(self):
thing_name = self._get_param("thingName")
thing_type_name = self._get_param("thingTypeName")
attribute_payload = self._get_param("attributePayload")
thing_name, thing_arn = self.iot_backend.create_thing(
thing_name=thing_name,
thing_type_name=thing_type_name,
attribute_payload=attribute_payload,
)
return json.dumps(dict(thingName=thing_name, thingArn=thing_arn))
def create_thing_type(self):
thing_type_name = self._get_param("thingTypeName")
thing_type_properties = self._get_param("thingTypeProperties")
thing_type_name, thing_type_arn = self.iot_backend.create_thing_type(
thing_type_name=thing_type_name,
thing_type_properties=thing_type_properties,
)
return json.dumps(dict(thingTypeName=thing_type_name, thingTypeArn=thing_type_arn))
def list_thing_types(self):
previous_next_token = self._get_param("nextToken")
max_results = self._get_int_param("maxResults", 50) # not the default, but makes testing easier
thing_type_name = self._get_param("thingTypeName")
thing_types = self.iot_backend.list_thing_types(
thing_type_name=thing_type_name
)
thing_types = [_.to_dict() for _ in thing_types]
if previous_next_token is None:
result = thing_types[0:max_results]
next_token = str(max_results) if len(thing_types) > max_results else None
else:
token = int(previous_next_token)
result = thing_types[token:token + max_results]
next_token = str(token + max_results) if len(thing_types) > token + max_results else None
return json.dumps(dict(thingTypes=result, nextToken=next_token))
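# A hedged sketch of the token arithmetic above: the "opaque" nextToken is
# just the integer offset of the next page, serialized as a string.
_items, _max = list(range(120)), 50
_page = _items[0:_max]                             # first page: 0..49
_next = str(_max) if len(_items) > _max else None  # -> '50'
assert (_page[-1], _next) == (49, '50')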
def list_things(self):
previous_next_token = self._get_param("nextToken")
max_results = self._get_int_param("maxResults", 50) # not the default, but makes testing easier
attribute_name = self._get_param("attributeName")
attribute_value = self._get_param("attributeValue")
thing_type_name = self._get_param("thingTypeName")
things, next_token = self.iot_backend.list_things(
attribute_name=attribute_name,
attribute_value=attribute_value,
thing_type_name=thing_type_name,
max_results=max_results,
token=previous_next_token
)
return json.dumps(dict(things=things, nextToken=next_token))
def describe_thing(self):
thing_name = self._get_param("thingName")
thing = self.iot_backend.describe_thing(
thing_name=thing_name,
)
return json.dumps(thing.to_dict(include_default_client_id=True))
def describe_thing_type(self):
thing_type_name = self._get_param("thingTypeName")
thing_type = self.iot_backend.describe_thing_type(
thing_type_name=thing_type_name,
)
return json.dumps(thing_type.to_dict())
def delete_thing(self):
thing_name = self._get_param("thingName")
expected_version = self._get_param("expectedVersion")
self.iot_backend.delete_thing(
thing_name=thing_name,
expected_version=expected_version,
)
return json.dumps(dict())
def delete_thing_type(self):
thing_type_name = self._get_param("thingTypeName")
self.iot_backend.delete_thing_type(
thing_type_name=thing_type_name,
)
return json.dumps(dict())
def update_thing(self):
thing_name = self._get_param("thingName")
thing_type_name = self._get_param("thingTypeName")
attribute_payload = self._get_param("attributePayload")
expected_version = self._get_param("expectedVersion")
remove_thing_type = self._get_param("removeThingType")
self.iot_backend.update_thing(
thing_name=thing_name,
thing_type_name=thing_type_name,
attribute_payload=attribute_payload,
expected_version=expected_version,
remove_thing_type=remove_thing_type,
)
return json.dumps(dict())
def create_job(self):
job_arn, job_id, description = self.iot_backend.create_job(
job_id=self._get_param("jobId"),
targets=self._get_param("targets"),
description=self._get_param("description"),
document_source=self._get_param("documentSource"),
document=self._get_param("document"),
presigned_url_config=self._get_param("presignedUrlConfig"),
target_selection=self._get_param("targetSelection"),
job_executions_rollout_config=self._get_param("jobExecutionsRolloutConfig"),
document_parameters=self._get_param("documentParameters")
)
return json.dumps(dict(jobArn=job_arn, jobId=job_id, description=description))
def describe_job(self):
job = self.iot_backend.describe_job(job_id=self._get_param("jobId"))
return json.dumps(dict(
documentSource=job.document_source,
job=dict(
comment=job.comment,
completedAt=job.completed_at,
createdAt=job.created_at,
description=job.description,
documentParameters=job.document_parameters,
jobArn=job.job_arn,
jobExecutionsRolloutConfig=job.job_executions_rollout_config,
jobId=job.job_id,
jobProcessDetails=job.job_process_details,
lastUpdatedAt=job.last_updated_at,
presignedUrlConfig=job.presigned_url_config,
status=job.status,
targets=job.targets,
targetSelection=job.target_selection
)))
def create_keys_and_certificate(self):
set_as_active = self._get_bool_param("setAsActive")
cert, key_pair = self.iot_backend.create_keys_and_certificate(
set_as_active=set_as_active,
)
return json.dumps(dict(
certificateArn=cert.arn,
certificateId=cert.certificate_id,
certificatePem=cert.certificate_pem,
keyPair=key_pair
))
def delete_certificate(self):
certificate_id = self._get_param("certificateId")
self.iot_backend.delete_certificate(
certificate_id=certificate_id,
)
return json.dumps(dict())
def describe_certificate(self):
certificate_id = self._get_param("certificateId")
certificate = self.iot_backend.describe_certificate(
certificate_id=certificate_id,
)
return json.dumps(dict(certificateDescription=certificate.to_description_dict()))
def list_certificates(self):
# page_size = self._get_int_param("pageSize")
# marker = self._get_param("marker")
# ascending_order = self._get_param("ascendingOrder")
certificates = self.iot_backend.list_certificates()
# TODO: implement pagination in the future
return json.dumps(dict(certificates=[_.to_dict() for _ in certificates]))
def update_certificate(self):
certificate_id = self._get_param("certificateId")
new_status = self._get_param("newStatus")
self.iot_backend.update_certificate(
certificate_id=certificate_id,
new_status=new_status,
)
return json.dumps(dict())
def create_policy(self):
policy_name = self._get_param("policyName")
policy_document = self._get_param("policyDocument")
policy = self.iot_backend.create_policy(
policy_name=policy_name,
policy_document=policy_document,
)
return json.dumps(policy.to_dict_at_creation())
def list_policies(self):
# marker = self._get_param("marker")
# page_size = self._get_int_param("pageSize")
# ascending_order = self._get_param("ascendingOrder")
policies = self.iot_backend.list_policies()
# TODO: implement pagination in the future
return json.dumps(dict(policies=[_.to_dict() for _ in policies]))
def get_policy(self):
policy_name = self._get_param("policyName")
policy = self.iot_backend.get_policy(
policy_name=policy_name,
)
return json.dumps(policy.to_get_dict())
def delete_policy(self):
policy_name = self._get_param("policyName")
self.iot_backend.delete_policy(
policy_name=policy_name,
)
return json.dumps(dict())
def attach_principal_policy(self):
policy_name = self._get_param("policyName")
principal = self.headers.get('x-amzn-iot-principal')
self.iot_backend.attach_principal_policy(
policy_name=policy_name,
principal_arn=principal,
)
return json.dumps(dict())
def detach_principal_policy(self):
policy_name = self._get_param("policyName")
principal = self.headers.get('x-amzn-iot-principal')
self.iot_backend.detach_principal_policy(
policy_name=policy_name,
principal_arn=principal,
)
return json.dumps(dict())
def list_principal_policies(self):
principal = self.headers.get('x-amzn-iot-principal')
# marker = self._get_param("marker")
# page_size = self._get_int_param("pageSize")
# ascending_order = self._get_param("ascendingOrder")
policies = self.iot_backend.list_principal_policies(
principal_arn=principal
)
# TODO: implement pagination in the future
next_marker = None
return json.dumps(dict(policies=[_.to_dict() for _ in policies], nextMarker=next_marker))
def list_policy_principals(self):
policy_name = self.headers.get('x-amzn-iot-policy')
# marker = self._get_param("marker")
# page_size = self._get_int_param("pageSize")
# ascending_order = self._get_param("ascendingOrder")
principals = self.iot_backend.list_policy_principals(
policy_name=policy_name,
)
# TODO: implement pagination in the future
next_marker = None
return json.dumps(dict(principals=principals, nextMarker=next_marker))
def attach_thing_principal(self):
thing_name = self._get_param("thingName")
principal = self.headers.get('x-amzn-principal')
self.iot_backend.attach_thing_principal(
thing_name=thing_name,
principal_arn=principal,
)
return json.dumps(dict())
def detach_thing_principal(self):
thing_name = self._get_param("thingName")
principal = self.headers.get('x-amzn-principal')
self.iot_backend.detach_thing_principal(
thing_name=thing_name,
principal_arn=principal,
)
return json.dumps(dict())
def list_principal_things(self):
next_token = self._get_param("nextToken")
# max_results = self._get_int_param("maxResults")
principal = self.headers.get('x-amzn-principal')
things = self.iot_backend.list_principal_things(
principal_arn=principal,
)
# TODO: implement pagination in the future
next_token = None
return json.dumps(dict(things=things, nextToken=next_token))
def list_thing_principals(self):
thing_name = self._get_param("thingName")
principals = self.iot_backend.list_thing_principals(
thing_name=thing_name,
)
return json.dumps(dict(principals=principals))
def describe_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
thing_group = self.iot_backend.describe_thing_group(
thing_group_name=thing_group_name,
)
return json.dumps(thing_group.to_dict())
def create_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
parent_group_name = self._get_param("parentGroupName")
thing_group_properties = self._get_param("thingGroupProperties")
thing_group_name, thing_group_arn, thing_group_id = self.iot_backend.create_thing_group(
thing_group_name=thing_group_name,
parent_group_name=parent_group_name,
thing_group_properties=thing_group_properties,
)
return json.dumps(dict(
thingGroupName=thing_group_name,
thingGroupArn=thing_group_arn,
thingGroupId=thing_group_id)
)
def delete_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
expected_version = self._get_param("expectedVersion")
self.iot_backend.delete_thing_group(
thing_group_name=thing_group_name,
expected_version=expected_version,
)
return json.dumps(dict())
def list_thing_groups(self):
# next_token = self._get_param("nextToken")
# max_results = self._get_int_param("maxResults")
parent_group = self._get_param("parentGroup")
name_prefix_filter = self._get_param("namePrefixFilter")
recursive = self._get_param("recursive")
thing_groups = self.iot_backend.list_thing_groups(
parent_group=parent_group,
name_prefix_filter=name_prefix_filter,
recursive=recursive,
)
next_token = None
rets = [{'groupName': _.thing_group_name, 'groupArn': _.arn} for _ in thing_groups]
# TODO: implement pagination in the future
return json.dumps(dict(thingGroups=rets, nextToken=next_token))
def update_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
thing_group_properties = self._get_param("thingGroupProperties")
expected_version = self._get_param("expectedVersion")
version = self.iot_backend.update_thing_group(
thing_group_name=thing_group_name,
thing_group_properties=thing_group_properties,
expected_version=expected_version,
)
return json.dumps(dict(version=version))
def add_thing_to_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
thing_group_arn = self._get_param("thingGroupArn")
thing_name = self._get_param("thingName")
thing_arn = self._get_param("thingArn")
self.iot_backend.add_thing_to_thing_group(
thing_group_name=thing_group_name,
thing_group_arn=thing_group_arn,
thing_name=thing_name,
thing_arn=thing_arn,
)
return json.dumps(dict())
def remove_thing_from_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
thing_group_arn = self._get_param("thingGroupArn")
thing_name = self._get_param("thingName")
thing_arn = self._get_param("thingArn")
self.iot_backend.remove_thing_from_thing_group(
thing_group_name=thing_group_name,
thing_group_arn=thing_group_arn,
thing_name=thing_name,
thing_arn=thing_arn,
)
return json.dumps(dict())
def list_things_in_thing_group(self):
thing_group_name = self._get_param("thingGroupName")
recursive = self._get_param("recursive")
# next_token = self._get_param("nextToken")
# max_results = self._get_int_param("maxResults")
things = self.iot_backend.list_things_in_thing_group(
thing_group_name=thing_group_name,
recursive=recursive,
)
next_token = None
thing_names = [_.thing_name for _ in things]
# TODO: implement pagination in the future
return json.dumps(dict(things=thing_names, nextToken=next_token))
def list_thing_groups_for_thing(self):
thing_name = self._get_param("thingName")
# next_token = self._get_param("nextToken")
# max_results = self._get_int_param("maxResults")
thing_groups = self.iot_backend.list_thing_groups_for_thing(
thing_name=thing_name
)
next_token = None
# TODO: implement pagination in the future
return json.dumps(dict(thingGroups=thing_groups, nextToken=next_token))
def update_thing_groups_for_thing(self):
thing_name = self._get_param("thingName")
thing_groups_to_add = self._get_param("thingGroupsToAdd") or []
thing_groups_to_remove = self._get_param("thingGroupsToRemove") or []
self.iot_backend.update_thing_groups_for_thing(
thing_name=thing_name,
thing_groups_to_add=thing_groups_to_add,
thing_groups_to_remove=thing_groups_to_remove,
)
return json.dumps(dict())
|
okomestudio/moto
|
moto/iot/responses.py
|
Python
|
apache-2.0
| 17,089
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class Users(models.Model):
_inherit = 'res.users'
karma = fields.Integer('Karma', default=0)
karma_tracking_ids = fields.One2many('gamification.karma.tracking', 'user_id', string='Karma Changes', groups="base.group_system")
badge_ids = fields.One2many('gamification.badge.user', 'user_id', string='Badges', copy=False)
gold_badge = fields.Integer('Gold badges count', compute="_get_user_badge_level")
silver_badge = fields.Integer('Silver badges count', compute="_get_user_badge_level")
bronze_badge = fields.Integer('Bronze badges count', compute="_get_user_badge_level")
rank_id = fields.Many2one('gamification.karma.rank', 'Rank', index=False)
next_rank_id = fields.Many2one('gamification.karma.rank', 'Next Rank', index=False)
@api.depends('badge_ids')
def _get_user_badge_level(self):
""" Return total badge per level of users
TDE CLEANME: shouldn't check type is forum ? """
for user in self:
user.gold_badge = 0
user.silver_badge = 0
user.bronze_badge = 0
self.env.cr.execute("""
SELECT bu.user_id, b.level, count(1)
FROM gamification_badge_user bu, gamification_badge b
WHERE bu.user_id IN %s
AND bu.badge_id = b.id
AND b.level IS NOT NULL
GROUP BY bu.user_id, b.level
ORDER BY bu.user_id;
""", [tuple(self.ids)])
for (user_id, level, count) in self.env.cr.fetchall():
# levels are gold, silver, bronze but fields have _badge postfix
self.browse(user_id)['{}_badge'.format(level)] = count
@api.model_create_multi
def create(self, values_list):
res = super(Users, self).create(values_list)
karma_trackings = []
for user in res:
if user.karma:
karma_trackings.append({'user_id': user.id, 'old_value': 0, 'new_value': user.karma})
if karma_trackings:
self.env['gamification.karma.tracking'].sudo().create(karma_trackings)
res._recompute_rank()
return res
def write(self, vals):
karma_trackings = []
if 'karma' in vals:
for user in self:
if user.karma != vals['karma']:
karma_trackings.append({'user_id': user.id, 'old_value': user.karma, 'new_value': vals['karma']})
result = super(Users, self).write(vals)
if karma_trackings:
self.env['gamification.karma.tracking'].sudo().create(karma_trackings)
if 'karma' in vals:
self._recompute_rank()
return result
def add_karma(self, karma):
for user in self:
user.karma += karma
return True
def _get_tracking_karma_gain_position(self, user_domain, from_date=None, to_date=None):
""" Get absolute position in term of gained karma for users. First a ranking
of all users is done given a user_domain; then the position of each user
belonging to the current record set is extracted.
Example: in website profile, search users with name containing Norbert. Their
positions should not be 1 to 4 (assuming 4 results), but their actual position
in the karma gain ranking (with example user_domain being karma > 1,
website published True).
:param user_domain: general domain (i.e. active, karma > 1, website, ...)
to compute the absolute position of the current record set
:param from_date: compute karma gained after this date (included) or from
beginning of time;
:param to_date: compute karma gained before this date (included) or until
end of time;
:return list: [{
'user_id': user_id (belonging to current record set),
'karma_gain_total': integer, karma gained in the given timeframe,
'karma_position': integer, ranking position
}, {..}] ordered by karma_position desc
"""
if not self:
return []
where_query = self.env['res.users']._where_calc(user_domain)
user_from_clause, user_where_clause, where_clause_params = where_query.get_sql()
params = []
if from_date:
date_from_condition = 'AND tracking.tracking_date::timestamp >= timestamp %s'
params.append(from_date)
if to_date:
date_to_condition = 'AND tracking.tracking_date::timestamp <= timestamp %s'
params.append(to_date)
params.append(tuple(self.ids))
query = """
SELECT final.user_id, final.karma_gain_total, final.karma_position
FROM (
SELECT intermediate.user_id, intermediate.karma_gain_total, row_number() OVER (ORDER BY intermediate.karma_gain_total DESC) AS karma_position
FROM (
SELECT "res_users".id as user_id, COALESCE(SUM("tracking".new_value - "tracking".old_value), 0) as karma_gain_total
FROM %(user_from_clause)s
LEFT JOIN "gamification_karma_tracking" as "tracking"
ON "res_users".id = "tracking".user_id AND "res_users"."active" = TRUE
WHERE %(user_where_clause)s %(date_from_condition)s %(date_to_condition)s
GROUP BY "res_users".id
ORDER BY karma_gain_total DESC
) intermediate
) final
WHERE final.user_id IN %%s""" % {
'user_from_clause': user_from_clause,
'user_where_clause': user_where_clause or (not from_date and not to_date and 'TRUE') or '',
'date_from_condition': date_from_condition if from_date else '',
'date_to_condition': date_to_condition if to_date else ''
}
self.env.cr.execute(query, tuple(where_clause_params + params))
return self.env.cr.dictfetchall()
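        # Usage sketch (hypothetical values, added for illustration):
        #     users._get_tracking_karma_gain_position(
        #         [('karma', '>', 1)], from_date='2021-01-01', to_date='2021-12-31')
        #     # -> [{'user_id': 7, 'karma_gain_total': 42, 'karma_position': 3}, ...]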
def _get_karma_position(self, user_domain):
""" Get absolute position in term of total karma for users. First a ranking
of all users is done given a user_domain; then the position of each user
belonging to the current record set is extracted.
Example: in website profile, search users with name containing Norbert. Their
positions should not be 1 to 4 (assuming 4 results), but their actual position
in the total karma ranking (with example user_domain being karma > 1,
website published True).
:param user_domain: general domain (i.e. active, karma > 1, website, ...)
to compute the absolute position of the current record set
:return list: [{
'user_id': user_id (belonging to current record set),
'karma_position': integer, ranking position
}, {..}] ordered by karma_position desc
"""
if not self:
            return []
where_query = self.env['res.users']._where_calc(user_domain)
user_from_clause, user_where_clause, where_clause_params = where_query.get_sql()
# we search on every user in the DB to get the real positioning (not the one inside the subset)
# then, we filter to get only the subset.
query = """
SELECT sub.user_id, sub.karma_position
FROM (
SELECT "res_users"."id" as user_id, row_number() OVER (ORDER BY res_users.karma DESC) AS karma_position
FROM %(user_from_clause)s
WHERE %(user_where_clause)s
) sub
WHERE sub.user_id IN %%s""" % {
'user_from_clause': user_from_clause,
'user_where_clause': user_where_clause or 'TRUE',
}
self.env.cr.execute(query, tuple(where_clause_params + [tuple(self.ids)]))
return self.env.cr.dictfetchall()
def _rank_changed(self):
"""
Method that can be called on a batch of users with the same new rank
"""
template = self.env.ref('gamification.mail_template_data_new_rank_reached', raise_if_not_found=False)
if template:
for u in self:
if u.rank_id.karma_min > 0:
template.send_mail(u.id, force_send=False, notif_layout='mail.mail_notification_light')
def _recompute_rank(self):
"""
        Compute the rank user by user: for each user, find the highest rank
        whose karma_min the user meets.
        The caller should filter the record set to users with karma > 0 before
        calling this method, to avoid looping over every single user.
"""
ranks = [{'rank': rank, 'karma_min': rank.karma_min} for rank in
self.env['gamification.karma.rank'].search([], order="karma_min DESC")]
        # 3 is the number of searches/requests used per rank in _recompute_rank_bulk()
if len(self) > len(ranks) * 3:
self._recompute_rank_bulk()
return
for user in self:
old_rank = user.rank_id
if user.karma == 0 and ranks:
user.write({'next_rank_id': ranks[-1]['rank'].id})
else:
for i in range(0, len(ranks)):
if user.karma >= ranks[i]['karma_min']:
user.write({
'rank_id': ranks[i]['rank'].id,
'next_rank_id': ranks[i - 1]['rank'].id if 0 < i else False
})
break
if old_rank != user.rank_id:
user._rank_changed()
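        # Illustration (hypothetical ranks, added): with ranks at karma_min
        # 500, 100 and 0 (searched karma_min DESC), a user with karma 150 gets
        # rank_id = the 100 rank and next_rank_id = the 500 rank, because the
        # loop stops at the first karma_min the user meets.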
def _recompute_rank_bulk(self):
"""
Compute rank of each user by rank.
For each rank, check which users need to be ranked
"""
ranks = [{'rank': rank, 'karma_min': rank.karma_min} for rank in
self.env['gamification.karma.rank'].search([], order="karma_min DESC")]
users_todo = self
next_rank_id = False
# wtf, next_rank_id should be a related on rank_id.next_rank_id and life might get easier.
# And we only need to recompute next_rank_id on write with min_karma or in the create on rank model.
for r in ranks:
rank_id = r['rank'].id
dom = [
('karma', '>=', r['karma_min']),
('id', 'in', users_todo.ids),
'|', # noqa
'|', ('rank_id', '!=', rank_id), ('rank_id', '=', False),
'|', ('next_rank_id', '!=', next_rank_id), ('next_rank_id', '=', False if next_rank_id else -1),
]
users = self.env['res.users'].search(dom)
if users:
users_to_notify = self.env['res.users'].search([
('karma', '>=', r['karma_min']),
'|', ('rank_id', '!=', rank_id), ('rank_id', '=', False),
('id', 'in', users.ids),
])
users.write({
'rank_id': rank_id,
'next_rank_id': next_rank_id,
})
users_to_notify._rank_changed()
users_todo -= users
nothing_to_do_users = self.env['res.users'].search([
('karma', '>=', r['karma_min']),
'|', ('rank_id', '=', rank_id), ('next_rank_id', '=', next_rank_id),
('id', 'in', users_todo.ids),
])
users_todo -= nothing_to_do_users
next_rank_id = r['rank'].id
if ranks:
lower_rank = ranks[-1]['rank']
users = self.env['res.users'].search([
('karma', '>=', 0),
('karma', '<', lower_rank.karma_min),
'|', ('rank_id', '!=', False), ('next_rank_id', '!=', lower_rank.id),
('id', 'in', users_todo.ids),
])
if users:
users.write({
'rank_id': False,
'next_rank_id': lower_rank.id,
})
def _get_next_rank(self):
""" For fresh users with 0 karma that don't have a rank_id and next_rank_id yet
this method returns the first karma rank (by karma ascending). This acts as a
default value in related views.
TDE FIXME in post-12.4: make next_rank_id a non-stored computed field correctly computed """
if self.next_rank_id:
return self.next_rank_id
elif not self.rank_id:
return self.env['gamification.karma.rank'].search([], order="karma_min ASC", limit=1)
else:
return self.env['gamification.karma.rank']
def get_gamification_redirection_data(self):
"""
        Hook for other modules to add redirect button(s) to the new rank reached mail.
        Must return a list of dictionaries, each including a url and a label,
        e.g. return [{'url': '/forum', 'label': 'Go to Forum'}]
"""
self.ensure_one()
return []
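        # Hypothetical override sketch (added, not part of this module):
        #
        #     def get_gamification_redirection_data(self):
        #         res = super().get_gamification_redirection_data()
        #         res.append({'url': '/forum', 'label': 'Go to Forum'})
        #         return res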
|
ygol/odoo
|
addons/gamification/models/res_users.py
|
Python
|
agpl-3.0
| 12,734
|
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self, plotly_name="family", parent_name="heatmapgl.hoverlabel.font", **kwargs
):
super(FamilyValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
no_blank=kwargs.pop("no_blank", True),
strict=kwargs.pop("strict", True),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/heatmapgl/hoverlabel/font/_family.py
|
Python
|
mit
| 574
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Mg(Package):
"""Mg is intended to be a small, fast, and portable editor for people
who can't (or don't want to) run emacs for one reason or another,
or are not familiar with the vi editor. It is compatible with
emacs because there shouldn't be any reason to learn more editor
types than emacs or vi."""
homepage = "https://github.com/ibara/mg"
url = "https://github.com/ibara/mg/archive/mg-6.6.tar.gz"
version('6.6', sha256='e8440353da1a52ec7d40fb88d4f145da49c320b5ba31daf895b0b0db5ccd0632')
depends_on('ncurses')
phases = ['configure', 'build', 'install']
def configure(self, spec, prefix):
configure = Executable('./configure')
args = [
'--mandir={0}'.format(self.prefix.man),
'--prefix={0}'.format(self.prefix),
]
configure(*args)
def build(self, spec, prefix):
make()
def install(self, spec, prefix):
make('install')
|
LLNL/spack
|
var/spack/repos/builtin/packages/mg/package.py
|
Python
|
lgpl-2.1
| 1,183
|
# This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Expand variables like $DATE, $LILYPOND_VERSION etc. in snippets.
"""
import builtins
import time
import appinfo
import lilypondinfo
def _(docstring):
"""Returns a decorator.
The decorator gives a function a doc() method, returning the translated docstring.
The untranslated docstring will be added as __doc__ to the function.
builtins._ is expected to be the translation function.
We use the underscore as function name so xgettext picks up the strings
to be translated.
"""
def deco(f):
f.__doc__ = docstring
f.doc = lambda: builtins._(docstring)
return f
return deco
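# Usage sketch (added comment): the Expander methods below are decorated as
#
#     @_("The current date in YYYY-MM-DD format.")
#     def DATE(self): ...
#
# so DATE.__doc__ holds the untranslated string and DATE.doc() returns the
# translated one at call time.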
def documentation(cls):
"""Yields tuples documenting the methods of the specified class.
The tuples are: (function_name, docstring). The docstrings are translated.
The tuples are sorted on function_name.
"""
for name, meth in sorted(cls.__dict__.items()):
        if name.startswith('_'):
            continue  # skip private/dunder attributes instead of ending the generator
yield name, meth.doc()
ANCHOR, CURSOR, SELECTION = constants = 1, 2, 3 # just some constants
class Expander(object):
"""Expands variables.
The methods return text or other events (currently simply integer constants).
"""
def __init__(self, cursor):
self.cursor = cursor
@_("The current date in YYYY-MM-DD format.")
def DATE(self):
return time.strftime('%Y-%m-%d')
@_("The version of the default LilyPond program.")
def LILYPOND_VERSION(self):
return lilypondinfo.preferred().versionString()
@_("The version of Frescobaldi.")
def FRESCOBALDI_VERSION(self):
return appinfo.version
@_("The URL of the current document.")
def URL(self):
return self.cursor.document().url().toString()
@_("The full local filename of the current document.")
def FILE_NAME(self):
return self.cursor.document().url().toLocalFile()
@_("The name of the current document.")
def DOCUMENT_NAME(self):
return self.cursor.document().documentName()
@_("Moves the text cursor here after insert.")
def CURSOR(self):
return CURSOR
@_("Selects text from here to the position given using the <code>$CURSOR</code> variable")
def ANCHOR(self):
return ANCHOR
@_("The selected text if available. If not, the text cursor is moved here.")
def SELECTION(self):
return SELECTION if self.cursor.hasSelection() else CURSOR
|
anthonyfok/frescobaldi
|
frescobaldi_app/snippet/expand.py
|
Python
|
gpl-2.0
| 3,394
|
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, get_object_or_404
from django.template.response import TemplateResponse
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.http import require_POST
from oioioi.base.menu import menu_registry
from oioioi.base.permissions import enforce_condition, not_anonymous
from oioioi.base.utils.confirmation import confirmation_view
from oioioi.contests.utils import contest_exists, can_enter_contest, \
is_contest_admin
from oioioi.contests.menu import contest_admin_menu_registry
from oioioi.forum.models import Category
from oioioi.forum.forms import PostForm, NewThreadForm
from oioioi.forum.utils import forum_exists_and_visible, is_proper_forum, \
is_not_locked, get_forum_ct, get_forum_ctp, get_msgs, forum_is_locked
# registering forum
@menu_registry.register_decorator(_("Forum"), lambda request:
reverse('forum', kwargs={'contest_id': request.contest.id}),
order=500)
@contest_admin_menu_registry.register_decorator(_("Forum"), lambda request:
reverse('oioioiadmin:forum_forum_change',
args=(request.contest.forum.id,)),
order=50)
@enforce_condition(contest_exists & can_enter_contest)
@enforce_condition(forum_exists_and_visible & is_proper_forum)
def forum_view(request):
msgs = get_msgs(request)
category_set = request.contest.forum.category_set \
.prefetch_related('thread_set', 'thread_set__post_set') \
.all()
return TemplateResponse(request, 'forum/forum.html', {
'forum': request.contest.forum, 'msgs': msgs,
'is_locked': forum_is_locked(request), 'category_set': category_set
})
@enforce_condition(contest_exists & can_enter_contest)
@enforce_condition(forum_exists_and_visible & is_proper_forum)
def category_view(request, category_id):
category = get_object_or_404(Category, id=category_id)
msgs = get_msgs(request)
threads = category.thread_set \
.prefetch_related('post_set') \
.select_related('last_post', 'last_post__author') \
.all()
return TemplateResponse(request, 'forum/category.html',
{'forum': request.contest.forum, 'category': category,
'threads': threads, 'msgs': msgs,
'is_locked': forum_is_locked(request)})
@enforce_condition(contest_exists & can_enter_contest)
@enforce_condition(forum_exists_and_visible & is_proper_forum)
def thread_view(request, category_id, thread_id):
category, thread = get_forum_ct(category_id, thread_id)
forum, lock = request.contest.forum, forum_is_locked(request)
msgs = get_msgs(request)
post_set = thread.post_set.select_related('author').all()
if (request.user.is_authenticated() and not lock) or \
is_contest_admin(request):
if request.method == "POST":
form = PostForm(request, request.POST)
if form.is_valid():
instance = form.save(commit=False)
instance.author = request.user
instance.thread = thread
instance.add_date = request.timestamp
instance.save()
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id,
thread_id=thread.id)
else:
form = PostForm(request)
return TemplateResponse(request, 'forum/thread.html',
{'forum': forum, 'category': category, 'thread': thread,
'form': form, 'msgs': msgs, 'is_locked': lock,
'post_set': post_set})
else:
return TemplateResponse(request, 'forum/thread.html',
{'forum': forum, 'category': category, 'thread': thread,
'msgs': msgs, 'is_locked': lock, 'post_set': post_set})
@enforce_condition(not_anonymous & contest_exists & can_enter_contest)
@enforce_condition(forum_exists_and_visible & is_proper_forum & is_not_locked)
def thread_add_view(request, category_id):
category = get_object_or_404(Category, id=category_id)
msgs = get_msgs(request)
if request.method == 'POST':
form = NewThreadForm(request, request.POST)
if form.is_valid(): # adding the new thread
instance = form.save(commit=False)
instance.category = category
instance.save()
post = PostForm(request, request.POST)
if post.is_valid(): # adding the new post
inst_post = post.save(commit=False)
inst_post.author = request.user
inst_post.thread = instance
inst_post.add_date = request.timestamp
inst_post.save()
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id,
thread_id=instance.id)
else:
form = NewThreadForm(request)
return TemplateResponse(request, 'forum/thread_add.html',
{'forum': request.contest.forum, 'category': category,
'form': form, 'msgs': msgs})
@enforce_condition(not_anonymous & contest_exists & can_enter_contest)
@enforce_condition(forum_exists_and_visible & is_proper_forum & is_not_locked)
def edit_post_view(request, category_id, thread_id, post_id):
(category, thread, post) = get_forum_ctp(category_id, thread_id, post_id)
msgs = get_msgs(request)
is_admin = is_contest_admin(request)
if post.author != request.user and not is_admin:
raise PermissionDenied
if request.method == 'POST':
form = PostForm(request, request.POST, instance=post)
if form.is_valid():
instance = form.save(commit=False)
instance.last_edit_date = request.timestamp
instance.save()
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id,
thread_id=thread.id)
else:
form = PostForm(request, instance=post)
return TemplateResponse(request, 'forum/edit_post.html',
{'forum': request.contest.forum, 'category': category,
'thread': thread, 'form': form, 'post': post, 'msgs': msgs})
@enforce_condition(not_anonymous & contest_exists & can_enter_contest)
@enforce_condition(forum_exists_and_visible & is_proper_forum & is_not_locked)
def delete_post_view(request, category_id, thread_id, post_id):
(category, thread, post) = get_forum_ctp(category_id, thread_id, post_id)
is_admin = is_contest_admin(request)
if not is_admin and \
(post.author != request.user or
(post.author == request.user and
(thread.post_set.filter(add_date__gt=post.add_date).exists() or
not post.can_be_removed()))):
        # Non-admins may delete only their own post, and only if no posts were
        # added after it and it is still within the removal time window
        # (post.can_be_removed()); otherwise deletion is forbidden.
raise PermissionDenied
else:
choice = confirmation_view(request, 'forum/confirm_delete.html',
{'elem': post})
if not isinstance(choice, bool):
return choice
if choice:
post.delete()
if not thread.post_set.exists():
thread.delete()
return redirect('forum_category',
contest_id=request.contest.id,
category_id=category.id)
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id, thread_id=thread.id)
@enforce_condition(not_anonymous & contest_exists & can_enter_contest)
@enforce_condition(forum_exists_and_visible & is_proper_forum)
@require_POST
def report_post_view(request, category_id, thread_id, post_id):
(category, thread, post) = get_forum_ctp(category_id, thread_id, post_id)
post.reported = True
post.save()
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id, thread_id=thread.id)
@enforce_condition(contest_exists & is_contest_admin)
@enforce_condition(forum_exists_and_visible & is_proper_forum)
@require_POST
def unreport_post_view(request, category_id, thread_id, post_id):
(category, thread, post) = get_forum_ctp(category_id, thread_id, post_id)
post.reported = False
post.save()
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id, thread_id=thread.id)
@enforce_condition(contest_exists & is_contest_admin)
@enforce_condition(forum_exists_and_visible & is_proper_forum)
@require_POST
def hide_post_view(request, category_id, thread_id, post_id):
(category, thread, post) = get_forum_ctp(category_id, thread_id, post_id)
post.hidden = True
post.reported = False
post.save()
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id, thread_id=thread.id)
@enforce_condition(contest_exists & is_contest_admin)
@enforce_condition(forum_exists_and_visible & is_proper_forum)
@require_POST
def show_post_view(request, category_id, thread_id, post_id):
# Admin shows reported/hidden post again
(category, thread, post) = get_forum_ctp(category_id, thread_id, post_id)
post.hidden = False
post.save()
return redirect('forum_thread', contest_id=request.contest.id,
category_id=category.id, thread_id=thread.id)
@enforce_condition(contest_exists & is_contest_admin)
@enforce_condition(forum_exists_and_visible & is_proper_forum & is_not_locked)
@require_POST
def delete_thread_view(request, category_id, thread_id):
category, thread = get_forum_ct(category_id, thread_id)
choice = confirmation_view(request, 'forum/confirm_delete.html',
{'elem': thread})
if not isinstance(choice, bool):
return choice
if choice:
thread.delete()
return redirect('forum_category', contest_id=request.contest.id,
category_id=category.id)
@enforce_condition(contest_exists & is_contest_admin)
@enforce_condition(forum_exists_and_visible & is_proper_forum & is_not_locked)
@require_POST
def delete_category_view(request, category_id):
category = get_object_or_404(Category, id=category_id)
choice = confirmation_view(request, 'forum/confirm_delete.html',
{'elem': category})
if not isinstance(choice, bool):
return choice
if choice:
category.delete()
return redirect('forum', contest_id=request.contest.id)
@enforce_condition(contest_exists & is_contest_admin)
@enforce_condition(forum_exists_and_visible & is_not_locked)
@require_POST
def lock_forum_view(request):
forum = request.contest.forum
forum.lock_date = request.timestamp
if forum.unlock_date and forum.unlock_date <= forum.lock_date:
forum.unlock_date = None
forum.save()
return redirect('forum', contest_id=request.contest.id)
@enforce_condition(contest_exists & is_contest_admin)
@enforce_condition(forum_exists_and_visible)
@require_POST
def unlock_forum_view(request):
    # Unlocking the forum clears both the lock & unlock dates, as if the forum
    # was never meant to be locked. If the admin changes their mind, they can
    # lock it again or set auto-locking in the admin panel.
forum = request.contest.forum
forum.unlock_date = None
forum.lock_date = None
forum.save()
return redirect('forum', contest_id=request.contest.id)
|
papedaniel/oioioi
|
oioioi/forum/views.py
|
Python
|
gpl-3.0
| 11,557
|
LIMIT = 2000000
SIZE = (LIMIT - 1) // 2
def f():
ans = 2
sieve = [False] * SIZE
for i in range(0, SIZE):
if not sieve[i]:
p = 2 * i + 3
ans += p
for j in range(p * p, LIMIT, 2 * p):
sieve[(j - 3) // 2] = True
return ans
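# Index arithmetic note (added): sieve[i] stands for the odd number 2*i + 3,
# so i = 0 -> 3, i = 1 -> 5, ...; conversely an odd j maps back to index
# (j - 3) // 2, which is why composites are struck via sieve[(j - 3) // 2].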
import ctypes
import sys
CLOCK_MONOTONIC = 1
class timespec(ctypes.Structure):
_fields_ = [
('tv_sec', ctypes.c_long),
('tv_nsec', ctypes.c_long)
]
librt = ctypes.CDLL('librt.so.1')
clock_gettime = librt.clock_gettime
clock_gettime.argtypes = [ctypes.c_int, ctypes.POINTER(timespec)]
def to_ns(ts):
return ts.tv_sec * int(1e9) + ts.tv_nsec
if len(sys.argv) == 1:
print(f())
elif len(sys.argv) == 2:
start, end = timespec(), timespec()
iters = int(sys.argv[1])
clock_gettime(CLOCK_MONOTONIC, ctypes.pointer(start))
for _ in range(0, iters):
f()
clock_gettime(CLOCK_MONOTONIC, ctypes.pointer(end))
print(to_ns(end) - to_ns(start))
|
japaric/eulermark.rs
|
problems/010/010.py
|
Python
|
apache-2.0
| 1,006
|
# Copyright Red Hat 2017, Jake Hunsaker <jhunsake@redhat.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import fnmatch
from clustersos.clusters import Cluster
from getpass import getpass
class ovirt(Cluster):
packages = ('ovirt-engine', 'rhevm')
sos_plugins = ['ovirt']
option_list = [
('no-database', bool, False, 'Do not collect a database dump'),
('cluster', str, '', 'Only collect from hosts in this cluster')
]
def setup(self):
self.pg_pass = False
if not self.get_option('no-database'):
self.conf = self.parse_db_conf()
self.pg_pass = self.get_db_password()
self.dbcmd = '/usr/share/ovirt-engine/dbscripts/engine-psql.sh -c '
if not self.get_option('cluster'):
self.dbcmd += '"select host_name from vds_static"'
else:
self.dbcmd += ('"select v.host_name from vds_static as v, cluster'
'as c where v.cluster_id = (select cluster_id from'
'cluster where name = \'%s\') "'
% self.get_option('cluster'))
def get_nodes(self):
res = self.exec_master_cmd(self.dbcmd)
if res['status'] == 0:
nodes = res['stdout'].splitlines()[2:-2]
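            # Added note: engine-psql.sh prints a psql-style table, so the
            # [2:-2] slice above presumably drops the header rows and the
            # trailing row count, keeping only the host names.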
return [n.strip() for n in nodes]
else:
raise Exception('database query failed, return code: %s'
% res['status'])
def run_extra_cmd(self):
if self.pg_pass:
return self.collect_database()
return False
def get_db_password(self):
if not self.conf:
self.log_error('Could not parse database configuration. Will not '
'attempt to collect database dump from the manager'
)
return False
pg_pass = getpass('Please provide the engine database password: ')
return pg_pass if pg_pass else False
def parse_db_conf(self):
conf = {}
engconf = '/etc/ovirt-engine/engine.conf.d/10-setup-database.conf'
res = self.exec_master_cmd('cat %s' % engconf)
if res['status'] == 0:
config = res['stdout'].splitlines()
for line in config:
k = str(line.split('=')[0])
v = str(line.split('=')[1].replace('"', ''))
conf[k] = v
return conf
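        # For reference (assumed example values, not from the original source),
        # the parsed file yields entries such as:
        #     {'ENGINE_DB_HOST': 'localhost', 'ENGINE_DB_PORT': '5432',
        #      'ENGINE_DB_USER': 'engine', 'ENGINE_DB_DATABASE': 'engine'}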
def collect_database(self):
sos_opt = (
'-k {plugin}.dbname={db} '
'-k {plugin}.dbhost={dbhost} '
'-k {plugin}.dbport={dbport} '
'-k {plugin}.username={dbuser} '
).format(plugin='postgresql',
db=self.conf['ENGINE_DB_DATABASE'],
dbhost=self.conf['ENGINE_DB_HOST'],
dbport=self.conf['ENGINE_DB_PORT'],
dbuser=self.conf['ENGINE_DB_USER']
)
cmd = ('PGPASSWORD={} /usr/sbin/sosreport --name=postgresqldb '
'--batch -o postgresql {}'
).format(self.pg_pass, sos_opt)
        db_sos = self.exec_master_cmd(cmd)
        # exec_master_cmd returns a dict (see get_nodes), so iterate its stdout
        for line in db_sos['stdout'].splitlines():
if fnmatch.fnmatch(line, '*sosreport-*tar*'):
return line.strip()
self.log_error('Failed to gather database dump')
return False
|
TurboTurtle/clustersos
|
clustersos/clusters/ovirt.py
|
Python
|
gpl-2.0
| 4,079
|
# -*- coding: utf-8 -*-
import PyQt5.QtWidgets as Qw
from . import parameters as par
class Text_line(Qw.QLineEdit):
"""Text Line Class"""
def __init__(self, val='', parent=None):
super().__init__(parent)
self.set(val)
self.setMinimumHeight(par.MIN_HEIGHT)
def set(self, txt):
if txt:
ttxt = '%s' % txt
self.setText(ttxt.strip())
else:
self.setText('')
self.setCursorPosition(0)
def get(self):
tmp = '%s' % self.text()
return tmp.strip()
|
tedlaz/pyted
|
ted17/ted17/w_textline.py
|
Python
|
gpl-3.0
| 562
|
from __future__ import annotations
import logging
from collections import OrderedDict
import scipy.sparse
import numpy as np
from typing import (
Any,
Dict,
Text,
List,
Tuple,
Callable,
Set,
Optional,
Type,
Union,
)
from rasa.engine.graph import ExecutionContext, GraphComponent
from rasa.engine.recipes.default_recipe import DefaultV1Recipe
from rasa.engine.storage.resource import Resource
from rasa.engine.storage.storage import ModelStorage
from rasa.nlu.tokenizers.spacy_tokenizer import (
POS_TAG_KEY,
SpacyTokenizer,
)
from rasa.nlu.tokenizers.tokenizer import Token, Tokenizer
from rasa.nlu.featurizers.sparse_featurizer.sparse_featurizer import SparseFeaturizer
from rasa.nlu.constants import TOKENS_NAMES
from rasa.shared.constants import DOCS_URL_COMPONENTS
from rasa.shared.nlu.training_data.training_data import TrainingData
from rasa.shared.nlu.training_data.message import Message
from rasa.shared.nlu.constants import TEXT
from rasa.shared.exceptions import InvalidConfigException
import rasa.shared.utils.io
import rasa.utils.io
logger = logging.getLogger(__name__)
END_OF_SENTENCE = "EOS"
BEGIN_OF_SENTENCE = "BOS"
FEATURES = "features"
@DefaultV1Recipe.register(
DefaultV1Recipe.ComponentType.MESSAGE_FEATURIZER, is_trainable=True
)
class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
"""Extracts and encodes lexical syntactic features.
Given a sequence of tokens, this featurizer produces a sequence of features
where the `t`-th feature encodes lexical and syntactic information about the `t`-th
    token and its surrounding tokens.
In detail: The lexical syntactic features can be specified via a list of
configurations `[c_0, c_1, ..., c_n]` where each `c_i` is a list of names of
lexical and syntactic features (e.g. `low`, `suffix2`, `digit`).
For a given tokenized text, the featurizer will consider a window of size `n`
around each token and evaluate the given list of configurations as follows:
    - It will extract the features listed in `c_m`, where `m = n/2` if `n` is even
      and `m = (n-1)/2` if `n` is odd, from token `t`.
    - It will extract the features listed in `c_{m-1}`, `c_{m-2}`, ... from the last,
      second to last, ... token before token `t`, respectively.
    - It will extract the features listed in `c_{m+1}`, `c_{m+2}`, ... from the first,
      second, ... token after token `t`, respectively.
    It will then combine all these features into one feature for position `t`.
    Example:
      If we specify `[['low'], ['upper'], ['prefix2']]`, then for each position `t`
      the `t`-th feature will encode whether the token at position `t-1` is lower
      case, whether the token at position `t` is upper case, and the first two
      characters of the token at position `t+1`.
"""
FILENAME_FEATURE_TO_IDX_DICT = "feature_to_idx_dict.pkl"
# NOTE: "suffix5" of the token "is" will be "is". Hence, when combining multiple
# prefixes, short words will be represented/encoded repeatedly.
_FUNCTION_DICT: Dict[Text, Callable[[Token], Union[Text, bool, None]]] = {
"low": lambda token: token.text.islower(),
"title": lambda token: token.text.istitle(),
"prefix5": lambda token: token.text[:5],
"prefix2": lambda token: token.text[:2],
"suffix5": lambda token: token.text[-5:],
"suffix3": lambda token: token.text[-3:],
"suffix2": lambda token: token.text[-2:],
"suffix1": lambda token: token.text[-1:],
"pos": lambda token: token.data.get(POS_TAG_KEY, None),
"pos2": lambda token: token.data.get(POS_TAG_KEY, [])[:2]
if POS_TAG_KEY in token.data
else None,
"upper": lambda token: token.text.isupper(),
"digit": lambda token: token.text.isdigit(),
}
SUPPORTED_FEATURES = sorted(
set(_FUNCTION_DICT.keys()).union([END_OF_SENTENCE, BEGIN_OF_SENTENCE])
)
@classmethod
def _extract_raw_features_from_token(
cls, feature_name: Text, token: Token, token_position: int, num_tokens: int,
) -> Text:
"""Extracts a raw feature from the token at the given position.
Args:
feature_name: the name of a supported feature
token: the token from which we want to extract the feature
token_position: the position of the token inside the tokenized text
num_tokens: the total number of tokens in the tokenized text
Returns:
the raw feature value as text
"""
if feature_name not in cls.SUPPORTED_FEATURES:
raise InvalidConfigException(
f"Configured feature '{feature_name}' not valid. Please check "
f"'{DOCS_URL_COMPONENTS}' for valid configuration parameters."
)
if feature_name == END_OF_SENTENCE:
return str(token_position == num_tokens - 1)
if feature_name == BEGIN_OF_SENTENCE:
return str(token_position == 0)
return str(cls._FUNCTION_DICT[feature_name](token))
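        # Worked example (added): for the token "Hello" at position 0 of 3
        # tokens, "BOS" -> "True", "low" -> "False", "upper" -> "False" and
        # "prefix2" -> "He"; every raw feature value is returned as a string.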
@classmethod
def required_components(cls) -> List[Type]:
"""Components that should be included in the pipeline before this component."""
return [Tokenizer]
@staticmethod
def get_default_config() -> Dict[Text, Any]:
"""Returns the component's default config."""
return {
**SparseFeaturizer.get_default_config(),
FEATURES: [
["low", "title", "upper"],
["BOS", "EOS", "low", "upper", "title", "digit"],
["low", "title", "upper"],
],
}
def __init__(
self,
config: Dict[Text, Any],
model_storage: ModelStorage,
resource: Resource,
execution_context: ExecutionContext,
feature_to_idx_dict: Optional[Dict[Tuple[int, Text], Dict[Text, int]]] = None,
) -> None:
"""Instantiates a new `LexicalSyntacticFeaturizer` instance."""
super().__init__(execution_context.node_name, config)
# graph component
self._model_storage = model_storage
self._resource = resource
self._execution_context = execution_context
# featurizer specific
self._feature_config = self._config[FEATURES]
self._set_feature_to_idx_dict(
feature_to_idx_dict or {}, check_consistency_with_config=True
)
@classmethod
def validate_config(cls, config: Dict[Text, Any]) -> None:
"""Validates that the component is configured properly."""
if FEATURES not in config:
return # will be replaced with default
feature_config = config[FEATURES]
message = (
f"Expected configuration of `features` to be a list of lists that "
f"that contain names of lexical and syntactic features "
f"(i.e. {cls.SUPPORTED_FEATURES}). "
f"Received {feature_config} instead. "
)
try:
configured_feature_names = set(
feature_name
for pos_config in feature_config
for feature_name in pos_config
)
except TypeError as e:
raise InvalidConfigException(message) from e
if configured_feature_names.difference(cls.SUPPORTED_FEATURES):
raise InvalidConfigException(message)
def _set_feature_to_idx_dict(
self,
feature_to_idx_dict: Dict[Tuple[int, Text], Dict[Text, int]],
check_consistency_with_config: bool = False,
) -> None:
"""Sets the "feature" to index mapping.
Here, "feature" denotes the combination of window position, feature name,
and feature_value.
Args:
feature_to_idx_dict: mapping from tuples of window position and feature name
to a mapping from feature values to indices
check_consistency_with_config: whether the consistency with the current
`self.config` should be checked
"""
self._feature_to_idx_dict = feature_to_idx_dict
self._number_of_features = sum(
[
len(feature_values.values())
for feature_values in self._feature_to_idx_dict.values()
]
)
if check_consistency_with_config:
known_features = set(self._feature_to_idx_dict.keys())
not_in_config = known_features.difference(
(
(window_idx, feature_name)
for window_idx, feature_names in enumerate(self._feature_config)
for feature_name in feature_names
)
)
if not_in_config:
rasa.shared.utils.io.raise_warning(
f"A feature to index mapping has been loaded that does not match "
f"the configured features. The given mapping configures "
f" (position in window, feature_name): {not_in_config}. "
f" These are not specified in the given config "
f" {self._feature_config}. "
f"Continuing with constant values for these features. "
)
def train(self, training_data: TrainingData) -> Resource:
"""Trains the featurizer.
Args:
training_data: the training data
Returns:
the resource from which this trained component can be loaded
"""
self.warn_if_pos_features_cannot_be_computed(training_data)
feature_to_idx_dict = self._create_feature_to_idx_dict(training_data)
self._set_feature_to_idx_dict(feature_to_idx_dict=feature_to_idx_dict)
if not self._feature_to_idx_dict:
rasa.shared.utils.io.raise_warning(
"No lexical syntactic features could be extracted from the training "
"data. In order for this component to work you need to define "
"`features` that can be found in the given training data."
)
self.persist()
return self._resource
def warn_if_pos_features_cannot_be_computed(
self, training_data: TrainingData
) -> None:
"""Warn if part-of-speech features are needed but not given."""
training_example = next(
(
message
for message in training_data.training_examples
if message.get(TOKENS_NAMES[TEXT], [])
),
Message(),
)
tokens_example = training_example.get(TOKENS_NAMES[TEXT], [])
configured_feature_names = set(
feature_name
for pos_config in self._feature_config
for feature_name in pos_config
)
if {"pos", "pos2"}.intersection(
configured_feature_names
) and not tokens_example[0].data.get(POS_TAG_KEY, []):
rasa.shared.utils.io.raise_warning(
f"Expected training data to include tokens with part-of-speech tags"
f"because the given configuration includes part-of-speech features "
f"`pos` and/or `pos2`. "
f"Please add a {SpacyTokenizer.__name__} to your "
f"configuration if you want to use the part-of-speech-features in the"
f"{self.__class__.__name__}. "
f"Continuing without the part-of-speech-features."
)
def _create_feature_to_idx_dict(
self, training_data: TrainingData
) -> Dict[Tuple[int, Text], Dict[Text, int]]:
"""Create a nested dictionary of all feature values.
Returns:
a nested mapping that maps from tuples of positions (in the window) and
supported feature names to "raw feature to index" mappings, i.e.
mappings that map the respective raw feature values to unique indices
(where `unique` means unique with respect to all indices in the
*nested* mapping)
"""
# collect all raw feature values
feature_vocabulary: Dict[Tuple[int, Text], Set[Text]] = dict()
for example in training_data.training_examples:
tokens = example.get(TOKENS_NAMES[TEXT], [])
sentence_features = self._map_tokens_to_raw_features(tokens)
for token_features in sentence_features:
for position_and_feature_name, feature_value in token_features.items():
feature_vocabulary.setdefault(position_and_feature_name, set()).add(
feature_value
)
# assign a unique index to each feature value
return self._build_feature_to_index_map(feature_vocabulary)
def _map_tokens_to_raw_features(
self, tokens: List[Token]
) -> List[Dict[Tuple[int, Text], Text]]:
"""Extracts the raw feature values.
Args:
tokens: a tokenized text
Returns:
a list of feature dictionaries for each token in the given list
where each feature dictionary maps a tuple containing
- a position (in the window) and
- a supported feature name
to the corresponding raw feature value
"""
sentence_features = []
# in case of an even number we will look at one more word before,
# e.g. window size 4 will result in a window range of
# [-2, -1, 0, 1] (0 = current word in sentence)
window_size = len(self._feature_config)
half_window_size = window_size // 2
window_range = range(-half_window_size, half_window_size + window_size % 2)
assert len(window_range) == window_size
for anchor in range(len(tokens)):
token_features: Dict[Tuple[int, Text], Text] = {}
for window_position, relative_position in enumerate(window_range):
absolute_position = anchor + relative_position
# skip, if current_idx is pointing to a non-existing token
if absolute_position < 0 or absolute_position >= len(tokens):
continue
token = tokens[absolute_position]
for feature_name in self._feature_config[window_position]:
token_features[
(window_position, feature_name)
] = self._extract_raw_features_from_token(
token=token,
feature_name=feature_name,
token_position=absolute_position,
num_tokens=len(tokens),
)
sentence_features.append(token_features)
return sentence_features
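        # Worked example (added): with the default three feature lists,
        # window_size == 3, half_window_size == 1 and window_range ==
        # range(-1, 2), i.e. the previous, current and next token; with four
        # lists the range is [-2, -1, 0, 1], matching the comment above.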
@staticmethod
def _build_feature_to_index_map(
feature_vocabulary: Dict[Tuple[int, Text], Set[Text]]
) -> Dict[Tuple[int, Text], Dict[Text, int]]:
"""Creates a nested dictionary for mapping raw features to indices.
Args:
feature_vocabulary: a mapping from tuples of positions (in the window) and
supported feature names to the set of possible feature values
Returns:
a nested mapping that maps from tuples of positions (in the window) and
supported feature names to "raw feature to index" mappings, i.e.
mappings that map the respective raw feature values to unique indices
(where `unique` means unique with respect to all indices in the
*nested* mapping)
"""
        # Note that this will only sort the top-level keys; we keep doing it
        # to ensure consistency with what was done before.
ordered_feature_vocabulary: OrderedDict[
Tuple[int, Text], Set[Text]
] = OrderedDict(sorted(feature_vocabulary.items()))
# create the nested mapping
feature_to_idx_dict: Dict[Tuple[int, Text], Dict[Text, int]] = {}
offset = 0
for (
position_and_feature_name,
feature_values,
) in ordered_feature_vocabulary.items():
sorted_feature_values = sorted(feature_values)
feature_to_idx_dict[position_and_feature_name] = {
feature_value: feature_idx
for feature_idx, feature_value in enumerate(
sorted_feature_values, start=offset
)
}
offset += len(feature_values)
return feature_to_idx_dict
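        # Sketch of the result (hypothetical input, added for clarity): given
        # feature_vocabulary == {(0, 'low'): {'True', 'False'},
        # (1, 'suffix2'): {'lo'}}, this returns {(0, 'low'): {'False': 0,
        # 'True': 1}, (1, 'suffix2'): {'lo': 2}} -- indices are unique across
        # the whole nested mapping.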
def process(self, messages: List[Message]) -> List[Message]:
"""Featurizes all given messages in-place.
Args:
messages: messages to be featurized.
Returns:
The same list with the same messages after featurization.
"""
for message in messages:
self._process_message(message)
return messages
def process_training_data(self, training_data: TrainingData) -> TrainingData:
"""Processes the training examples in the given training data in-place.
Args:
training_data: the training data
Returns:
same training data after processing
"""
self.process(training_data.training_examples)
return training_data
def _process_message(self, message: Message) -> None:
"""Featurizes the given message in-place.
Args:
message: a message to be featurized
"""
if not self._feature_to_idx_dict:
rasa.shared.utils.io.raise_warning(
f"The {self.__class__.__name__} {self._identifier} has not been "
f"trained properly yet. "
f"Continuing without adding features from this featurizer."
)
return
tokens = message.get(TOKENS_NAMES[TEXT])
if tokens:
sentence_features = self._map_tokens_to_raw_features(tokens)
sparse_matrix = self._map_raw_features_to_indices(sentence_features)
self.add_features_to_message(
# FIXME: create sentence feature and make `sentence` non optional
sequence=sparse_matrix,
sentence=None,
attribute=TEXT,
message=message,
)
def _map_raw_features_to_indices(
self, sentence_features: List[Dict[Tuple[int, Text], Any]]
) -> scipy.sparse.coo_matrix:
"""Converts the raw features to one-hot encodings.
Requires the "feature" to index dictionary, i.e. the featurizer must have
been trained.
Args:
sentence_features: a list of feature dictionaries where the `t`-th feature
dictionary maps a tuple containing
- a position (in the window) and
- a supported feature name
to the raw feature value extracted from the window around the `t`-th token.
Returns:
a sparse matrix where the `i`-th row is a multi-hot vector that encodes the
raw features extracted from the window around the `i`-th token
"""
rows = []
cols = []
shape = (len(sentence_features), self._number_of_features)
for token_idx, token_features in enumerate(sentence_features):
for position_and_feature_name, feature_value in token_features.items():
mapping = self._feature_to_idx_dict.get(position_and_feature_name)
if not mapping:
continue
feature_idx = mapping.get(feature_value, -1)
if feature_idx > -1:
rows.append(token_idx)
cols.append(feature_idx)
rows = np.array(rows)
cols = np.array(cols)
data = np.ones(len(rows))
return scipy.sparse.coo_matrix((data, (rows, cols)), shape=shape)
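        # Added note: rows/cols collect one (token_idx, feature_idx) pair per
        # matched feature, so the COO matrix is a multi-hot encoding with one
        # row per token and one column per known (position, feature, value).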
@classmethod
def create(
cls,
config: Dict[Text, Any],
model_storage: ModelStorage,
resource: Resource,
execution_context: ExecutionContext,
) -> LexicalSyntacticFeaturizer:
"""Creates a new untrained component (see parent class for full docstring)."""
return cls(config, model_storage, resource, execution_context)
@classmethod
def load(
cls,
config: Dict[Text, Any],
model_storage: ModelStorage,
resource: Resource,
execution_context: ExecutionContext,
**kwargs: Any,
) -> LexicalSyntacticFeaturizer:
"""Loads trained component (see parent class for full docstring)."""
try:
with model_storage.read_from(resource) as model_path:
feature_to_idx_dict = rasa.utils.io.json_unpickle(
model_path / cls.FILENAME_FEATURE_TO_IDX_DICT,
encode_non_string_keys=True,
)
return cls(
config=config,
model_storage=model_storage,
resource=resource,
execution_context=execution_context,
feature_to_idx_dict=feature_to_idx_dict,
)
except ValueError:
logger.debug(
f"Failed to load `{cls.__class__.__name__}` from model storage. "
f"Resource '{resource.name}' doesn't exist."
)
return cls(
config=config,
model_storage=model_storage,
resource=resource,
execution_context=execution_context,
)
def persist(self) -> None:
"""Persist this model (see parent class for full docstring)."""
if not self._feature_to_idx_dict:
return None
with self._model_storage.write_to(self._resource) as model_path:
rasa.utils.io.json_pickle(
model_path / self.FILENAME_FEATURE_TO_IDX_DICT,
self._feature_to_idx_dict,
encode_non_string_keys=True,
)
|
RasaHQ/rasa_nlu
|
rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py
|
Python
|
apache-2.0
| 21,849
|
import os
from celery import Celery
from django.conf import settings
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'songaday_searcher.settings')
app = Celery('songaday_searcher')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
|
zaneswafford/songaday_searcher
|
songaday_searcher/celery.py
|
Python
|
bsd-3-clause
| 352
|
"""initial migration
Revision ID: 5092888353e6
Revises: None
Create Date: 2015-06-17 11:17:05.868000
"""
# revision identifiers, used by Alembic.
revision = '5092888353e6'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('roles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('frequencies',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('value', sa.BigInteger(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'),
sa.UniqueConstraint('value')
)
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=64), nullable=True),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.Column('email', sa.String(length=64), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('ix_users_email', 'users', ['email'], unique=True)
op.create_index('ix_users_username', 'users', ['username'], unique=True)
op.create_table('watchs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('url', sa.String(length=64), nullable=True),
sa.Column('frequency_id', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('email', sa.String(length=64), nullable=True),
sa.Column('client', sa.String(length=64), nullable=True),
sa.Column('timestamp', sa.BigInteger(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['frequency_id'], ['frequencies.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('checks',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('watch_id', sa.Integer(), nullable=True),
sa.Column('report', sa.String(length=64), nullable=True),
sa.Column('timestamp', sa.BigInteger(), nullable=True),
sa.Column('mail_sent', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['watch_id'], ['watchs.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('checks')
op.drop_table('watchs')
op.drop_index('ix_users_username', 'users')
op.drop_index('ix_users_email', 'users')
op.drop_table('users')
op.drop_table('frequencies')
op.drop_table('roles')
### end Alembic commands ###
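# A minimal usage sketch (standard Alembic CLI, run from the directory that
# contains alembic.ini):
#
#     alembic upgrade head      # apply this migration (creates all tables)
#     alembic downgrade base    # roll it back (drops them again)
#
# Since down_revision is None, this is the first migration in the chain.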
|
andela-bojengwa/team3
|
monitorbot_api/migrations/versions/5092888353e6_initial_migration.py
|
Python
|
mit
| 2,886
|
"""
IknowInnov - Innovation Team Repository and official Web Site
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='eknowvation',
version='1.0.0',
description='IknowInnov - Innovation Team Repository and official Web Site',
long_description=long_description,
# url='https://github.com/IBM-Bluemix/eknowvation',
license='Apache-2.0'
)
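# A minimal usage sketch (standard setuptools workflow):
#
#     pip install .            # install the package into the active env
#     python setup.py sdist    # build a source distribution
#
# Note that find_packages is imported above but never passed to setup();
# adding packages=find_packages() would be required to actually ship any
# Python packages with this distribution.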
|
rickyaeztor/watson-virtual-infra-mgt-system
|
setup.py
|
Python
|
apache-2.0
| 677
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def use_ga(_):
"""
    Return the USE_GA Django setting as template context.
:rtype: dict
"""
try:
return {'use_ga': settings.USE_GA}
except AttributeError:
raise ImproperlyConfigured('USE_GA not set.')
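# A minimal registration sketch (the dotted path is inferred from this module
# living at tools/context_processors.py; adjust to your project layout):
#
#     # settings.py
#     TEMPLATES = [{
#         # ...
#         'OPTIONS': {
#             'context_processors': [
#                 # ...
#                 'tools.context_processors.use_ga',
#             ],
#         },
#     }]
#
# Templates can then gate the analytics snippet with {% if use_ga %}.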
|
andreipetre/django-project-heroku
|
tools/context_processors.py
|
Python
|
mit
| 330
|
# -*- coding: utf-8 -*-
import copy
from functools import wraps
import json
import sys
import django
from django.contrib.admin.helpers import AdminForm
from django.conf import settings
from django.conf.urls import url
from django.contrib import admin, messages
from django.contrib.admin.models import LogEntry, CHANGE
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.utils import get_deleted_objects, quote
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import (MultipleObjectsReturned, ObjectDoesNotExist,
PermissionDenied, ValidationError)
from django.db import router, transaction
from django.db.models import Q
from django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import render, get_object_or_404
from django.template.defaultfilters import escape
from django.utils.encoding import force_text
from django.utils.formats import localize
from django.utils.six.moves.urllib.parse import unquote
from django.utils.translation import ugettext_lazy as _, get_language
from django.utils.decorators import method_decorator
from django.views.decorators.http import require_POST
from cms.admin.change_list import CMSChangeList
from cms.admin.dialog.views import get_copy_dialog
from cms.admin.forms import (
PageForm, AdvancedSettingsForm, PagePermissionForm, PublicationDatesForm
)
from cms.admin.permissionadmin import (
PERMISSION_ADMIN_INLINES, PagePermissionInlineAdmin, ViewRestrictionInlineAdmin
)
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from cms.admin.views import revert_plugins
from cms.constants import (
PAGE_TYPES_ID,
PUBLISHER_STATE_PENDING,
REVISION_INITIAL_COMMENT,
)
from cms.models import Page, Title, CMSPlugin, PagePermission, GlobalPagePermission, StaticPlaceholder
from cms.models.managers import PagePermissionsPermissionManager
from cms.plugin_pool import plugin_pool
from cms.toolbar_pool import toolbar_pool
from cms.utils import helpers, permissions, get_language_from_request, admin as admin_utils, copy_plugins
from cms.utils.i18n import get_language_list, get_language_tuple, get_language_object, force_language
from cms.utils.admin import jsonify_request
from cms.utils.compat.dj import is_installed
from cms.utils.conf import get_cms_setting
from cms.utils.helpers import find_placeholder_relation, current_site
from cms.utils.permissions import has_global_page_permission, has_generic_permission
from cms.utils.urlutils import add_url_parameters, admin_reverse
require_POST = method_decorator(require_POST)
if is_installed('reversion'):
from cms.utils.reversion_hacks import ModelAdmin, create_revision, Version, RollBackRevisionView
else: # pragma: no cover
from django.contrib.admin import ModelAdmin
class ReversionContext(object):
def __enter__(self):
yield
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def __call__(self, func):
"""Allows this revision context to be used as a decorator."""
@wraps(func)
def do_revision_context(*args, **kwargs):
self.__enter__()
exception = False
try:
try:
return func(*args, **kwargs)
except:
exception = True
if not self.__exit__(*sys.exc_info()):
raise
finally:
if not exception:
self.__exit__(None, None, None)
return do_revision_context
def create_revision():
return ReversionContext()
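    # A minimal usage sketch (illustration only): the fallback above mirrors
    # the reversion-backed create_revision closely enough that call sites work
    # unchanged whether or not reversion is installed:
    #
    #     @create_revision()
    #     def some_admin_view(self, request, object_id):
    #         ...  # runs inside a (possibly no-op) revision context
    #
    #     with create_revision():
    #         ...  # the context-manager form works the same way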
PUBLISH_COMMENT = "Publish"
class PageAdmin(PlaceholderAdminMixin, ModelAdmin):
form = PageForm
search_fields = ('=id', 'title_set__slug', 'title_set__title', 'reverse_id')
revision_form_template = "admin/cms/page/history/revision_header.html"
recover_form_template = "admin/cms/page/history/recover_header.html"
add_general_fields = ['title', 'slug', 'language', 'template']
change_list_template = "admin/cms/page/tree/base.html"
list_filter = ['in_navigation', 'template', 'changed_by', 'soft_root']
title_frontend_editable_fields = ['title', 'menu_title', 'page_title']
inlines = PERMISSION_ADMIN_INLINES
def get_urls(self):
"""Get the admin urls
"""
info = "%s_%s" % (self.model._meta.app_label, self.model._meta.model_name)
pat = lambda regex, fn: url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
url_patterns = [
pat(r'^([0-9]+)/advanced-settings/$', self.advanced),
pat(r'^([0-9]+)/dates/$', self.dates),
pat(r'^([0-9]+)/permission-settings/$', self.permissions),
pat(r'^([0-9]+)/delete-translation/$', self.delete_translation),
pat(r'^([0-9]+)/move-page/$', self.move_page),
pat(r'^([0-9]+)/copy-page/$', self.copy_page),
pat(r'^([0-9]+)/copy-language/$', self.copy_language),
pat(r'^([0-9]+)/dialog/copy/$', get_copy_dialog), # copy dialog
pat(r'^([0-9]+)/change-navigation/$', self.change_innavigation),
pat(r'^([0-9]+)/permissions/$', self.get_permissions),
pat(r'^([0-9]+)/undo/$', self.undo),
pat(r'^([0-9]+)/redo/$', self.redo),
# Deprecated in 3.2.1, please use ".../change-template/..." instead
pat(r'^([0-9]+)/change_template/$', self.change_template),
pat(r'^([0-9]+)/change-template/$', self.change_template),
pat(r'^([0-9]+)/([a-z\-]+)/edit-field/$', self.edit_title_fields),
pat(r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_page),
pat(r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish),
pat(r'^([0-9]+)/([a-z\-]+)/revert/$', self.revert_page),
pat(r'^([0-9]+)/([a-z\-]+)/preview/$', self.preview_page),
pat(r'^add-page-type/$', self.add_page_type),
pat(r'^published-pages/$', self.get_published_pagelist),
url(r'^resolve/$', self.resolve, name="cms_page_resolve"),
url(r'^get-tree/$', self.get_tree, name="get_tree"),
]
if plugin_pool.get_all_plugins():
url_patterns += plugin_pool.get_patterns()
url_patterns += super(PageAdmin, self).get_urls()
return url_patterns
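    # Note (illustration, derived from the `pat` helper above): each view is
    # registered under the name '<app_label>_<model_name>_<fn.__name__>', so
    # with the default 'cms' app label and 'page' model the advanced-settings
    # view, for example, can be reversed as:
    #
    #     admin_reverse('cms_page_advanced', args=[page_id])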
def get_revision_instances(self, request, object):
"""Returns all the instances to be used in the object's revision."""
if isinstance(object, Title):
object = object.page
if isinstance(object, Page) and not object.publisher_is_draft:
object = object.publisher_public
placeholder_relation = find_placeholder_relation(object)
data = [object]
filters = {'placeholder__%s' % placeholder_relation: object}
for plugin in CMSPlugin.objects.filter(**filters):
data.append(plugin)
plugin_instance, admin = plugin.get_plugin_instance()
if plugin_instance:
data.append(plugin_instance)
if isinstance(object, Page):
titles = object.title_set.all()
for title in titles:
title.publisher_public = None
data.append(title)
return data
def save_model(self, request, obj, form, change):
"""
Move the page in the tree if necessary and save every placeholder
Content object.
"""
from cms.extensions import extension_pool
target = request.GET.get('target', None)
position = request.GET.get('position', None)
if 'recover' in request.path_info:
tmp_page = Page(
path=None,
numchild=0,
depth=0,
site_id=obj.site_id,
)
# It's necessary to create a temporary page
# in order to calculate the tree attributes.
if obj.parent_id:
tmp_page = obj.parent.add_child(instance=tmp_page)
else:
tmp_page = obj.add_root(instance=tmp_page)
obj.path = tmp_page.path
obj.numchild = tmp_page.numchild
obj.depth = tmp_page.depth
# Remove temporary page.
tmp_page.delete()
else:
if 'history' in request.path_info:
old_obj = self.model.objects.get(pk=obj.pk)
obj.depth = old_obj.depth
obj.parent_id = old_obj.parent_id
obj.path = old_obj.path
obj.numchild = old_obj.numchild
new = False
if not obj.pk:
new = True
obj.save()
if 'recover' in request.path_info or 'history' in request.path_info:
revert_plugins(request, obj.version.pk, obj)
if target is not None and position is not None:
try:
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
pass
else:
if position == 'last-child' or position == 'first-child':
obj.parent_id = target.pk
else:
obj.parent_id = target.parent_id
obj.save()
obj = obj.move(target, pos=position)
page_type_id = form.cleaned_data.get('page_type')
copy_target_id = request.GET.get('copy_target')
copy_target = None
if copy_target_id or page_type_id:
if page_type_id:
copy_target_id = page_type_id
copy_target = self.model.objects.get(pk=copy_target_id)
if not copy_target.has_view_permission(request):
raise PermissionDenied()
obj = obj.reload()
copy_target._copy_attributes(obj, clean=True)
obj.save()
for lang in copy_target.get_languages():
copy_target._copy_contents(obj, lang)
if 'permission' not in request.path_info:
language = form.cleaned_data['language']
Title.objects.set_or_create(
request,
obj,
form,
language,
)
if copy_target:
extension_pool.copy_extensions(copy_target, obj)
# is it home? publish it right away
if new and Page.objects.filter(site_id=obj.site_id).count() == 1:
obj.publish(language)
def get_fieldsets(self, request, obj=None):
form = self.get_form(request, obj, fields=None)
if getattr(form, 'fieldsets', None) is None:
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [(None, {'fields': fields})]
else:
return form.fieldsets
def get_inline_classes(self, request, obj=None, **kwargs):
if obj and 'permission' in request.path_info:
return PERMISSION_ADMIN_INLINES
return []
def get_form_class(self, request, obj=None, **kwargs):
if 'advanced' in request.path_info:
return AdvancedSettingsForm
elif 'permission' in request.path_info:
return PagePermissionForm
elif 'dates' in request.path_info:
return PublicationDatesForm
return self.form
def get_form(self, request, obj=None, **kwargs):
"""
Get PageForm for the Page model and modify its fields depending on
the request.
"""
language = get_language_from_request(request, obj)
form_cls = self.get_form_class(request, obj)
form = super(PageAdmin, self).get_form(request, obj, form=form_cls, **kwargs)
        # The get_form method works by overriding the initial field values,
        # which may persist across invocations. The code below deep-copies the
        # field definitions to avoid such leaks.
for field in form.base_fields.keys():
form.base_fields[field] = copy.deepcopy(form.base_fields[field])
if 'language' in form.base_fields:
form.base_fields['language'].initial = language
if 'page_type' in form.base_fields:
if 'copy_target' in request.GET or 'add_page_type' in request.GET or obj:
del form.base_fields['page_type']
elif not Title.objects.filter(page__parent__reverse_id=PAGE_TYPES_ID, language=language).exists():
del form.base_fields['page_type']
if 'add_page_type' in request.GET:
del form.base_fields['menu_title']
del form.base_fields['meta_description']
del form.base_fields['page_title']
self.inlines = self.get_inline_classes(request, obj, **kwargs)
if obj:
if 'history' in request.path_info or 'recover' in request.path_info:
version_id = request.path_info.split('/')[-2]
else:
version_id = None
title_obj = obj.get_title_obj(language=language, fallback=False, version_id=version_id, force_reload=True)
if 'site' in form.base_fields and form.base_fields['site'].initial is None:
form.base_fields['site'].initial = obj.site
for name in ('slug', 'title', 'meta_description', 'menu_title', 'page_title', 'redirect'):
if name in form.base_fields:
form.base_fields[name].initial = getattr(title_obj, name)
if 'overwrite_url' in form.base_fields:
if title_obj.has_url_overwrite:
form.base_fields['overwrite_url'].initial = title_obj.path
else:
form.base_fields['overwrite_url'].initial = ''
else:
for name in ('slug', 'title'):
form.base_fields[name].initial = u''
if 'target' in request.GET or 'copy_target' in request.GET:
target = request.GET.get('copy_target') or request.GET.get('target')
if 'position' in request.GET:
position = request.GET['position']
if position == 'last-child' or position == 'first-child':
form.base_fields['parent'].initial = request.GET.get('target', None)
else:
sibling = self.model.objects.get(pk=target)
form.base_fields['parent'].initial = sibling.parent_id
else:
form.base_fields['parent'].initial = request.GET.get('target', None)
form.base_fields['site'].initial = request.session.get('cms_admin_site', None)
return form
def advanced(self, request, object_id):
page = get_object_or_404(self.model, pk=object_id)
if not page.has_advanced_settings_permission(request):
raise PermissionDenied("No permission for editing advanced settings")
return self.change_view(request, object_id, extra_context={'advanced_settings': True, 'title': _("Advanced Settings")})
def dates(self, request, object_id):
return self.change_view(request, object_id, extra_context={'publishing_dates': True, 'title': _("Publishing dates")})
def permissions(self, request, object_id):
page = get_object_or_404(self.model, pk=object_id)
if not page.has_change_permissions_permission(request):
raise PermissionDenied("No permission for editing advanced settings")
return self.change_view(request, object_id, extra_context={'show_permissions': True, 'title': _("Change Permissions")})
def get_inline_instances(self, request, obj=None):
inlines = super(PageAdmin, self).get_inline_instances(request, obj)
if get_cms_setting('PERMISSION') and obj:
filtered_inlines = []
for inline in inlines:
if (isinstance(inline, PagePermissionInlineAdmin)
and not isinstance(inline, ViewRestrictionInlineAdmin)):
if "recover" in request.path or "history" in request.path:
# do not display permissions in recover mode
continue
if not obj.has_change_permissions_permission(request):
continue
filtered_inlines.append(inline)
inlines = filtered_inlines
return inlines
def get_unihandecode_context(self, language):
if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'):
uhd_lang = language[:2]
else:
uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER')
uhd_host = get_cms_setting('UNIHANDECODE_HOST')
uhd_version = get_cms_setting('UNIHANDECODE_VERSION')
if uhd_lang and uhd_host and uhd_version:
uhd_urls = [
'%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version),
'%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang),
]
else:
uhd_urls = []
return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls}
@create_revision()
def add_view(self, request, form_url='', extra_context=None):
extra_context = extra_context or {}
language = get_language_from_request(request)
extra_context.update({
'language': language,
})
if not request.GET.get('add_page_type') is None:
extra_context.update({
'add_page_type': True,
'title': _("Add Page Type"),
})
elif 'copy_target' in request.GET:
extra_context.update({
'title': _("Add Page Copy"),
})
else:
extra_context = self.update_language_tab_context(request, context=extra_context)
extra_context.update(self.get_unihandecode_context(language))
return super(PageAdmin, self).add_view(request, form_url, extra_context=extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
"""
The 'change' admin view for the Page model.
"""
if extra_context is None:
extra_context = {'basic_info': True}
try:
obj = self.model.objects.get(pk=object_id)
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
else:
context = {
'page': obj,
'CMS_PERMISSION': get_cms_setting('PERMISSION'),
'ADMIN_MEDIA_URL': settings.STATIC_URL,
'can_change': obj.has_change_permission(request),
'can_change_permissions': obj.has_change_permissions_permission(request),
'current_site_id': settings.SITE_ID,
}
context.update(extra_context or {})
extra_context = self.update_language_tab_context(request, obj, context)
tab_language = get_language_from_request(request)
extra_context.update(self.get_unihandecode_context(tab_language))
response = super(PageAdmin, self).change_view(
request, object_id, form_url=form_url, extra_context=extra_context)
if tab_language and response.status_code == 302 and response._headers['location'][1] == request.path_info:
location = response._headers['location']
response._headers['location'] = (location[0], "%s?language=%s" % (location[1], tab_language))
if request.method == "POST" and response.status_code in (200, 302):
if 'history' in request.path_info:
return HttpResponseRedirect(admin_reverse('cms_page_change', args=(quote(object_id),)))
elif 'recover' in request.path_info:
return HttpResponseRedirect(admin_reverse('cms_page_change', args=(quote(object_id),)))
return response
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
# add context variables
filled_languages = []
if obj:
filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')]
allowed_languages = [lang[0] for lang in self._get_site_languages(obj)]
context.update({
'filled_languages': [lang for lang in filled_languages if lang in allowed_languages],
})
return super(PageAdmin, self).render_change_form(request, context, add, change, form_url, obj)
def _get_site_languages(self, obj=None):
if obj:
site_id = obj.site_id
else:
site_id = Site.objects.get_current().pk
return get_language_tuple(site_id)
def update_language_tab_context(self, request, obj=None, context=None):
if not context:
context = {}
language = get_language_from_request(request, obj)
languages = self._get_site_languages(obj)
context.update({
'language': language,
'language_tabs': languages,
# Dates are not language dependent, thus we hide the language
# selection bar: the language is forced through the form class
'show_language_tabs': len(list(languages)) > 1 and not context.get('publishing_dates', False),
})
return context
def response_change(self, request, obj):
"""Called always when page gets changed, call save on page, there may be
some new stuff, which should be published after all other objects on page
are collected.
"""
# save the object again, so all the related changes to page model
# can be published if required
obj.save()
return super(PageAdmin, self).response_change(request, obj)
def has_add_permission(self, request):
"""
Return true if the current user has permission to add a new page.
"""
if get_cms_setting('PERMISSION'):
return permissions.has_page_add_permission_from_request(request)
return super(PageAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
"""
Return true if the current user has permission on the page.
Return the string 'All' if the user has all rights.
"""
if get_cms_setting('PERMISSION'):
if obj:
return obj.has_change_permission(request)
else:
return permissions.has_page_change_permission(request)
return super(PageAdmin, self).has_change_permission(request, obj)
def has_delete_permission(self, request, obj=None):
"""
        Returns True if the given request has permission to delete the given
        Django model instance. If CMS_PERMISSION is in use, object-level
        permissions are checked as well.
"""
if get_cms_setting('PERMISSION') and obj is not None:
return obj.has_delete_permission(request)
return super(PageAdmin, self).has_delete_permission(request, obj)
def has_recover_permission(self, request):
"""
        Returns True if the user has the right to recover pages.
"""
if not is_installed('reversion'):
return False
user = request.user
if user.is_superuser:
return True
try:
if has_global_page_permission(request, can_recover_page=True):
return True
except:
pass
return False
def has_add_plugin_permission(self, request, placeholder, plugin_type):
if not permissions.has_plugin_permission(request.user, plugin_type, "add"):
return False
page = placeholder.page
if page and not page.has_change_permission(request):
return False
if page and not page.publisher_is_draft:
return False
return True
def has_copy_plugin_permission(self, request, source_placeholder, target_placeholder, plugins):
source_page = source_placeholder.page
if source_page and not source_page.has_change_permission(request):
return False
target_page = target_placeholder.page
if target_page and not target_page.has_change_permission(request):
return False
if target_page and not target_page.publisher_is_draft:
return False
for plugin in plugins:
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "add"):
return False
return True
def has_change_plugin_permission(self, request, plugin):
page = plugin.placeholder.page if plugin.placeholder else None
if page and not page.has_change_permission(request):
return False
if page and not page.publisher_is_draft:
return False
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return False
return True
def has_move_plugin_permission(self, request, plugin, target_placeholder):
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return False
page = plugin.placeholder.page
if page and not page.has_change_permission(request):
return False
if page and not page.publisher_is_draft:
return False
return True
def has_delete_plugin_permission(self, request, plugin):
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "delete"):
return False
page = plugin.placeholder.page
if page:
if not page.publisher_is_draft:
return False
if not page.has_change_permission(request):
return False
return True
def has_clear_placeholder_permission(self, request, placeholder):
page = placeholder.page if placeholder else None
if page:
if not page.publisher_is_draft:
return False
if not page.has_change_permission(request):
return False
return True
@create_revision()
def post_add_plugin(self, request, placeholder, plugin):
if is_installed('reversion') and placeholder.page:
plugin_name = force_text(plugin_pool.get_plugin(plugin.plugin_type).name)
message = _(u"%(plugin_name)s plugin added to %(placeholder)s") % {
'plugin_name': plugin_name, 'placeholder': placeholder}
self.cleanup_history(placeholder.page)
helpers.make_revision_with_plugins(placeholder.page, request.user, message)
@create_revision()
def post_copy_plugins(self, request, source_placeholder, target_placeholder, plugins):
page = target_placeholder.page
if page and is_installed('reversion'):
message = _(u"Copied plugins to %(placeholder)s") % {'placeholder': target_placeholder}
self.cleanup_history(page)
helpers.make_revision_with_plugins(page, request.user, message)
@create_revision()
def post_edit_plugin(self, request, plugin):
page = plugin.placeholder.page
# if reversion is installed, save version of the page plugins
if page and is_installed('reversion'):
plugin_name = force_text(plugin_pool.get_plugin(plugin.plugin_type).name)
message = _(
u"%(plugin_name)s plugin edited at position %(position)s in %(placeholder)s") % {
'plugin_name': plugin_name,
'position': plugin.position,
'placeholder': plugin.placeholder.slot
}
self.cleanup_history(page)
helpers.make_revision_with_plugins(page, request.user, message)
@create_revision()
def post_move_plugin(self, request, source_placeholder, target_placeholder, plugin):
# order matters.
# We give priority to the target page but fallback to the source.
# This comes into play when moving plugins between static placeholders
# and non static placeholders.
page = target_placeholder.page or source_placeholder.page
if page and is_installed('reversion'):
message = _(u"Moved plugins to %(placeholder)s") % {'placeholder': target_placeholder}
self.cleanup_history(page)
helpers.make_revision_with_plugins(page, request.user, message)
@create_revision()
def post_delete_plugin(self, request, plugin):
plugin_name = force_text(plugin_pool.get_plugin(plugin.plugin_type).name)
page = plugin.placeholder.page
if page:
page.save()
comment = _("%(plugin_name)s plugin at position %(position)s in %(placeholder)s was deleted.") % {
'plugin_name': plugin_name,
'position': plugin.position,
'placeholder': plugin.placeholder,
}
if is_installed('reversion'):
self.cleanup_history(page)
helpers.make_revision_with_plugins(page, request.user, comment)
@create_revision()
def post_clear_placeholder(self, request, placeholder):
page = placeholder.page
if page:
page.save()
comment = _('All plugins in the placeholder "%(name)s" were deleted.') % {
'name': force_text(placeholder)
}
if is_installed('reversion'):
self.cleanup_history(page)
helpers.make_revision_with_plugins(page, request.user, comment)
def get_placeholder_template(self, request, placeholder):
page = placeholder.page
if page:
return page.get_template()
def changelist_view(self, request, extra_context=None):
"The 'change list' admin view for this model."
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
return HttpResponseForbidden(force_text(_("You do not have permission to change pages.")))
try:
cl = CMSChangeList(request, self.model, self.list_display, self.list_display_links, self.list_filter,
self.date_hierarchy, self.search_fields, self.list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given and
# the 'invalid=1' parameter was already in the query string, something
# is screwed up with the database, so display an error page.
if ERROR_FLAG in request.GET.keys():
return render(request, 'admin/invalid_setup.html', {'title': _('Database error')})
return HttpResponseRedirect(request.path_info + '?' + ERROR_FLAG + '=1')
cl.set_items(request)
site_id = request.GET.get('site__exact', None)
if site_id is None:
site_id = current_site(request).pk
site_id = int(site_id)
# languages
languages = get_language_list(site_id)
# parse the cookie that saves which page trees have
# been opened already and extracts the page ID
djangocms_nodes_open = request.COOKIES.get('djangocms_nodes_open', '')
raw_nodes = unquote(djangocms_nodes_open).split(',')
try:
open_menu_trees = [int(c.split('page_', 1)[1]) for c in raw_nodes]
except IndexError:
open_menu_trees = []
# Language may be present in the GET dictionary but empty
language = request.GET.get('language', get_language())
if not language:
language = get_language()
context = {
'title': cl.title,
'is_popup': cl.is_popup,
'cl': cl,
'opts': opts,
'has_add_permission': self.has_add_permission(request),
'root_path': admin_reverse('index'),
'app_label': app_label,
'preview_language': language,
'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
'CMS_PERMISSION': get_cms_setting('PERMISSION'),
'DEBUG': settings.DEBUG,
'site_languages': languages,
'open_menu_trees': open_menu_trees,
}
if is_installed('reversion'):
context['has_recover_permission'] = self.has_recover_permission(request)
context['has_change_permission'] = self.has_change_permission(request)
context.update(extra_context or {})
return render(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.object_name.lower()),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context)
def recoverlist_view(self, request, extra_context=None):
if not self.has_recover_permission(request):
raise PermissionDenied
return super(PageAdmin, self).recoverlist_view(request, extra_context)
def recover_view(self, request, version_id, extra_context=None):
if not self.has_recover_permission(request):
raise PermissionDenied
extra_context = self.update_language_tab_context(request, None, extra_context)
request.original_version_id = version_id
return super(PageAdmin, self).recover_view(request, version_id, extra_context)
def revision_view(self, request, object_id, version_id, extra_context=None):
if not is_installed('reversion'):
return HttpResponseBadRequest('django reversion not installed')
if not self.has_change_permission(request, Page.objects.get(pk=object_id)):
raise PermissionDenied
page = get_object_or_404(self.model, pk=object_id)
if not page.publisher_is_draft:
page = page.publisher_draft
if not page.has_change_permission(request):
return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
try:
version = Version.objects.get(pk=version_id)
clean = page._apply_revision(version.revision, set_dirty=True)
if not clean:
messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
with create_revision():
adapter = self.revision_manager.get_adapter(page.__class__)
self.revision_context_manager.add_to_context(self.revision_manager, page, adapter.get_version_data(page))
self.revision_context_manager.set_comment(_("Reverted to previous version, saved on %(datetime)s") % {"datetime": localize(version.revision.date_created)})
except IndexError as e:
return HttpResponseBadRequest(e.message)
return HttpResponseRedirect(admin_reverse('cms_page_change', args=(quote(object_id),)))
def history_view(self, request, object_id, extra_context=None):
if not self.has_change_permission(request, Page.objects.get(pk=object_id)):
raise PermissionDenied
extra_context = self.update_language_tab_context(request, None, extra_context)
return super(PageAdmin, self).history_view(request, object_id, extra_context)
def get_object(self, request, object_id, from_field=None):
if from_field:
obj = super(PageAdmin, self).get_object(request, object_id, from_field)
else:
# This is for DJANGO_16
obj = super(PageAdmin, self).get_object(request, object_id)
if is_installed('reversion') and getattr(request, 'original_version_id', None):
version = get_object_or_404(Version, pk=getattr(request, 'original_version_id', None))
recover = 'recover' in request.path_info
revert = 'history' in request.path_info
obj, version = self._reset_parent_during_reversion(obj, version, revert, recover)
return obj
def _reset_parent_during_reversion(self, obj, version, revert=False, recover=False):
if version.field_dict['parent']:
try:
Page.objects.get(pk=version.field_dict['parent'])
except:
if revert and obj.parent_id != int(version.field_dict['parent']):
version.field_dict['parent'] = obj.parent_id
if recover:
obj.parent = None
obj.parent_id = None
version.field_dict['parent'] = None
obj.version = version
return obj, version
# Reversion 1.9+ no longer uses these two methods to save revision, but we still need them
# as we do not use signals
def log_addition(self, request, object, message=None):
"""Sets the version meta information."""
if is_installed('reversion') and not hasattr(self, 'get_revision_data'):
adapter = self.revision_manager.get_adapter(object.__class__)
self.revision_context_manager.add_to_context(self.revision_manager, object, adapter.get_version_data(object))
self.revision_context_manager.set_comment(REVISION_INITIAL_COMMENT)
# Same code as reversion 1.9
try:
super(PageAdmin, self).log_addition(request, object, REVISION_INITIAL_COMMENT)
except TypeError: # Django < 1.9 pragma: no cover
super(PageAdmin, self).log_addition(request, object)
def log_change(self, request, object, message):
"""Sets the version meta information."""
if is_installed('reversion') and not hasattr(self, 'get_revision_data'):
adapter = self.revision_manager.get_adapter(object.__class__)
self.revision_context_manager.add_to_context(self.revision_manager, object, adapter.get_version_data(object))
self.revision_context_manager.set_comment(message)
if isinstance(object, Title):
page = object.page
if isinstance(object, Page):
page = object
helpers.make_revision_with_plugins(page, request.user, message)
super(PageAdmin, self).log_change(request, object, message)
# This is just for Django 1.6 / reversion 1.8 compatibility
# The handling of recover / revision in 3.3 can be simplified
    # by using the new reversion semantics and the Django changeform_view
def revisionform_view(self, request, version, template_name, extra_context=None):
try:
with transaction.atomic():
# Revert the revision.
version.revision.revert(delete=True)
# Run the normal change_view view.
with self._create_revision(request):
response = self.change_view(request, version.object_id, request.path, extra_context)
                    # Decide whether to keep the changes.
if request.method == "POST" and response.status_code == 302:
self.revision_context_manager.set_comment(_("Reverted to previous version, saved on %(datetime)s") % {"datetime": localize(version.revision.date_created)})
else:
response.template_name = template_name
response.render()
raise RollBackRevisionView
except RollBackRevisionView:
pass
return response
def render_revision_form(self, request, obj, version, context, revert=False, recover=False):
# reset parent to null if parent is not found
obj, version = self._reset_parent_during_reversion(obj, version, revert, recover)
return super(PageAdmin, self).render_revision_form(request, obj, version, context, revert, recover)
@require_POST
def undo(self, request, object_id):
if not is_installed('reversion'):
return HttpResponseBadRequest('django reversion not installed')
page = get_object_or_404(self.model, pk=object_id)
if not page.publisher_is_draft:
page = page.publisher_draft
if not page.has_change_permission(request):
return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
try:
reverted, clean = page.undo()
if not clean:
messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
except IndexError as e:
return HttpResponseBadRequest(e.message)
return HttpResponse("ok")
@require_POST
def redo(self, request, object_id):
if not is_installed('reversion'):
return HttpResponseBadRequest('django reversion not installed')
page = get_object_or_404(self.model, pk=object_id)
if not page.publisher_is_draft:
page = page.publisher_draft
if not page.has_change_permission(request):
return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
try:
reverted, clean = page.redo()
if not clean:
messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
except IndexError as e:
return HttpResponseBadRequest(e.message)
return HttpResponse("ok")
@require_POST
@create_revision()
def change_template(self, request, object_id):
page = get_object_or_404(self.model, pk=object_id)
if not page.has_change_permission(request):
return HttpResponseForbidden(force_text(_("You do not have permission to change the template")))
to_template = request.POST.get("template", None)
if to_template not in dict(get_cms_setting('TEMPLATES')):
return HttpResponseBadRequest(force_text(_("Template not valid")))
page.template = to_template
page.save()
if is_installed('reversion'):
message = _("Template changed to %s") % dict(get_cms_setting('TEMPLATES'))[to_template]
self.cleanup_history(page)
helpers.make_revision_with_plugins(page, request.user, message)
return HttpResponse(force_text(_("The template was successfully changed")))
@require_POST
@transaction.atomic
def move_page(self, request, page_id, extra_context=None):
"""
Move the page to the requested target, at the given position.
NOTE: We have to change from one "coordinate system" to another to
adapt JSTree to Django Treebeard.
If the Tree looks like this:
<root>
⊢ …
⊢ …
⊢ Page 4
⊢ Page 5 (position 0)
⊢ …
For example,
target=4, position=1 => target=5, position="right"
target=4, position=0 => target=4, position="first-child"
"""
target = request.POST.get('target', None)
position = request.POST.get('position', 0)
site_id = request.POST.get('site', None)
try:
position = int(position)
except (TypeError, ValueError):
position = 0
try:
page = self.model.objects.get(pk=page_id)
except self.model.DoesNotExist:
return jsonify_request(HttpResponseBadRequest("error"))
try:
site = Site.objects.get(id=int(site_id))
except (TypeError, ValueError, MultipleObjectsReturned,
ObjectDoesNotExist):
site = get_current_site(request)
if target is None:
# Special case: If «target» is not provided, it means to let the
# page become a new root node.
try:
tb_target = Page.get_root_nodes().filter(
publisher_is_draft=True, site=site)[position]
if page.is_sibling_of(tb_target) and page.path < tb_target.path:
tb_position = "right"
else:
tb_position = "left"
except IndexError:
# Move page to become the last root node.
tb_target = Page.get_last_root_node()
tb_position = "right"
else:
try:
target = tb_target = self.model.objects.get(pk=int(target), site=site)
except (TypeError, ValueError, self.model.DoesNotExist):
return jsonify_request(HttpResponseBadRequest("error"))
if position == 0:
tb_position = "first-child"
else:
try:
tb_target = target.get_children().filter(
publisher_is_draft=True, site=site)[position]
if page.is_sibling_of(tb_target) and page.path < tb_target.path:
tb_position = "right"
else:
tb_position = "left"
except IndexError:
tb_position = "last-child"
# Does the user have permissions to do this...?
if not page.has_move_page_permission(request) or (
target and not target.has_add_permission(request)):
return jsonify_request(
HttpResponseForbidden(
force_text(_("Error! You don't have permissions to move "
"this page. Please reload the page"))))
page.move_page(tb_target, tb_position)
if is_installed('reversion'):
self.cleanup_history(page)
helpers.make_revision_with_plugins(
page, request.user, _("Page moved"))
return jsonify_request(
HttpResponse(admin_utils.render_admin_menu_item(request, page)))
def get_permissions(self, request, page_id):
page = get_object_or_404(self.model, id=page_id)
can_change_list = Page.permissions.get_change_id_list(request.user, page.site_id)
global_page_permissions = GlobalPagePermission.objects.filter(sites__in=[page.site_id])
page_permissions = PagePermission.objects.for_page(page)
all_permissions = list(global_page_permissions) + list(page_permissions)
        # can the user change global permissions?
has_global = permissions.has_global_change_permissions_permission(request)
permission_set = []
for permission in all_permissions:
if isinstance(permission, GlobalPagePermission):
if has_global:
permission_set.append([(True, True), permission])
else:
permission_set.append([(True, False), permission])
else:
if can_change_list == PagePermissionsPermissionManager.GRANT_ALL:
can_change = True
else:
can_change = permission.page_id in can_change_list
permission_set.append([(False, can_change), permission])
context = {
'page': page,
'permission_set': permission_set,
}
return render(request, 'admin/cms/page/permissions.html', context)
@require_POST
@transaction.atomic
def copy_language(self, request, page_id):
with create_revision():
source_language = request.POST.get('source_language')
target_language = request.POST.get('target_language')
page = Page.objects.get(pk=page_id)
placeholders = page.get_placeholders()
if not target_language or not target_language in get_language_list():
return HttpResponseBadRequest(force_text(_("Language must be set to a supported language!")))
for placeholder in placeholders:
plugins = list(
placeholder.cmsplugin_set.filter(language=source_language).order_by('path'))
if not self.has_copy_plugin_permission(request, placeholder, placeholder, plugins):
return HttpResponseForbidden(force_text(_('You do not have permission to copy these plugins.')))
copy_plugins.copy_plugins_to(plugins, placeholder, target_language)
if page and is_installed('reversion'):
message = _(u"Copied plugins from %(source_language)s to %(target_language)s") % {
'source_language': source_language, 'target_language': target_language}
self.cleanup_history(page)
helpers.make_revision_with_plugins(page, request.user, message)
return HttpResponse("ok")
@require_POST
@transaction.atomic
def copy_page(self, request, page_id, extra_context=None):
"""
Copy the page and all its plugins and descendants to the requested
target, at the given position
NOTE: We have to change from one "coordinate system" to another to
adapt JSTree to Django Treebeard. See comments in move_page().
        NOTE: This code handles more cases than are *currently* supported in
the UI, specifically, the target should never be None and the position
should never be non-zero. These are implemented, however, because we
intend to support these cases later.
"""
target = request.POST.get('target', None)
position = request.POST.get('position', None)
site_id = request.POST.get('site', None)
copy_permissions = request.POST.get('copy_permissions', False)
try:
page = self.model.objects.get(pk=page_id)
except self.model.DoesNotExist:
return jsonify_request(HttpResponseBadRequest("Error"))
try:
position = int(position)
except (TypeError, ValueError):
position = 0
try:
site = Site.objects.get(id=int(site_id))
except (TypeError, ValueError, MultipleObjectsReturned,
ObjectDoesNotExist):
site = get_current_site(request)
if target is None:
# Special case: If «target» is not provided, it means to create the
# new page as a root node.
try:
tb_target = Page.get_root_nodes().filter(
publisher_is_draft=True, site=site)[position]
tb_position = "left"
except IndexError:
# New page to become the last root node.
tb_target = Page.get_last_root_node()
tb_position = "right"
else:
try:
tb_target = self.model.objects.get(pk=int(target), site=site)
assert tb_target.has_add_permission(request)
except (TypeError, ValueError, self.model.DoesNotExist,
AssertionError):
return jsonify_request(HttpResponseBadRequest("Error"))
if position == 0:
# This is really the only possible value for position.
tb_position = "first-child"
else:
# But, just in case...
try:
tb_target = tb_target.get_children().filter(
publisher_is_draft=True, site=site)[position]
tb_position = "left"
except IndexError:
tb_position = "last-child"
try:
new_page = page.copy_page(tb_target, site, tb_position,
copy_permissions=copy_permissions)
results = {"id": new_page.pk}
return HttpResponse(
json.dumps(results), content_type='application/json')
except ValidationError:
exc = sys.exc_info()[1]
return jsonify_request(HttpResponseBadRequest(exc.messages))
@require_POST
@transaction.atomic
@create_revision()
def publish_page(self, request, page_id, language):
try:
page = Page.objects.get(id=page_id, publisher_is_draft=True)
except Page.DoesNotExist:
page = None
# ensure user has permissions to publish this page
all_published = True
if page:
if not page.has_publish_permission(request):
return HttpResponseForbidden(force_text(_("You do not have permission to publish this page")))
published = page.publish(language)
if not published:
all_published = False
statics = request.GET.get('statics', '')
if not statics and not page:
raise Http404("No page or stack found for publishing.")
if statics:
            static_ids = statics.split(',')
for pk in static_ids:
static_placeholder = StaticPlaceholder.objects.get(pk=pk)
published = static_placeholder.publish(request, language)
if not published:
all_published = False
if page:
if all_published:
if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
messages.warning(request, _("Page not published! A parent page is not published yet."))
else:
messages.info(request, _('The content was successfully published.'))
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Page).pk,
object_id=page_id,
object_repr=page.get_title(language),
action_flag=CHANGE,
)
else:
if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
messages.warning(request, _("Page not published! A parent page is not published yet."))
else:
messages.warning(request, _("There was a problem publishing your content"))
if is_installed('reversion') and page:
self.cleanup_history(page, publish=True)
helpers.make_revision_with_plugins(page, request.user, PUBLISH_COMMENT)
# create a new publish reversion
if 'node' in request.GET or 'node' in request.POST:
# if request comes from tree..
return HttpResponse(admin_utils.render_admin_menu_item(request, page))
if 'redirect' in request.GET:
return HttpResponseRedirect(request.GET['redirect'])
referrer = request.META.get('HTTP_REFERER', '')
path = admin_reverse("cms_page_changelist")
if request.GET.get('redirect_language'):
path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'), request.GET.get('redirect_page_id'))
if admin_reverse('index') not in referrer:
if all_published:
if page:
if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
path = page.get_absolute_url(language, fallback=True)
else:
public_page = Page.objects.get(publisher_public=page.pk)
path = '%s?%s' % (public_page.get_absolute_url(language, fallback=True), get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
else:
path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
else:
path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
return HttpResponseRedirect(path)
def cleanup_history(self, page, publish=False):
if is_installed('reversion') and page:
# delete revisions that are not publish revisions
from cms.utils.reversion_hacks import Version
content_type = ContentType.objects.get_for_model(Page)
# reversion 1.8+ removes type field, revision filtering must be based on comments
versions_qs = Version.objects.filter(content_type=content_type, object_id_int=page.pk)
history_limit = get_cms_setting("MAX_PAGE_HISTORY_REVERSIONS")
deleted = []
for version in versions_qs.exclude(revision__comment__in=(REVISION_INITIAL_COMMENT, PUBLISH_COMMENT)).order_by(
'-revision__pk')[history_limit - 1:]:
                if version.revision_id not in deleted:
revision = version.revision
revision.delete()
deleted.append(revision.pk)
            # delete all publish revisions beyond MAX_PAGE_PUBLISH_REVERSIONS
publish_limit = get_cms_setting("MAX_PAGE_PUBLISH_REVERSIONS")
if publish_limit and publish:
deleted = []
for version in versions_qs.filter(revision__comment__exact=PUBLISH_COMMENT).order_by(
'-revision__pk')[publish_limit - 1:]:
                    if version.revision_id not in deleted:
revision = version.revision
revision.delete()
deleted.append(revision.pk)
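    # Illustration of the trimming above (numbers are hypothetical): with
    # MAX_PAGE_HISTORY_REVERSIONS = 15, the slice [history_limit - 1:] keeps
    # the 14 newest non-publish revisions and deletes everything older, so the
    # revision created right after cleanup_history() brings the total back to
    # the configured limit; deduplicating by revision id avoids deleting the
    # same revision twice.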
@require_POST
@transaction.atomic
def unpublish(self, request, page_id, language):
"""
        Unpublish a language of a page.
"""
site = Site.objects.get_current()
page = get_object_or_404(self.model, pk=page_id)
if not page.has_publish_permission(request):
return HttpResponseForbidden(force_text(_("You do not have permission to unpublish this page")))
if not page.publisher_public_id:
return HttpResponseForbidden(force_text(_("This page was never published")))
try:
page.unpublish(language)
message = _('The %(language)s page "%(page)s" was successfully unpublished') % {
'language': get_language_object(language, site)['name'], 'page': page}
messages.info(request, message)
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Page).pk,
object_id=page_id,
object_repr=page.get_title(),
action_flag=CHANGE,
change_message=message,
)
except RuntimeError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
except ValidationError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
path = admin_reverse("cms_page_changelist")
if request.GET.get('redirect_language'):
path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'), request.GET.get('redirect_page_id'))
return HttpResponseRedirect(path)
@require_POST
@transaction.atomic
def revert_page(self, request, page_id, language):
page = get_object_or_404(self.model, id=page_id)
# ensure user has permissions to publish this page
if not page.has_change_permission(request):
return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
page.revert(language)
messages.info(request, _('The page "%s" was successfully reverted.') % page)
if 'node' in request.GET or 'node' in request.POST:
# if request comes from tree..
return HttpResponse(admin_utils.render_admin_menu_item(request, page))
        # TODO: This should never fail, but it may be a point of failure (POF)
path = page.get_absolute_url(language=language)
path = '%s?%s' % (path, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
return HttpResponseRedirect(path)
@create_revision()
def delete_translation(self, request, object_id, extra_context=None):
if 'language' in request.GET:
language = request.GET['language']
else:
language = get_language_from_request(request)
opts = Page._meta
titleopts = Title._meta
app_label = titleopts.app_label
pluginopts = CMSPlugin._meta
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
if not self.has_delete_permission(request, obj):
return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name),
'key': escape(object_id)
})
if not len(list(obj.get_languages())) > 1:
raise Http404(_('There only exists one translation for this page'))
titleobj = get_object_or_404(Title, page__id=object_id, language=language)
saved_plugins = CMSPlugin.objects.filter(placeholder__page__id=object_id, language=language)
using = router.db_for_read(self.model)
kwargs = {
'admin_site': self.admin_site,
'user': request.user,
'using': using
}
deleted_objects, __, perms_needed = get_deleted_objects(
[titleobj],
titleopts,
**kwargs
)[:3]
to_delete_plugins, __, perms_needed_plugins = get_deleted_objects(
saved_plugins,
pluginopts,
**kwargs
)[:3]
deleted_objects.append(to_delete_plugins)
perms_needed = set(list(perms_needed) + list(perms_needed_plugins))
if request.method == 'POST':
if perms_needed:
raise PermissionDenied
            message = _('Title and plugins with language %(language)s were deleted') % {
'language': force_text(get_language_object(language)['name'])
}
self.log_change(request, titleobj, message)
messages.info(request, message)
titleobj.delete()
for p in saved_plugins:
p.delete()
public = obj.publisher_public
if public:
public.save()
if is_installed('reversion'):
self.cleanup_history(obj)
helpers.make_revision_with_plugins(obj, request.user, message)
if not self.has_change_permission(request, None):
return HttpResponseRedirect(admin_reverse('index'))
return HttpResponseRedirect(admin_reverse('cms_page_changelist'))
context = {
"title": _("Are you sure?"),
"object_name": force_text(titleopts.verbose_name),
"object": titleobj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": admin_reverse('index'),
"app_label": app_label,
}
context.update(extra_context or {})
request.current_app = self.admin_site.name
return render(request, self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, titleopts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context)
def preview_page(self, request, object_id, language):
"""Redirecting preview function based on draft_id
"""
page = get_object_or_404(self.model, id=object_id)
attrs = "?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
attrs += "&language=" + language
with force_language(language):
url = page.get_absolute_url(language) + attrs
site = get_current_site(request)
if not site == page.site:
url = "http%s://%s%s" % ('s' if request.is_secure() else '',
page.site.domain, url)
return HttpResponseRedirect(url)
@require_POST
def change_innavigation(self, request, page_id):
"""
Switch the in_navigation of a page
"""
page = get_object_or_404(self.model, pk=page_id)
if page.has_change_permission(request):
page.toggle_in_navigation()
language = request.GET.get('language') or get_language_from_request(request)
return HttpResponse(admin_utils.render_admin_menu_item(request, page, language=language))
return HttpResponseForbidden(force_text(_("You do not have permission to change this page's in_navigation status")))
def get_tree(self, request):
"""
        Get HTML for the descendants (only) of the given page or, if no
        page_id is provided, for all the root nodes.
        Used for lazy-loading pages in cms.pagetree.js.
        Permission checks are done in admin_utils.get_admin_menu_item_context,
        which is called by admin_utils.render_admin_menu_item.
"""
page_id = request.GET.get('pageId', None)
site_id = request.GET.get('site', None)
language = request.GET.get('language', None)
open_nodes = list(map(int, request.GET.getlist('openNodes[]')))
try:
site_id = int(site_id)
site = Site.objects.get(id=site_id)
except (TypeError, ValueError, MultipleObjectsReturned,
ObjectDoesNotExist):
site = get_current_site(request)
if language is None:
language = (request.GET.get('language') or
get_language_from_request(request))
if page_id:
page = get_object_or_404(self.model, pk=int(page_id))
pages = list(page.get_children())
else:
pages = Page.get_root_nodes().filter(site=site,
publisher_is_draft=True)
template = "admin/cms/page/tree/lazy_menu.html"
response = u""
for page in pages:
response += admin_utils.render_admin_menu_item(
request, page,
template=template,
language=language,
open_nodes=open_nodes,
)
return HttpResponse(response)
def add_page_type(self, request):
site = Site.objects.get_current()
language = request.GET.get('language') or get_language()
target = request.GET.get('copy_target')
type_root, created = self.model.objects.get_or_create(reverse_id=PAGE_TYPES_ID, publisher_is_draft=True, site=site,
defaults={'in_navigation': False})
type_title, created = Title.objects.get_or_create(page=type_root, language=language, slug=PAGE_TYPES_ID,
defaults={'title': _('Page Types')})
url = add_url_parameters(admin_reverse('cms_page_add'), target=type_root.pk, position='first-child',
add_page_type=1, copy_target=target, language=language)
return HttpResponseRedirect(url)
def resolve(self, request):
if not request.user.is_staff:
return HttpResponse('/', content_type='text/plain')
obj = False
url = False
if request.session.get('cms_log_latest', False):
log = LogEntry.objects.get(pk=request.session['cms_log_latest'])
try:
obj = log.get_edited_object()
except (ObjectDoesNotExist, ValueError):
obj = None
del request.session['cms_log_latest']
if obj and obj.__class__ in toolbar_pool.get_watch_models() and hasattr(obj, 'get_absolute_url'):
                # Test whether the object's URL can be retrieved;
                # if it can't, the object is not taken into account.
try:
force_text(obj.get_absolute_url())
except:
obj = None
else:
obj = None
if not obj:
pk = request.GET.get('pk', False) or request.POST.get('pk', False)
full_model = request.GET.get('model') or request.POST.get('model', False)
if pk and full_model:
app_label, model = full_model.split('.')
if pk and app_label:
ctype = ContentType.objects.get(app_label=app_label, model=model)
try:
obj = ctype.get_object_for_this_type(pk=pk)
except ctype.model_class().DoesNotExist:
obj = None
try:
force_text(obj.get_absolute_url())
except:
obj = None
if obj:
if not getattr(request, 'toolbar', False) or not getattr(request.toolbar, 'edit_mode', False):
if isinstance(obj, Page):
if obj.get_public_object():
url = obj.get_public_object().get_absolute_url()
else:
url = '%s?%s' % (
obj.get_draft_object().get_absolute_url(),
get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
)
else:
url = obj.get_absolute_url()
else:
url = obj.get_absolute_url()
if url:
return HttpResponse(force_text(url), content_type='text/plain')
return HttpResponse('', content_type='text/plain')
def lookup_allowed(self, key, *args, **kwargs):
if key == 'site__exact':
return True
return super(PageAdmin, self).lookup_allowed(key, *args, **kwargs)
def edit_title_fields(self, request, page_id, language):
title = Title.objects.get(page_id=page_id, language=language)
saved_successfully = False
raw_fields = request.GET.get("edit_fields", 'title')
edit_fields = [field for field in raw_fields.split(",") if field in self.title_frontend_editable_fields]
cancel_clicked = request.POST.get("_cancel", False)
opts = Title._meta
if not edit_fields:
# Defaults to title
edit_fields = ('title',)
if not has_generic_permission(title.page.pk, request.user, "change",
title.page.site.pk):
return HttpResponseForbidden(force_text(_("You do not have permission to edit this page")))
class PageTitleForm(django.forms.ModelForm):
"""
Dynamic form showing only the fields to be edited
"""
class Meta:
model = Title
fields = edit_fields
if not cancel_clicked and request.method == 'POST':
form = PageTitleForm(instance=title, data=request.POST)
if form.is_valid():
form.save()
saved_successfully = True
else:
form = PageTitleForm(instance=title)
admin_form = AdminForm(form, fieldsets=[(None, {'fields': edit_fields})], prepopulated_fields={},
model_admin=self)
media = self.media + admin_form.media
context = {
'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
'title': 'Title',
'plugin': title.page,
'plugin_id': title.page.id,
'adminform': admin_form,
'add': False,
'is_popup': True,
'media': media,
'opts': opts,
'change': True,
'save_as': False,
'has_add_permission': False,
'window_close_timeout': 10,
}
if cancel_clicked:
# cancel button was clicked
context.update({
'cancel': True,
})
return render(request, 'admin/cms/page/plugin/confirm_form.html', context)
if not cancel_clicked and request.method == 'POST' and saved_successfully:
return render(request, 'admin/cms/page/plugin/confirm_form.html', context)
return render(request, 'admin/cms/page/plugin/change_form.html', context)
def get_published_pagelist(self, *args, **kwargs):
"""
        This view is used by the PageSmartLinkWidget as the user types, to feed the autocomplete drop-down.
"""
request = args[0]
if request.is_ajax():
query_term = request.GET.get('q','').strip('/')
language_code = request.GET.get('language_code', settings.LANGUAGE_CODE)
matching_published_pages = self.model.objects.published().public().filter(
Q(title_set__title__icontains=query_term, title_set__language=language_code)
| Q(title_set__path__icontains=query_term, title_set__language=language_code)
| Q(title_set__menu_title__icontains=query_term, title_set__language=language_code)
| Q(title_set__page_title__icontains=query_term, title_set__language=language_code)
).distinct()
results = []
for page in matching_published_pages:
results.append(
{
'path': page.get_path(language=language_code),
'title': page.get_title(language=language_code),
'redirect_url': page.get_absolute_url(language=language_code)
}
)
return HttpResponse(json.dumps(results), content_type='application/json')
else:
return HttpResponseForbidden()
admin.site.register(Page, PageAdmin)
|
vxsx/django-cms
|
cms/admin/pageadmin.py
|
Python
|
bsd-3-clause
| 74,374
|
# -*- coding: utf-8 -*-
"""
Settings for project
"""
from __future__ import absolute_import
import os
import hashlib
import base64
from celery.schedules import crontab
import djcelery
djcelery.setup_loader()
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = '|N9./dYpiLS.."..7|__0054d2e0--bc40086e58eca5d0--2548527bccf42f4f__|.@.nX...%H.R{../O|'
SITE_ID = 1
SITE_NAME = 'Helix.Community' # FIXME
WSGI_APPLICATION = 'core.wsgi.application'
# Display ---------------------------------------------------------------------------------------- #
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Denver'
USE_I18N = True
USE_L10N = True
USE_TZ = True
DEBUG = False
TEMPLATE_DEBUG = False
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
'core.context_processors.site',
)
# Communications --------------------------------------------------------------------------------- #
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# EMAIL_HOST = 'mail0.nyc2'
# EMAIL_HOST_USER = 'support'
# EMAIL_HOST_PASSWORD = '00091220-73bb7bbee083a36b-181b26d8279bc054'
EMAIL_PORT = 25
EMAIL_USE_TLS = True
# EMAIL_SUBJECT_PREFIX = '[Bizopp Support] '
# DEFAULT_FROM_EMAIL = 'support@bizopp.stash.host'
# Django ----------------------------------------------------------------------------------------- #
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.staticfiles',
'django.contrib.sites',
'djcelery',
# 'django_extensions',
'accounts',
'user',
'topic',
'core',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.cache.UpdateCacheMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.admindocs.middleware.XViewMiddleware',
)
# Authentication --------------------------------------------------------------------------------- #
AUTH_USER_MODEL = "core.User"
AUTHENTICATION_BACKENDS = (
'core.backends.ModelBackend',
)
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_COOKIE_SECURE = True
# Routing ---------------------------------------------------------------------------------------- #
ALLOWED_HOSTS = ['*']
ROOT_URLCONF = 'core.urls'
APPEND_SLASH = True
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static")
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Storage ---------------------------------------------------------------------------------------- #
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "karmadecay",
"USER": "karmadecay",
"PASSWORD": "",
"HOST": "postgres.nyc2",
"PORT": 5432,
"OPTIONS": {"sslmode": 'require', },
}
}
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'localhost:6379:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'gsecure': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'localhost:6379:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
}
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
# Task Queue ------------------------------------------------------------------------------------- #
BROKER_URL = 'redis://:29f7cee053284473-670235e57b7350b6@redis.nyc2:6379/3'
CELERYD_CONCURRENCY = 8
CELERYD_PREFETCH_MULTIPLIER = 0
CELERY_TASK_SERIALIZER = 'yaml'
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERY_ANNOTATIONS = {
}
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYBEAT_SCHEDULE = {
}
# Logging ---------------------------------------------------------------------------------------- #
LOGGING = {
'version': 1,
'handlers': {
'logstash': {
'level': 'DEBUG',
'class': 'logstash.UDPLogstashHandler',
'host': 'logstash.nyc2',
'port': 5959,
'version': 1
}
},
'loggers': {
'bizopp.task': {
'handlers': ('logstash', ),
'level': 'DEBUG',
'propagate': True,
},
'bizopp.signal': {
'handlers': ('logstash', ),
'level': 'DEBUG',
'propagate': True,
},
'bizopp.urls': {
'handlers': ('logstash', ),
'level': 'DEBUG',
'propagate': True,
},
'bizopp.podio.webhook': {
'handlers': ('logstash', ),
'level': 'DEBUG',
'propagate': True,
},
'django.request': {
'handlers': ('logstash', ),
'level': 'DEBUG',
'propagate': True,
},
'django.security': {
'handlers': ('logstash', ),
'level': 'DEBUG',
'propagate': True,
}
}
}
# Local Settings --------------------------------------------------------------------------------- #
try:
from core.local_settings import *
except ImportError:
pass
|
xj9/wampum
|
core/settings.py
|
Python
|
gpl-3.0
| 5,942
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'git',
'recipe_engine/step',
]
def RunSteps(api):
api.step('1', cmd=['git', 'status'])
with api.git.env():
api.step('2', cmd=['git', 'status'])
def GenTests(api):
yield api.test('test')
|
Hikari-no-Tenshi/android_external_skia
|
infra/bots/recipe_modules/git/examples/full.py
|
Python
|
bsd-3-clause
| 377
|
import datetime
import math
import smtplib
import sys
import time
from Keysight34972A import Keysight34972A
from Fluke7341 import Fluke7341
from Fluke1502A import Fluke1502A
class RingBuffer():
def __init__(self, size):
self.size = size
self.buffer = [0] * size
self.pointer = 0
self.count = 0
def update(self, value):
self.buffer[self.pointer] = value
self.pointer = (self.pointer + 1) % self.size
self.count += 1
def reset(self):
self.count = 0
def getAverage(self, silent=True):
if self.count < self.size:
if not silent:
print "[WARNING] Buffer has not been filled completely: [{}/{}]".format(self.count, self.size)
return sum(self.buffer) / self.size
def getSTD(self):
std = 0
avg = self.getAverage()
for i in range(self.size):
std += (self.buffer[i] - avg) ** 2
std /= self.size
return math.sqrt(std)
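# Illustrative RingBuffer usage:
#   buf = RingBuffer(3)
#   for v in (1.0, 2.0, 3.0):
#       buf.update(v)
#   buf.getAverage()  # -> 2.0
#   buf.getSTD()      # -> ~0.816 (population standard deviation)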
class EquilibriumMonitor():
def __init__(self, size, name=""):
self.size = size
self.readings = RingBuffer(size)
self.STDs = [99.0] * size
self.count = 0
self.name = name
self.nReadings = 0
def update(self, value):
self.readings.update(value)
std = self.readings.getSTD()
self.nReadings += 1
if self.nReadings >= self.size:
self.count += 1
if std < min(self.STDs):
self._print("converging")
self.minSTD = std
self.count = 0
elif std > max(self.STDs) * 1.025:
self._print("diverging")
self.count = 0
elif self.count < self.size:
self._print("stabilizing")
else:
self._print("need more readings")
self.STDs = self.STDs[1:] + [std]
def isEqualized(self):
return self.count >= self.size
def reset(self):
self.count = 0
def _print(self, msg):
val = self.count
if self.nReadings < self.size:
val = self.nReadings
print "{} {} [{}/{}]".format(self.name.rjust(8), msg, val, self.size)
class Controller():
COMMANDS = ["wait", "hold", "ramp", "set", "stop", "loggeron", "loggeroff"]
DEBUG = True
TEMP_MAX = 50
TEMP_MIN = -25
RAMP = 0
HOLD = 1
WAIT = 2
SET = 3
STOP = 4
GO = 5
LOGGERON = 6
LOGGEROFF = 7
STATES = ["RAMP", "HOLD", "WAIT", "SET", "STOP", "GO", "LOGGERON", "LOGGEROFF"]
def __init__(self):
self.sampleInterval = 5
self.bufferSize = 30
self.stdHoldCount = 30
self.doLogging = True
self.daq = None
self.bath = None
self.probe = None
self.sensorList = []
self.sensorBuffers = []
self.probeBuffer = None
self.bathBuffer = None
self.numSensors = 0
self.file = ""
self.commands = [] # command queue
self.command = 0 # index of current command within self.commands
self.state = self.GO
# state variables
self.rampStart = 0.0
self.rampEnd = 0.0
self.rampInc = 0.0
self.holdTime = 0.0
self.setpoint = 0.0
self.t0 = 0
self.epoch = 0
def connect(self):
self.daq = Keysight34972A()
self.bath = Fluke7341()
self.probe = Fluke1502A()
if self.numSensors > 0:
if not self.daq.connect():
print "Failed to connect to Keysight34972A (DAQ)".format()
return False
self.daq.initialize(Keysight34972A.MODE_RESISTANCE, self.sensorList)
if not self.bath.connect():
print "Failed to connect to Fluke7341 (Calibration Bath)"
return False
if not self.probe.connect():
print "Failed to connect to Fluke1502A (Probe Reader)"
return False
self.epoch = time.time()
self.t0 = time.time()
return True
def disconnect(self):
if self.numSensors > 0:
self.daq.disconnect()
self.bath.disconnect()
self.probe.disconnect()
def init(self):
self.numSensors = len(self.sensorList)
self.sensorBuffers = [EquilibriumMonitor(self.bufferSize, name="sensor{}".format(i)) for i in range(self.numSensors)]
self.probeBuffer = EquilibriumMonitor(self.bufferSize, name="probe")
self.bathBuffer = EquilibriumMonitor(self.bufferSize, name="bath")
timestamp = datetime.datetime.now().isoformat().split('.')[0].replace(':', '-')
self.file = "{}.csv".format(timestamp)
f = open(self.file, "a")
f.write("Timestamp,Elapsed Time,Setpoint,Bath Temp,Probe Temp,{}\n".format(
",".join(["r{}".format(i) for i in range(self.numSensors)])))
f.close()
def validateCommand(self, command):
command = command.strip()
command = command.replace(",", " ")
if command == "":
return True
command = command.split()
com = command[0].lower()
args = command[1:]
if com.startswith("#"):
return True
elif com == "wait":
if len(args) > 0:
self.error("WAIT requires 0 arguments")
return False
elif com == "loggeron":
if len(args) > 0:
self.error("LOGGERON requires 0 arguments")
return False
elif com == "loggeroff":
if len(args) > 0:
self.error("LOGGEROFF requires 0 arguments")
return False
elif com == "hold":
if len(args) != 1:
self.error("HOLD requires 1 argument")
return False
else:
try:
i = int(args[0])
if i < 0:
self.error("HOLD requires positive integer argument")
return False
return True
except ValueError:
self.error("HOLD requires integer argument")
return False
elif com == "ramp":
if len(args) != 3:
self.error("RAMP requires 3 arguments")
return False
else:
try:
for i in range(3):
args[i] = float(args[i])
except ValueError:
self.error("RAMP requires 3 numeric values")
return False
start = args[0]
end = args[1]
inc = args[2]
direction = end - start
if direction * inc < 0:
self.error("RAMP increment has incorrect sign")
return False
if start < self.TEMP_MIN:
self.error("RAMP start must be greater than or equal to {}".format(self.TEMP_MIN))
return False
elif start > self.TEMP_MAX:
self.error("RAMP start must be less than or equal to {}".format(self.TEMP_MAX))
return False
if end < self.TEMP_MIN:
self.error("RAMP end must be greater than or equal to {}".format(self.TEMP_MIN))
return False
elif end > self.TEMP_MAX:
self.error("RAMP end must be less than or equal to {}".format(self.TEMP_MAX))
return False
elif com == "set":
if len(args) != 1:
self.error("SET requires 1 argument")
return False
else:
setpoint = 0
try:
setpoint = float(args[0])
except ValueError:
self.error("SET requires a numeric value")
return False
if setpoint < self.TEMP_MIN:
self.error("SET setpoint must be greater than or equal to {}".format(self.TEMP_MIN))
return False
elif setpoint > self.TEMP_MAX:
self.error("SET setpoint must be less than or equal to {}".format(self.TEMP_MAX))
return False
else:
self.error("Invalid command {}.".format(command))
return False
return True
def validateProgram(self, program):
lines = program.splitlines()
lineCount = 1
for line in lines:
if self.validateCommand(line):
action = self.getAction(line)
if action in self.COMMANDS:
self.commands.append(line)
else:
self.error("Error at line {}".format(lineCount))
return False
lineCount += 1
return True
def getAction(self, command):
if len(command.split()) == 0:
return []
return command.split()[0].strip().lower()
def getArgs(self, command):
args = command.split()[1:]
args = [float(arg) for arg in args]
return args
def nextState(self):
if self.command >= len(self.commands):
self.state = self.STOP
return
action = self.getAction(self.commands[self.command])
args = self.getArgs(self.commands[self.command])
self.command += 1
if action == "wait":
self.state = self.WAIT
elif action == "hold":
self.holdTime = self.t0 + args[0]
self.state = self.HOLD
elif action == "ramp":
self.setpoint = args[0]
self.rampEnd = args[1]
self.rampInc = args[2]
self.bath.setSetpoint(self.setpoint)
self.state = self.RAMP
elif action == "set":
self.setpoint = args[0]
self.state = self.SET
elif action == "stop":
self.state = self.STOP
elif action == "loggeroff":
self.doLogging = False
self.state = self.LOGGEROFF
elif action == "loggeron":
self.doLogging = True
self.state = self.LOGGERON
else:
self.error("UNKOWN COMMAND: {}".format(action))
self.state = self.STOP
self.resetBuffers()
self.info("state: {}".format(self.STATES[self.state]))
def isEqualized(self):
return self.probeBuffer.isEqualized()
def resetBuffers(self):
self.bathBuffer.reset()
self.probeBuffer.reset()
for i in range(self.numSensors):
self.sensorBuffers[i].reset()
def runProgram(self, program):
if not self.validateProgram(program):
print "Invalid program."
return False
self.init()
if not self.connect():
return False
self.program = program.splitlines()
self.command = 0
self.nextState()
while self.state != self.STOP:
self.step()
if self.state == self.GO:
self.nextState()
elif self.state == self.LOGGERON:
self.nextState()
elif self.state == self.LOGGEROFF:
self.nextState()
elif self.state == self.HOLD:
if self.t0 > self.holdTime:
self.nextState()
elif self.state == self.WAIT:
if self.isEqualized():
self.nextState()
elif self.state == self.SET:
self.bath.setSetpoint(self.setpoint)
self.nextState()
elif self.state == self.RAMP:
if abs(self.setpoint - self.rampEnd) < 0.001:
if self.isEqualized():
self.nextState()
if self.isEqualized():
self.setpoint += self.rampInc
self.bath.setSetpoint(self.setpoint)
self.resetBuffers()
elif self.state == self.STOP:
pass
else:
self.error("Unknown state: {}".format(self.state))
self.disconnect()
def step(self):
elapsedTime = datetime.datetime.now() - datetime.datetime.fromtimestamp(self.epoch)
# make new readings and update appropriate buffers
bathTemp = float(self.bath.readTemp())
probeTemp = float(self.probe.readTemp())
resistances = []
if self.numSensors > 0:
resistances = self.daq.readValues()
self.bathBuffer.update(bathTemp)
self.probeBuffer.update(probeTemp)
for i in range(self.numSensors):
self.sensorBuffers[i].update(resistances[i])
# log results
if self.doLogging:
t = datetime.datetime.now()
timestamp = "{}/{}/{} {}:{}:{}".format(t.month, t.day, t.year, t.hour, t.minute, t.second)
seconds = elapsedTime.seconds % 60
minutes = (elapsedTime.seconds / 60) % 60
hours = (elapsedTime.seconds / 3600) % 24
elapsedTime = "{}:{}:{}".format(hours, minutes, seconds)
output = open(self.file, "a")
resistances = ",".join([str(r) for r in resistances])
output.write(",".join([timestamp, elapsedTime, str(self.setpoint),
str(bathTemp), str(probeTemp), resistances]))
output.write("\n")
output.close()
# wait until next measurement interval
while time.time() < self.t0 + self.sampleInterval:
time.sleep(0.01)
self.t0 = self.t0 + self.sampleInterval
def info(self, msg):
if self.DEBUG: print "[INFO]", msg
def warning(self, msg):
if self.DEBUG: print "[WARNING]", msg
def error(self, msg):
if self.DEBUG: print "[ERROR]", msg
"""
command syntax
ramp 0, -1, -0.1
ramp -1, 0, 0.1
hold 600
wait
"""
if __name__ == "__main__":
c = Controller()
c.runProgram("""
LOGGEROFF
SET 0
WAIT
LOGGERON
HOLD 1800
""")
c.disconnect()
# send notification email
if len(sys.argv) > 1:
f = open("credentials.txt", "r")
senderEmail, password = f.read().split(',')
targetEmail = sys.argv[1]
s = smtplib.SMTP('smtp.gmail.com', 587)
s.ehlo()
s.starttls()
s.login(senderEmail, password)
s.sendmail(senderEmail, targetEmail, 'Subject: Experiment Complete\nFile: {}'.format(c.file))
s.quit()
exit()
#test code
c = Controller()
assert c.validateCommand("WAIT") == True
assert c.validateCommand("wait") == True
assert c.validateCommand("wAIt") == True
assert c.validateCommand(" WAIT") == True
assert c.validateCommand(" WAIT ") == True
assert c.validateCommand(" WAIT asdf") == False
assert c.validateCommand("# WAIT asdf") == True
assert c.validateCommand(" # WAIT asdf") == True
assert c.validateCommand("HOLD") == False
assert c.validateCommand("HOLD 1") == True
assert c.validateCommand("HOLD a") == False
assert c.validateCommand("HOLD -1") == False
assert c.validateCommand("HOLD 1.0") == False
assert c.validateCommand("HOLD -1.0") == False
assert c.validateCommand("HOLD 12 1") == False
assert c.validateCommand("RAMP") == False
assert c.validateCommand("RAMP a") == False
assert c.validateCommand("RAMP a b") == False
assert c.validateCommand("RAMP 1 2 3") == True
assert c.validateCommand("RAMP a b c") == False
assert c.validateCommand("RAMP 1 2.0 3") == True
assert c.validateCommand("RAMP 1 2.0 3") == True
assert c.validateCommand("RAMP 1 2.0 -3") == False
assert c.validateCommand("RAMP 1 -2.0 -3") == True
assert c.validateCommand("RAMP -25 50 1.0") == True
assert c.validateCommand("RAMP -26 50 1.0") == False
assert c.validateCommand("RAMP -25 51 1.0") == False
assert c.validateCommand("RAMP -25 -2.0 1.0") == True
assert c.validateCommand("RAMP 1 -2.0 -3 32") == False
assert c.validateCommand("SET") == False
assert c.validateCommand("SET 2") == True
assert c.validateCommand("SET a") == False
assert c.validateCommand("SET 50") == True
assert c.validateCommand("SET 51") == False
assert c.validateCommand("SET 2.0") == True
assert c.validateCommand("SET -25") == True
assert c.validateCommand("SET a a") == False
assert c.validateCommand("SET -26") == False
assert c.validateCommand("SET -2.0") == True
assert c.validateCommand("INVALID COMMAND") == False
assert c.validateProgram("""
# TEST PROGRAM
SET 0.0
WAIT
SET -10
WAIT
RAMP -10, 0, 1.0
# COMMENT
RAMP 0.0 -10 -2.0
HOLD 1800
""") == True
assert c.validateProgram("""
TEST PROGRAM
SET 0.0
WAIT
SET -10
WAIT
RAMP -10, 0, 1.0
# COMMENT
RAMP 0.0 -10 -2.0
HOLD 1800
""") == False
|
geocryology/GeoCryoLabPy
|
equipment/Controller.py
|
Python
|
gpl-3.0
| 17,230
|
#!/usr/bin/env python3
import argparse
import numpy as np
import random
import sys
parser = argparse.ArgumentParser()
parser.add_argument('ref_vectors')
parser.add_argument('vectors')
parser.add_argument('-n', type=int, default=500000)
parser.add_argument('-k', type=int, default=1)
parser.add_argument('-m', type=int, default=1000)
if __name__ == '__main__':
args = parser.parse_args()
with open(args.ref_vectors) as f:
ref_vectors = [np.array([float(x) for x in line.split(',')]) for line in f]
with open(args.vectors) as f:
vectors = [np.array([float(x) for x in line.split(',')]) for line in f]
vectors = list(enumerate(vectors))
n = 0
l = len(vectors)
while n < args.n and l > 0:
vector = ref_vectors[n % len(ref_vectors)]
n += 1
def key(i):
return np.sum((vector - vectors[i][1]) ** 2)
indices = random.sample(range(l), k=args.m)
if args.k > 1:
indices = sorted(indices, key=key)[:args.k]
else:
indices = [min(indices, key=key)]
for i in indices:
sys.stdout.write(str(vectors[i][0]) + '\n')
#sys.stdout.flush()
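        # swap each selected vector into the shrinking tail so it cannot be
        # sampled again; O(1) removal, but the order of the live region
        # [0, l) is not preserved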
for i in indices:
vectors[i], vectors[l - 1] = vectors[l - 1], vectors[i]
l -= 1
|
eske/seq2seq
|
scripts/post_editing/select-by-ter.py
|
Python
|
apache-2.0
| 1,303
|
import subprocess
import json
import os
import argparse
import cv2
import shutil
import math
import colorsys
from dominantColor import colorz
from operator import itemgetter
from PIL import Image, ImageFilter, ImageStat, ImageChops
parser = argparse.ArgumentParser(description='Tries to find a good thumbnail for a video')
parser.add_argument('--src', dest='src', default=None, help='The source video')
args = parser.parse_args()
command = 'ffmpeg -ss %s -i %s -vf "select=gt(scene\,0.4)" -r 1 -frames:v 1 %s'
input_file = args.src
output_file = 'thumb_%s.tif'
num_thumbs = 10
num_out = 3
thumbs = {}
score_points = {
'face': 120,
'sat' : 50,
'sharpness': 100,
'bri': 50,
'con': 100
}
base_dir = os.path.dirname(os.path.realpath(__file__))
input_filepath = os.path.join(base_dir, input_file)
work_folder = os.path.join(base_dir, 'tmp')
analyze_folder = os.path.join(work_folder, 'analyze')
output_folder = os.path.join(base_dir, 'out')
def get_length(filename):
result = subprocess.Popen(
["ffprobe", '-v', 'quiet', '-hide_banner', '-show_streams', '-print_format', 'json', filename, ],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
info = json.loads(result.stdout.read())
return int(float(info['streams'][0]['duration']))
def prepare_folder(folder):
shutil.rmtree(folder)
os.makedirs(folder)
os.makedirs(analyze_folder)
if os.path.isdir(output_folder) is False:
os.makedirs(output_folder)
def generate_thumbs(num, output_file):
duration = get_length(input_filepath)
duration_part = duration / num_thumbs
for i in range(0, num):
cur_filename = (output_file % i)
cur_filepath = os.path.join(work_folder, cur_filename)
thumbs[cur_filepath] = { 'score': 0 }
cur_command = command % (i * duration_part, input_file, cur_filepath)
print cur_command
p = subprocess.Popen(cur_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
print output
# Returns overall perceived brightness of the image, as defined in luma
def brightness(stat):
r,g,b = stat.mean
brightness = math.sqrt(0.241 * (r ** 2) + 0.691 * (g ** 2) + 0.068 * (b ** 2))
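    # 0.241/0.691/0.068 are the widely quoted "perceived brightness" channel
    # weights (HSP-style); the value returned below peaks at 1.0 when the
    # perceived brightness is about 100 and falls off linearly on both sides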
print 'perc brightness %s' % brightness
return 1 - math.fabs(brightness / 100 - 1)
def saturation_dominant_colors(image):
copy = image
colors = colorz(copy)
saturation_indicator = 0
for color in colors:
color = [float(x)/255 for x in color]
hls_color = colorsys.rgb_to_hls(color[0], color[1], color[2])
luminance = hls_color[1] * 100
saturation = hls_color[2] * 100
saturation_indicator += (luminance + saturation)
print saturation_indicator
return saturation_indicator / 600
def has_face(image):
"""
image is expected to be a opencv image
"""
cascade = cv2.CascadeClassifier(os.path.join(base_dir, 'lib', 'haarcascade_frontalface_alt.xml'))
rects = cascade.detectMultiScale(image, 1.3, 4, cv2.cv.CV_HAAR_SCALE_IMAGE, (20, 20))
return len(rects) > 0
def trim(im):
bg = Image.new(im.mode, im.size, im.getpixel((0,0)))
diff = ImageChops.difference(im, bg)
diff = ImageChops.add(diff, diff, 2.0, -100)
bbox = diff.getbbox()
if bbox:
return im.crop(bbox)
def calculate_contrast(stats):
return stats.extrema[0][0]/255 + stats.extrema[0][1]/255 + stats.stddev[0]/255
def analyze():
print thumbs
for file, info in thumbs.iteritems():
if os.path.isfile(file):
filename = os.path.splitext(os.path.basename(file))[0]
print filename
# open image
im = Image.open(file)
# extract original size so we can add it back again
info['width'] = im.size[0]
info['height'] = im.size[1]
print 'Initial Score: %s' % info['score']
# crop all black borders
im = trim(im)
cv_handle = cv2.imread(file, 0)
            # if the crop left nothing (the image was a single color), skip it
if im is None:
continue
# # edges = cv2.Canny(cv_handle, 100, 200)
# hist = cv2.calcHist([cv_handle],[0],None,[256],[0,256])
# #print hist
# # cv2.imwrite(os.path.join(analyze_folder, filename + '_canny' +'.jpg'), edges)
# check if image contains a face
if has_face(cv_handle) is True:
info['score'] += score_points['face']
print 'Has Face - Score: %s' % info['score']
# check saturation of dominant colors
sat = saturation_dominant_colors(im) * score_points['sat']
print 'Saturation Score %s' % sat
info['score'] += sat
# check perceived brightness
v = ImageStat.Stat(im)
b = brightness(v) * score_points['bri']
print 'Brightness score: %s' % b
info['score'] += b
# check sharpness
im = im.convert('L')
bw = ImageStat.Stat(im)
im = im.filter(ImageFilter.FIND_EDGES)
edges = ImageStat.Stat(im)
sha = edges.rms[0]/100 * score_points['sharpness']
print 'Sharpness score: %s' % sha
info['score'] += sha
# check contrast
con = calculate_contrast(bw) * score_points['con']
print 'Contrast score: %s' % con
info['score'] += con
print '# Score: %s' % info['score']
#im.save(os.path.join(analyze_folder, filename + '.jpg'), 'JPEG')
def output():
best = sorted(thumbs.items(),key = lambda x :x[1]['score'],reverse = True)
print best
for i in range(0, num_out):
thumb = best[i][0]
name = input_file + '_' + str(i) + '_' + os.path.basename(thumb) + '.jpg'
if os.path.isfile(thumb) is False:
continue
im = Image.open(thumb)
im.save(os.path.join(output_folder, name), 'JPEG')
def fire():
prepare_folder(work_folder)
generate_thumbs(num_thumbs, output_file)
analyze()
output()
fire()
|
luhmann/movie-thumbnails
|
thumbs.py
|
Python
|
mit
| 6,157
|
from array import array
# iterative permutation: an explicit stack of (prefix, remaining) pairs
# stands in for the recursion used by string_permute below
def string_permute_iterative(ar, hi):
    stack = [([], ar[:hi + 1])]
    while stack:
        prefix, rest = stack.pop()
        if not rest:
            print prefix
        else:
            for index in xrange(len(rest)):
                stack.append((prefix + [rest[index]],
                              rest[:index] + rest[index + 1:]))
def string_permute(ar, lo, hi, result):
# this is beautiful
if lo == hi:
# print ar
result.append(ar[:])
else:
for index in xrange(lo, hi+1):
ar[index], ar[lo] = ar[lo], ar[index]
string_permute(ar, lo+1, hi, result)
ar[index], ar[lo] = ar[lo], ar[index]
return result
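# Example: string_permute(list('123'), 0, 2, []) returns all six orderings;
# each recursion level fixes position `lo` with a swap and swaps back on the
# way out, so `ar` is restored after every branch.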
if __name__ == "__main__":
f = array('c', '123')
result = []
# string_permute(f.tolist(), 0, len(f)-1, result)
# print result
string_permute_iterative(f.tolist(), len(f)-1)
# string_permute_iter(f.tolist(), 0, len(f)-1) # this is useless
|
codecakes/algorithms_monk
|
string/string_permutation.py
|
Python
|
mit
| 1,318
|
#!/usr/bin/env python3
"""docstring"""
import argparse
import os
import re
import sys
from collections import defaultdict
# --------------------------------------------------
def get_args():
"""get args"""
parser = argparse.ArgumentParser(description='Annotate UProC')
parser.add_argument('-k', '--kegg_out', help='KEGG out',
metavar='str', type=str, default='')
parser.add_argument('-p', '--pfam_out', help='PFAM out',
metavar='str', type=str, default='')
parser.add_argument('-e', '--kegg_desc', help='KEGG descriptions',
metavar='str', type=str, default='')
parser.add_argument('-f', '--pfam_desc', help='PFAM descriptions',
metavar='str', type=str, default='')
parser.add_argument('-o', '--out', help='Outfile',
metavar='str', type=str, default='out')
return parser.parse_args()
# --------------------------------------------------
def main():
"""main"""
args = get_args()
kegg_out = args.kegg_out
pfam_out = args.pfam_out
kegg_desc = args.kegg_desc
pfam_desc = args.pfam_desc
out_file = args.out
if not kegg_out and not pfam_out:
print('Need --kegg_out and/or --pfam_out')
sys.exit(1)
if os.path.isfile(out_file):
answer = input('--out "{}" exists. Overwrite [yN]? '.format(out_file))
if not answer.lower().startswith('y'):
print('Not OK, exiting')
sys.exit(1)
out_fh = open(out_file, 'w')
num_written = 0
num_written += process('kegg', kegg_out, kegg_desc, out_fh)
num_written += process('pfam', pfam_out, pfam_desc, out_fh)
    print('Done, wrote {} to file "{}".'.format(num_written, out_file))
# --------------------------------------------------
def process(source, uproc_out, desc_file, fh):
    """Annotate one UProC output file; return the number of lines written."""
    # skip this source unless both files were provided; return 0 so the
    # caller's num_written accumulation keeps working
    if not (uproc_out and desc_file):
        return 0
    id_to_desc = defaultdict(str)
    for line in open(desc_file):
        flds = line.rstrip().split('\t')
        if len(flds) == 2:
            id_to_desc[flds[0]] = flds[1]
    num_written = 0
    for line in open(uproc_out):
        flds = line.rstrip().split(',')
        gene = re.sub(r'\|.*', '', flds[1])
        prot_id = flds[6]
        score = flds[7]
        desc = id_to_desc.get(prot_id, 'NONE')
        fh.write('\t'.join([gene, source, prot_id, desc, score]) + '\n')
        num_written += 1
    return num_written
# --------------------------------------------------
if __name__ == '__main__':
main()
|
kyclark/metagenomics-book
|
python/uproc/annotate_uproc.py
|
Python
|
gpl-3.0
| 2,538
|
# projecteuler.com/problem=7
def main():
res = NstPrime(10001)
print(res)
#for i in range(1, 100):
#if isPrime(i):
#print(i)
def NstPrime(n):
i = 2
while n >= 1:
if isPrime(i):
n = n - 1
i = i + 1
return i-1
def isPrime(n):
    # trial division up to sqrt(n); 0 and 1 are not prime
    if n < 2:
        return False
    i = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i = i + 1
    return True
if __name__ == '__main__':
main()
|
yuriyshapovalov/Prototypes
|
ProjectEuler/python/prob7.py
|
Python
|
apache-2.0
| 392
|
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import datetime
from gcloud import datastore
def path_to_key(datastore, path):
"""
Translates a file system path to a datastore key. The basename becomes the
key name and the extension becomes the kind.
Examples:
/file.ext -> key(ext, file)
/parent.ext/file.ext -> key(ext, parent, ext, file)
"""
key_parts = []
path_parts = path.strip(u'/').split(u'/')
for n, x in enumerate(path_parts):
name, ext = x.rsplit('.', 1)
key_parts.extend([ext, name])
return datastore.key(*key_parts)
def create_user(ds, username, profile):
key = path_to_key(ds, '{0}.user'.format(username))
entity = datastore.Entity(key)
entity.update(profile)
ds.put(entity)
def create_post(ds, username, post_content):
now = datetime.datetime.utcnow()
key = path_to_key(ds, '{0}.user/{1}.post'.format(username, now))
entity = datastore.Entity(key)
entity.update({
'created': now,
'created_by': username,
'content': post_content
})
ds.put(entity)
def repost(ds, username, original):
now = datetime.datetime.utcnow()
new_key = path_to_key(ds, '{0}.user/{1}.post'.format(username, now))
new = datastore.Entity(new_key)
new.update(original)
ds.put(new)
def list_posts_by_user(ds, username):
user_key = path_to_key(ds, '{0}.user'.format(username))
return ds.query(kind='post', ancestor=user_key).fetch()
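# Keying posts under their author (see path_to_key) makes the query above an
# ancestor query, which Cloud Datastore serves with strong consistency.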
def list_all_posts(ds):
return ds.query(kind='post').fetch()
def main(project_id):
ds = datastore.Client(dataset_id=project_id)
print("Creating users...")
create_user(ds, 'tonystark',
{'name': 'Tony Stark', 'location': 'Stark Island'})
create_user(ds, 'peterparker',
{'name': 'Peter Parker', 'location': 'New York City'})
print("Creating posts...")
for n in range(1, 10):
create_post(ds, 'tonystark', "Tony's post #{0}".format(n))
create_post(ds, 'peterparker', "Peter's post #{0}".format(n))
print("Re-posting tony's post as peter...")
tonysposts = list_posts_by_user(ds, 'tonystark')
for post in tonysposts:
original_post = post
break
repost(ds, 'peterparker', original_post)
print('Posts by tonystark:')
for post in list_posts_by_user(ds, 'tonystark'):
print("> {0} on {1}".format(post['content'], post['created']))
print('Posts by peterparker:')
for post in list_posts_by_user(ds, 'peterparker'):
print("> {0} on {1}".format(post['content'], post['created']))
print('Posts by everyone:')
for post in list_all_posts(ds):
print("> {0} on {1}".format(post['content'], post['created']))
print('Cleaning up...')
ds.delete_multi([
path_to_key(ds, 'tonystark.user'),
path_to_key(ds, 'peterparker.user')
])
ds.delete_multi([
x.key for x in list_all_posts(ds)])
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Demonstrates wiki data model.')
parser.add_argument('project_id', help='Your cloud project ID.')
args = parser.parse_args()
main(args.project_id)
|
JPO1/python-docs-samples
|
blog/introduction_to_data_models_in_cloud_datastore/blog.py
|
Python
|
apache-2.0
| 3,748
|
# -*- coding: utf-8 -*-
# OpenERP, Open Source Management Solution
# Copyright (c) 2015 Rooms For (Hong Kong) Limited T/A OSCG
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{
    'name': 'Report Task Construction Order',
'category': 'Reporting',
'version': '9.0.1.0.0',
'author': 'Rooms For (Hong Kong) Limited T/A OSCG',
'website': 'https://www.odoo-asia.com/',
    'license': 'AGPL-3',
'depends': ['report_aeroo','sale_service','project_site','purchase'],
'summary':"""Print Construction Order from project.task using Aeroo Report""",
'description': """
Print Construction Order from project.task using Aeroo Report
""",
'data': [
'report/report.xml',
],
'installable': True,
    'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
rfhk/ykt-custom
|
report_task_construction_order/__openerp__.py
|
Python
|
agpl-3.0
| 1,472
|
import json
from boto.sqs.message import Message
class SQSJSONMessage(Message):
def encode(self, value):
return json.dumps(value)
def decode(self, value):
return json.loads(value)
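# Illustrative usage with a boto SQS queue (queue setup not shown):
#   queue.set_message_class(SQSJSONMessage)
#   queue.write(SQSJSONMessage(body={'event': 'signup', 'user_id': 42}))
#   msg = queue.read()
#   payload = msg.get_body()  # dict decoded from the JSON body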
|
alesdotio/motorway
|
motorway/contrib/amazon_sqs/utils.py
|
Python
|
apache-2.0
| 206
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v9.services",
marshal="google.ads.googleads.v9",
manifest={"GetLandingPageViewRequest",},
)
class GetLandingPageViewRequest(proto.Message):
r"""Request message for
[LandingPageViewService.GetLandingPageView][google.ads.googleads.v9.services.LandingPageViewService.GetLandingPageView].
Attributes:
resource_name (str):
Required. The resource name of the landing
page view to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
googleads/google-ads-python
|
google/ads/googleads/v9/services/types/landing_page_view_service.py
|
Python
|
apache-2.0
| 1,245
|
"""HTTP related errors."""
from asyncio import TimeoutError
__all__ = (
'DisconnectedError', 'ClientDisconnectedError', 'ServerDisconnectedError',
'HttpProcessingError', 'BadHttpMessage',
'HttpMethodNotAllowed', 'HttpBadRequest', 'HttpProxyError',
'BadStatusLine', 'LineTooLong', 'InvalidHeader',
'ClientError', 'ClientHttpProcessingError', 'ClientConnectionError',
'ClientOSError', 'ClientTimeoutError', 'ProxyConnectionError',
'ClientRequestError', 'ClientResponseError',
'FingerprintMismatch',
'WSServerHandshakeError')
class DisconnectedError(Exception):
"""Disconnected."""
class ClientDisconnectedError(DisconnectedError):
"""Client disconnected."""
class ServerDisconnectedError(DisconnectedError):
"""Server disconnected."""
class ClientError(Exception):
"""Base class for client connection errors."""
class ClientHttpProcessingError(ClientError):
"""Base class for client HTTP processing errors."""
class ClientRequestError(ClientHttpProcessingError):
"""Connection error during sending request."""
class ClientResponseError(ClientHttpProcessingError):
"""Connection error during reading response."""
class ClientConnectionError(ClientError):
"""Base class for client socket errors."""
class ClientOSError(ClientConnectionError, OSError):
"""OSError error."""
class ClientTimeoutError(ClientConnectionError, TimeoutError):
"""Client connection timeout error."""
class ProxyConnectionError(ClientConnectionError):
"""Proxy connection error.
Raised in :class:`aiohttp.connector.ProxyConnector` if
connection to proxy can not be established.
"""
class HttpProcessingError(Exception):
"""HTTP error.
Shortcut for raising HTTP errors with custom code, message and headers.
:param int code: HTTP Error code.
:param str message: (optional) Error message.
:param list of [tuple] headers: (optional) Headers to be sent in response.
"""
code = 0
message = ''
headers = None
def __init__(self, *, code=None, message='', headers=None):
if code is not None:
self.code = code
self.headers = headers
self.message = message
super().__init__("%s, message='%s'" % (self.code, message))
class WSServerHandshakeError(HttpProcessingError):
"""websocket server handshake error."""
class HttpProxyError(HttpProcessingError):
"""HTTP proxy error.
Raised in :class:`aiohttp.connector.ProxyConnector` if
proxy responds with status other than ``200 OK``
on ``CONNECT`` request.
"""
class BadHttpMessage(HttpProcessingError):
code = 400
message = 'Bad Request'
def __init__(self, message, *, headers=None):
super().__init__(message=message, headers=headers)
class HttpMethodNotAllowed(HttpProcessingError):
code = 405
message = 'Method Not Allowed'
class HttpBadRequest(BadHttpMessage):
code = 400
message = 'Bad Request'
class ContentEncodingError(BadHttpMessage):
"""Content encoding error."""
class TransferEncodingError(BadHttpMessage):
"""transfer encoding error."""
class LineTooLong(BadHttpMessage):
def __init__(self, line, limit='Unknown'):
super().__init__(
"got more than %s bytes when reading %s" % (limit, line))
class InvalidHeader(BadHttpMessage):
def __init__(self, hdr):
if isinstance(hdr, bytes):
hdr = hdr.decode('utf-8', 'surrogateescape')
super().__init__('Invalid HTTP Header: {}'.format(hdr))
self.hdr = hdr
class BadStatusLine(BadHttpMessage):
def __init__(self, line=''):
if not line:
line = repr(line)
self.args = line,
self.line = line
class LineLimitExceededParserError(HttpBadRequest):
"""Line is too long."""
def __init__(self, msg, limit):
super().__init__(msg)
self.limit = limit
class FingerprintMismatch(ClientConnectionError):
"""SSL certificate does not match expected fingerprint."""
def __init__(self, expected, got, host, port):
self.expected = expected
self.got = got
self.host = host
self.port = port
def __repr__(self):
return '<{} expected={} got={} host={} port={}>'.format(
self.__class__.__name__, self.expected, self.got,
self.host, self.port)
|
esaezgil/aiohttp
|
aiohttp/errors.py
|
Python
|
apache-2.0
| 4,390
|
from toontown.coghq.SellbotCogHQLoader import SellbotCogHQLoader
from toontown.toonbase import ToontownGlobals
from toontown.hood.CogHood import CogHood
class SellbotHQ(CogHood):
notify = directNotify.newCategory('SellbotHQ')
ID = ToontownGlobals.SellbotHQ
LOADER_CLASS = SellbotCogHQLoader
def load(self):
CogHood.load(self)
self.sky.setScale(2.0)
def enter(self, requestStatus):
CogHood.enter(self, requestStatus)
base.localAvatar.setCameraFov(ToontownGlobals.CogHQCameraFov)
base.camLens.setNearFar(ToontownGlobals.CogHQCameraNear, ToontownGlobals.CogHQCameraFar)
|
Spiderlover/Toontown
|
toontown/hood/SellbotHQ.py
|
Python
|
mit
| 634
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Online Members Directory',
'category': 'Website',
'summary': 'Publish your members directory',
'version': '1.0',
'description': """
Publish your members/association directory publicly.
""",
'depends': ['website_partner', 'website_google_map', 'association', 'website_sale'],
'data': [
'data/membership_data.xml',
'views/website_membership_templates.xml',
'security/ir.model.access.csv',
'security/website_membership.xml',
],
'demo': ['data/membership_demo.xml'],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
t3dev/odoo
|
addons/website_membership/__manifest__.py
|
Python
|
gpl-3.0
| 711
|
# Copyright 2014 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nova import exception
from nova.objects import base
from nova.objects import fields
from nova.virt import hardware
def all_things_equal(obj_a, obj_b):
for name in obj_a.fields:
set_a = obj_a.obj_attr_is_set(name)
set_b = obj_b.obj_attr_is_set(name)
if set_a != set_b:
return False
elif not set_a:
continue
if getattr(obj_a, name) != getattr(obj_b, name):
return False
return True
@base.NovaObjectRegistry.register
class NUMACell(base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Added pinned_cpus and siblings fields
# Version 1.2: Added mempages field
VERSION = '1.2'
fields = {
'id': fields.IntegerField(read_only=True),
'cpuset': fields.SetOfIntegersField(),
'memory': fields.IntegerField(),
'cpu_usage': fields.IntegerField(default=0),
'memory_usage': fields.IntegerField(default=0),
'pinned_cpus': fields.SetOfIntegersField(),
'siblings': fields.ListOfSetsOfIntegersField(),
'mempages': fields.ListOfObjectsField('NUMAPagesTopology'),
}
obj_relationships = {
'mempages': [('1.2', '1.0')]
}
def __eq__(self, other):
return all_things_equal(self, other)
def __ne__(self, other):
return not (self == other)
@property
def free_cpus(self):
return self.cpuset - self.pinned_cpus or set()
@property
def free_siblings(self):
return [sibling_set & self.free_cpus
for sibling_set in self.siblings]
@property
def avail_cpus(self):
return len(self.free_cpus)
@property
def avail_memory(self):
return self.memory - self.memory_usage
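    # pin_cpus()/unpin_cpus() below are pure set algebra: CPUs outside this
    # cell's cpuset raise CPUPinningUnknown, while double-pinning (or
    # unpinning CPUs that are not all currently pinned) raises
    # CPUPinningInvalid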
def pin_cpus(self, cpus):
if cpus - self.cpuset:
raise exception.CPUPinningUnknown(requested=list(cpus),
                                              cpuset=list(self.cpuset))
if self.pinned_cpus & cpus:
raise exception.CPUPinningInvalid(requested=list(cpus),
pinned=list(self.pinned_cpus))
self.pinned_cpus |= cpus
def unpin_cpus(self, cpus):
if cpus - self.cpuset:
raise exception.CPUPinningUnknown(requested=list(cpus),
                                              cpuset=list(self.cpuset))
if (self.pinned_cpus & cpus) != cpus:
raise exception.CPUPinningInvalid(requested=list(cpus),
pinned=list(self.pinned_cpus))
self.pinned_cpus -= cpus
def _to_dict(self):
return {
'id': self.id,
'cpus': hardware.format_cpu_spec(
self.cpuset, allow_ranges=False),
'mem': {
'total': self.memory,
'used': self.memory_usage},
'cpu_usage': self.cpu_usage}
@classmethod
def _from_dict(cls, data_dict):
cpuset = hardware.parse_cpu_spec(
data_dict.get('cpus', ''))
cpu_usage = data_dict.get('cpu_usage', 0)
memory = data_dict.get('mem', {}).get('total', 0)
memory_usage = data_dict.get('mem', {}).get('used', 0)
cell_id = data_dict.get('id')
return cls(id=cell_id, cpuset=cpuset, memory=memory,
cpu_usage=cpu_usage, memory_usage=memory_usage,
mempages=[], pinned_cpus=set([]), siblings=[])
def can_fit_hugepages(self, pagesize, memory):
"""Returns whether memory can fit into hugepages size
        :param pagesize: a page size in KiB
        :param memory: the amount of memory to fit, in KiB
:returns: whether memory can fit in hugepages
:raises: MemoryPageSizeNotSupported if page size not supported
"""
for pages in self.mempages:
if pages.size_kb == pagesize:
return (memory <= pages.free_kb and
(memory % pages.size_kb) == 0)
raise exception.MemoryPageSizeNotSupported(pagesize=pagesize)
@base.NovaObjectRegistry.register
class NUMAPagesTopology(base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'size_kb': fields.IntegerField(),
'total': fields.IntegerField(),
'used': fields.IntegerField(default=0),
}
def __eq__(self, other):
return all_things_equal(self, other)
def __ne__(self, other):
return not (self == other)
@property
def free(self):
"""Returns the number of avail pages."""
return self.total - self.used
@property
def free_kb(self):
"""Returns the avail memory size in KiB."""
return self.free * self.size_kb
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class NUMATopology(base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: Update NUMACell to 1.1
# Version 1.2: Update NUMACell to 1.2
VERSION = '1.2'
fields = {
'cells': fields.ListOfObjectsField('NUMACell'),
}
obj_relationships = {
'cells': [('1.0', '1.0'), ('1.1', '1.1'), ('1.2', '1.2')]
}
@classmethod
def obj_from_primitive(cls, primitive):
if 'nova_object.name' in primitive:
obj_topology = super(NUMATopology, cls).obj_from_primitive(
primitive)
else:
# NOTE(sahid): This compatibility code needs to stay until we can
# guarantee that there are no cases of the old format stored in
# the database (or forever, if we can never guarantee that).
obj_topology = NUMATopology._from_dict(primitive)
return obj_topology
def _to_json(self):
return jsonutils.dumps(self.obj_to_primitive())
@classmethod
def obj_from_db_obj(cls, db_obj):
return cls.obj_from_primitive(
jsonutils.loads(db_obj))
def __len__(self):
"""Defined so that boolean testing works the same as for lists."""
return len(self.cells)
def _to_dict(self):
# TODO(sahid): needs to be removed.
return {'cells': [cell._to_dict() for cell in self.cells]}
@classmethod
def _from_dict(cls, data_dict):
return cls(cells=[
NUMACell._from_dict(cell_dict)
for cell_dict in data_dict.get('cells', [])])
@base.NovaObjectRegistry.register
class NUMATopologyLimits(base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'cpu_allocation_ratio': fields.FloatField(),
'ram_allocation_ratio': fields.FloatField(),
}
def to_dict_legacy(self, host_topology):
cells = []
for cell in host_topology.cells:
cells.append(
{'cpus': hardware.format_cpu_spec(
cell.cpuset, allow_ranges=False),
'mem': {'total': cell.memory,
'limit': cell.memory * self.ram_allocation_ratio},
'cpu_limit': len(cell.cpuset) * self.cpu_allocation_ratio,
'id': cell.id})
return {'cells': cells}
@classmethod
def obj_from_db_obj(cls, db_obj):
if 'nova_object.name' in db_obj:
obj_topology = cls.obj_from_primitive(db_obj)
else:
# NOTE(sahid): This compatibility code needs to stay until we can
# guarantee that all compute nodes are using RPC API => 3.40.
cell = db_obj['cells'][0]
ram_ratio = cell['mem']['limit'] / float(cell['mem']['total'])
cpu_ratio = cell['cpu_limit'] / float(len(hardware.parse_cpu_spec(
cell['cpus'])))
obj_topology = NUMATopologyLimits(
cpu_allocation_ratio=cpu_ratio,
ram_allocation_ratio=ram_ratio)
return obj_topology
|
barnsnake351/nova
|
nova/objects/numa.py
|
Python
|
apache-2.0
| 8,564
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import sys
if __name__ == '__main__':
java_args = ['java', '-jar', 'target/driver.jar'] + sys.argv[1:]
subprocess.call(['mvn', 'package'])
subprocess.call(['cp', 'target/driver.jar', 'target/classes/gce/'])
subprocess.call([
'zip', '-FSr', './target/classes/gce/cps.zip', './proto',
'./python_src', './node_src/src', './node_src/package.json', './go_src'
])
subprocess.call(java_args)
|
GoogleCloudPlatform/pubsub
|
load-test-framework/run.py
|
Python
|
apache-2.0
| 1,022
|
# -*- coding: utf-8 -*-
#!/usr/bin/python
__doc__ = '''
Reasonable Python
A module for integrating F-logic into Python
f2py.py --- translating F-logic back to Python
by Markus Schatten <markus_dot_schatten_at_foi_dot_hr>
Faculty of Organization and Informatics,
Varaždin, Croatia, 2007
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
'''
import types
class f2py:
def classForName( self, name, ns ):
return ns[ name ]
def translate( self, string, gl ):
if not isinstance( string, str ):
raise ValueError, 'Only FLORA-2 strings can be translated back to Python objects'
if not string[ :2 ] == 'py':
raise ValueError, 'The supplied string does not seem to be a FLORA-2 string'
if string == 'pybuffer':
return types.BufferType
if string == 'pybuiltinfunction':
return types.BuiltinFunctionType
if string == 'pybuiltinmethod':
return types.BuiltinMethodType
if string == 'pycode':
return types.CodeType
if string == 'pydictproxy':
return types.DictProxyType
if string == 'pyellipsis':
return types.EllipsisType
if string == 'pyfile':
return types.FileType
if string == 'pyframe':
return types.FrameType
if string == 'pyfunction':
return types.FunctionType
if string == 'pygenerator':
return types.GeneratorType
if string == 'pylambda':
return types.LambdaType
if string == 'pymethod':
return types.MethodType
if string == 'pynotimplemented':
return types.NotImplementedType
if string == 'pyslice':
return types.SliceType
if string == 'pytraceback':
return types.TracebackType
if string == 'pyunboundmethod':
return types.UnboundMethodType
if string == 'pyxrange':
return types.XRangeType
if string[ 0 ] == '_':
return None
if string[ :6 ] == 'pytype' or string[ :8 ] == 'pyclass':
classname = string.split( 'xxxmarexxx' )[ -1: ][ 0 ]
package = string.split( 'xxxmarexxx' )[ -1: ][ : ]
return self.classForName( classname, gl )
else:
return 'pyunknown'
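    # clean_list() below coerces FLORA-2 result atoms back to Python values,
    # trying the bool literals, then int, long, float and complex in turn,
    # and falling back to the raw string when nothing parses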
def clean_list( self, lst ):
retlst = []
for i in lst:
if i == 'true':
retlst.append( True )
elif i == 'false':
retlst.append( False )
try:
retlst.append( int( i ) )
except:
try:
retlst.append( long( i ) )
except:
try:
retlst.append( float( i ) )
except:
try:
retlst.append( complex( i ) )
except:
retlst.append( i )
return retlst
|
johannesloetzsch/reasonablepy
|
rp/f2py.py
|
Python
|
lgpl-2.1
| 3,062
|
"""
Django settings for tests project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#f*oce7(o0i=@15kk-29gr(n11e!hd%vbrxmvnlzjcjw$5meyl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'static_site',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tests.urls'
WSGI_APPLICATION = 'tests.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
zakuro9715/django-static-site
|
tests/settings.py
|
Python
|
mit
| 1,988
|
from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, iPlayableService, eServiceReference, eEPGCache, eActionMap, getDesktop, eDVBDB
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
####key debug
# from keyids import KEYIDS
# from datetime import datetime
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
return self.__class__.__name__ == "InfoBar"
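# Added note (not in the original source): resumePointCache maps a service
# reference string to [last-access timestamp, play position, length].
# setResumePoint() below evicts an entry older than the one just added once
# the cache holds more than 50 services, and the cache is pickled to disk at
# most once per hour (tracked via resumePointCacheLast).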
def setResumePoint(session):
global resumePointCache, resumePointCacheLast
service = session.nav.getCurrentService()
ref = session.nav.getCurrentlyPlayingServiceOrGroup()
if (service is not None) and (ref is not None): # and (ref.type != 1):
# ref type 1 has its own memory...
seek = service.seek()
if seek:
pos = seek.getPlayPosition()
if not pos[0]:
key = ref.toString()
lru = int(time())
l = seek.getLength()
if l:
l = l[1]
else:
l = None
resumePointCache[key] = [lru, pos[1], l]
if len(resumePointCache) > 50:
candidate = key
for k,v in resumePointCache.items():
if v[0] < lru:
candidate = k
del resumePointCache[candidate]
if lru - resumePointCacheLast > 3600:
saveResumePoints()
def delResumePoint(ref):
global resumePointCache, resumePointCacheLast
try:
del resumePointCache[ref.toString()]
except KeyError:
pass
if int(time()) - resumePointCacheLast > 3600:
saveResumePoints()
def getResumePoint(session):
global resumePointCache
ref = session.nav.getCurrentlyPlayingServiceOrGroup()
if (ref is not None) and (ref.type != 1):
try:
entry = resumePointCache[ref.toString()]
entry[0] = int(time()) # update LRU timestamp
return entry[1]
except KeyError:
return None
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
def __init__(self):
self.dishDialog = self.session.instantiateDialog(Dish)
if SystemInfo["hasOSDAnimation"]:
self.dishDialog.setAnimationMode(0)
class InfoBarUnhandledKey:
def __init__(self):
self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
if SystemInfo["hasOSDAnimation"]:
self.unhandledKeyDialog.setAnimationMode(0)
self.hideUnhandledKeySymbolTimer = eTimer()
self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
self.checkUnusedTimer = eTimer()
self.checkUnusedTimer.callback.append(self.checkUnused)
self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
self.flags = (1<<1)
self.uflags = 0
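		# Added note: actionA runs at the highest priority and records every key
		# event type in self.flags; actionB runs at the lowest priority and only
		# sees events no other handler consumed (self.uflags). checkUnused()
		# shows the "unhandled key" symbol when both bitmasks match, i.e. no
		# handler consumed any part of the keypress.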
#this function is called on every keypress!
def actionA(self, key, flag):
####key debug
#try:
# print 'KEY: %s %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next(),getKeyDescription(key)[0])
#except:
# try:
# print 'KEY: %s %s' % (key,(key_name for key_name,value in KEYIDS.items() if value==key).next()) # inverse dictionary lookup in KEYIDS
# except:
# print 'KEY: %s' % (key)
self.unhandledKeyDialog.hide()
if flag != 4:
if self.flags & (1<<1):
self.flags = self.uflags = 0
self.flags |= (1<<flag)
if flag == 1: # break
self.checkUnusedTimer.start(0, True)
return 0
#this function is only called when no other action has handled this key
def actionB(self, key, flag):
if flag != 4:
self.uflags |= (1<<flag)
def checkUnused(self):
if self.flags == self.uflags:
self.unhandledKeyDialog.show()
self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
def __init__(self):
self.onExecBegin.append(self.__onExecBegin)
self.onExecEnd.append(self.__onExecEnd)
self.screenSaverTimer = eTimer()
self.screenSaverTimer.callback.append(self.screensaverTimeout)
self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
self.screensaver.hide()
def __onExecBegin(self):
self.ScreenSaverTimerStart()
def __onExecEnd(self):
if self.screensaver.shown:
self.screensaver.hide()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
self.screenSaverTimer.stop()
def ScreenSaverTimerStart(self):
time = int(config.usage.screen_saver.value)
flag = self.seekstate[0]
if not flag:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
ref = ref.toString().split(":")
flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
if time and flag:
self.screenSaverTimer.startLongTimer(time)
else:
self.screenSaverTimer.stop()
def screensaverTimeout(self):
if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
self.hide()
if hasattr(self, "pvrStateDialog"):
self.pvrStateDialog.hide()
self.screensaver.show()
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)
def keypressScreenSaver(self, key, flag):
if flag:
self.screensaver.hide()
self.show()
self.ScreenSaverTimerStart()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class HideVBILine(Screen):
def __init__(self, session):
self.skin = """<screen position="0,0" size="%s,%s" flags="wfNoBorder" zPosition="1"/>""" % (getDesktop(0).size().width(), getDesktop(0).size().height() / 360 + 1)
Screen.__init__(self, session)
class SecondInfoBar(Screen):
def __init__(self, session, skinName):
Screen.__init__(self, session)
self.skinName = skinName
class InfoBarShowHide(InfoBarScreenSaver):
""" InfoBar show/hide control, accepts toggleShow and hide actions, might start
fancy animations. """
STATE_HIDDEN = 0
STATE_HIDING = 1
STATE_SHOWING = 2
STATE_SHOWN = 3
FLAG_HIDE_VBI = 512
def __init__(self):
self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
{
"toggleShow": self.okButtonCheck,
"hide": self.keyHide,
"toggleShowLong" : self.toggleShowLong,
"hideLong" : self.hideLong,
}, 1) # lower prio to make it possible to override ok and cancel..
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.serviceStarted,
})
InfoBarScreenSaver.__init__(self)
self.__state = self.STATE_SHOWN
self.__locked = 0
self.hideTimer = eTimer()
self.hideTimer.callback.append(self.doTimerHide)
self.hideTimer.start(5000, True)
self.onShow.append(self.__onShow)
self.onHide.append(self.__onHide)
self.onShowHideNotifiers = []
self.actualSecondInfoBarScreen = None
self.secondInfoBarScreen = None
if isStandardInfoBar(self):
self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar, "SecondInfoBar")
self.secondInfoBarScreen.show()
self.secondInfoBarScreenSimple = self.session.instantiateDialog(SecondInfoBar, "SecondInfoBarSimple")
self.secondInfoBarScreenSimple.show()
self.actualSecondInfoBarScreen = config.usage.show_simple_second_infobar.value and self.secondInfoBarScreenSimple.skinAttributes and self.secondInfoBarScreenSimple or self.secondInfoBarScreen
self.hideVBILineScreen = self.session.instantiateDialog(HideVBILine)
self.hideVBILineScreen.show()
self.onLayoutFinish.append(self.__layoutFinished)
self.onExecBegin.append(self.__onExecBegin)
def __onExecBegin(self):
self.clearScreenPath()
self.showHideVBI()
def __layoutFinished(self):
if self.actualSecondInfoBarScreen:
self.secondInfoBarScreen.hide()
self.secondInfoBarScreenSimple.hide()
self.hideVBILineScreen.hide()
def __onShow(self):
self.__state = self.STATE_SHOWN
for x in self.onShowHideNotifiers:
x(True)
self.startHideTimer()
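	# Added note: hiding is done as a short fade. doTimerHide() arms
	# DimmingTimer at 70 ms intervals; each doDimming() tick decrements
	# self.dimmed, and doHide() writes the scaled OSD alpha to
	# /proc/stb/video/alpha until the infobar is fully transparent.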
def doDimming(self):
if config.usage.show_infobar_do_dimming.value:
self.dimmed = self.dimmed-1
else:
self.dimmed = 0
self.DimmingTimer.stop()
self.doHide()
def unDimming(self):
self.unDimmingTimer.stop()
self.doWriteAlpha(config.av.osd_alpha.value)
def doWriteAlpha(self, value):
if fileExists("/proc/stb/video/alpha"):
f=open("/proc/stb/video/alpha","w")
f.write("%i" % (value))
f.close()
def __onHide(self):
self.unDimmingTimer = eTimer()
self.unDimmingTimer.callback.append(self.unDimming)
self.unDimmingTimer.start(100, True)
self.__state = self.STATE_HIDDEN
if self.actualSecondInfoBarScreen:
self.actualSecondInfoBarScreen.hide()
for x in self.onShowHideNotifiers:
x(False)
def toggleShowLong(self):
if not config.usage.ok_is_channelselection.value:
self.toggleSecondInfoBar()
def hideLong(self):
if config.usage.ok_is_channelselection.value:
self.toggleSecondInfoBar()
def toggleSecondInfoBar(self):
if self.actualSecondInfoBarScreen and not self.shown and not self.actualSecondInfoBarScreen.shown and self.secondInfoBarScreenSimple.skinAttributes and self.secondInfoBarScreen.skinAttributes:
self.actualSecondInfoBarScreen.hide()
config.usage.show_simple_second_infobar.value = not config.usage.show_simple_second_infobar.value
config.usage.show_simple_second_infobar.save()
self.actualSecondInfoBarScreen = config.usage.show_simple_second_infobar.value and self.secondInfoBarScreenSimple or self.secondInfoBarScreen
self.showSecondInfoBar()
def keyHide(self):
if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
if config.usage.pip_hideOnExit.value == "popup":
self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
else:
self.hidePipOnExitCallback(True)
elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
self.toggleShow()
elif self.__state == self.STATE_SHOWN:
self.hide()
def hidePipOnExitCallback(self, answer):
		if answer:
self.showPiP()
def connectShowHideNotifier(self, fnc):
if not fnc in self.onShowHideNotifiers:
self.onShowHideNotifiers.append(fnc)
def disconnectShowHideNotifier(self, fnc):
if fnc in self.onShowHideNotifiers:
self.onShowHideNotifiers.remove(fnc)
def serviceStarted(self):
if self.execing:
if config.usage.show_infobar_on_zap.value:
self.doShow()
self.showHideVBI()
def startHideTimer(self):
if self.__state == self.STATE_SHOWN and not self.__locked:
self.hideTimer.stop()
if self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.shown:
idx = config.usage.show_second_infobar.index - 1
else:
idx = config.usage.infobar_timeout.index
if idx:
self.hideTimer.startLongTimer(idx)
def doShow(self):
self.show()
self.startHideTimer()
def doTimerHide(self):
self.hideTimer.stop()
#if self.__state == self.STATE_SHOWN:
# self.hide()
self.DimmingTimer = eTimer()
self.DimmingTimer.callback.append(self.doDimming)
self.DimmingTimer.start(70, True)
self.dimmed = config.usage.show_infobar_dimming_speed.value
def doHide(self):
if self.__state != self.STATE_HIDDEN:
self.doWriteAlpha((config.av.osd_alpha.value*self.dimmed/config.usage.show_infobar_dimming_speed.value))
if self.dimmed > 0:
self.DimmingTimer.start(70, True)
else:
self.DimmingTimer.stop()
if self.__state == self.STATE_SHOWN:
self.hide()
if hasattr(self, "pvrStateDialog"):
try:
self.pvrStateDialog.hide()
except:
pass
elif self.__state == self.STATE_HIDDEN and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
def okButtonCheck(self):
if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
if isinstance(self, InfoBarTimeshift) and self.timeshiftEnabled() and isinstance(self, InfoBarSeek) and self.seekstate == self.SEEK_STATE_PAUSE:
return
self.openServiceList()
else:
self.toggleShow()
def toggleShow(self):
if self.__state == self.STATE_HIDDEN:
self.showFirstInfoBar()
else:
self.showSecondInfoBar()
def showSecondInfoBar(self):
if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
if not(hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
self.showDefaultEPG()
elif self.actualSecondInfoBarScreen and config.usage.show_second_infobar.value and not self.actualSecondInfoBarScreen.shown:
self.show()
self.actualSecondInfoBarScreen.show()
self.startHideTimer()
else:
self.hide()
self.hideTimer.stop()
def showFirstInfoBar(self):
if self.__state == self.STATE_HIDDEN or self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.shown:
self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.hide()
self.show()
else:
self.hide()
self.hideTimer.stop()
def lockShow(self):
self.__locked = self.__locked + 1
if self.execing:
self.show()
self.hideTimer.stop()
def unlockShow(self):
self.__locked = self.__locked - 1
if self.execing:
self.startHideTimer()
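	# Added note: FLAG_HIDE_VBI (512) is a per-service flag stored in the
	# service database; when it is set (or when a recording's path contains
	# ".hidvbi."), the HideVBILine screen (a thin borderless window across the
	# top of the display) is shown to mask the VBI/teletext lines.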
def checkHideVBI(self):
service = self.session.nav.getCurrentlyPlayingServiceReference()
servicepath = service and service.getPath()
if servicepath and servicepath.startswith("/"):
if service.toString().startswith("1:"):
info = eServiceCenter.getInstance().info(service)
service = info and info.getInfoString(service, iServiceInformation.sServiceref)
return service and eDVBDB.getInstance().getFlag(eServiceReference(service)) & self.FLAG_HIDE_VBI and True
else:
return ".hidvbi." in servicepath.lower()
service = self.session.nav.getCurrentService()
info = service and service.info()
return info and info.getInfo(iServiceInformation.sHideVBI)
def showHideVBI(self):
if self.checkHideVBI():
self.hideVBILineScreen.show()
else:
self.hideVBILineScreen.hide()
def ToggleHideVBI(self):
service = self.session.nav.getCurrentlyPlayingServiceReference()
servicepath = service and service.getPath()
if not servicepath:
if eDVBDB.getInstance().getFlag(service) & self.FLAG_HIDE_VBI:
eDVBDB.getInstance().removeFlag(service, self.FLAG_HIDE_VBI)
else:
eDVBDB.getInstance().addFlag(service, self.FLAG_HIDE_VBI)
eDVBDB.getInstance().reloadBouquets()
self.showHideVBI()
class BufferIndicator(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self["status"] = Label()
self.mayShow = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evBuffering: self.bufferChanged,
iPlayableService.evStart: self.__evStart,
iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
})
def bufferChanged(self):
if self.mayShow:
service = self.session.nav.getCurrentService()
info = service and service.info()
if info:
value = info.getInfo(iServiceInformation.sBuffer)
if value and value != 100:
self["status"].setText(_("Buffering %d%%") % value)
if not self.shown:
self.show()
def __evStart(self):
self.mayShow = True
self.hide()
def __evGstreamerPlayStarted(self):
self.mayShow = False
self.hide()
class InfoBarBuffer():
def __init__(self):
self.bufferScreen = self.session.instantiateDialog(BufferIndicator)
self.bufferScreen.hide()
class NumberZap(Screen):
def quit(self):
self.Timer.stop()
self.close()
def keyOK(self):
self.Timer.stop()
self.close(self.service, self.bouquet)
def handleServiceName(self):
if self.searchNumber:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
if not self.startBouquet:
self.startBouquet = self.bouquet
def keyBlue(self):
self.Timer.start(3000, True)
if self.searchNumber:
if self.startBouquet == self.bouquet:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
else:
self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
def keyNumberGlobal(self, number):
		if config.usage.numzaptimeoutmode.value != "off":
			if config.usage.numzaptimeoutmode.value == "standard":
self.Timer.start(1000, True)
else:
self.Timer.start(int(config.usage.numzaptimeout2.value), True)
self.numberString = self.numberString + str(number)
self["number"].text = self["number_summary"].text = self.numberString
self.field = self.numberString
self.handleServiceName()
if len(self.numberString) >= int(config.usage.maxchannelnumlen.value):
self.keyOK()
def __init__(self, session, number, searchNumberFunction = None):
Screen.__init__(self, session)
self.numberString = str(number)
self.field = str(number)
self.searchNumber = searchNumberFunction
self.startBouquet = None
self["channel"] = Label(_("Channel:"))
self["number"] = Label(self.numberString)
self["servicename"] = Label()
self["channel_summary"] = StaticText(_("Channel:"))
self["number_summary"] = StaticText(self.numberString)
self["servicename_summary"] = StaticText()
self.handleServiceName()
self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
{
"cancel": self.quit,
"ok": self.keyOK,
"blue": self.keyBlue,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal
})
self.Timer = eTimer()
self.Timer.callback.append(self.keyOK)
		if config.usage.numzaptimeoutmode.value != "off":
			if config.usage.numzaptimeoutmode.value == "standard":
self.Timer.start(3000, True)
else:
self.Timer.start(int(config.usage.numzaptimeout1.value), True)
class InfoBarNumberZap:
""" Handles an initial number for NumberZapping """
def __init__(self):
self["NumberActions"] = NumberActionMap( [ "NumberActions"],
{
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal,
})
def keyNumberGlobal(self, number):
if number == 0:
if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
self.pipDoHandle0Action()
elif len(self.servicelist.history) > 1:
self.checkTimeshiftRunning(self.recallPrevService)
else:
if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
ts = self.getTimeshift()
if ts and ts.isTimeshiftActive():
return
self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)
def recallPrevService(self, reply):
if reply:
self.servicelist.history_tv = []
self.servicelist.history_radio = []
self.servicelist.recallPrevService()
def numberEntered(self, service = None, bouquet = None):
if service:
self.selectAndStartService(service, bouquet)
def searchNumberHelper(self, serviceHandler, num, bouquet):
servicelist = serviceHandler.list(bouquet)
if servicelist:
serviceIterator = servicelist.getNext()
while serviceIterator.valid():
if num == serviceIterator.getChannelNum():
return serviceIterator
serviceIterator = servicelist.getNext()
return None
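	# Added note: searchNumber() below first looks in the given (or current)
	# bouquet unless firstBouquetOnly is set; with multibouquet enabled it then
	# walks every visible bouquet directory, accepting only services that are
	# playable (or numbered markers), and returns the (service, bouquet) pair
	# it ends on.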
def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
bouquet = bouquet or self.servicelist.getRoot()
service = None
serviceHandler = eServiceCenter.getInstance()
if not firstBouquetOnly:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if config.usage.multibouquet.value and not service:
bouquet = self.servicelist.bouquet_root
bouquetlist = serviceHandler.list(bouquet)
if bouquetlist:
bouquet = bouquetlist.getNext()
while bouquet.valid():
if bouquet.flags & eServiceReference.isDirectory and not bouquet.flags & eServiceReference.isInvisible:
service = self.searchNumberHelper(serviceHandler, number, bouquet)
if service:
playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
if not playable:
service = None
break
if config.usage.alternative_number_mode.value or firstBouquetOnly:
break
bouquet = bouquetlist.getNext()
return service, bouquet
def selectAndStartService(self, service, bouquet):
if service and not service.flags & eServiceReference.isMarker:
if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
self.servicelist.clearPath()
if self.servicelist.bouquet_root != bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(bouquet)
self.servicelist.setCurrentSelection(service) #select the service in servicelist
self.servicelist.zap(enable_pipzap = True)
self.servicelist.correctChannelNumber()
self.servicelist.startRoot = None
def zapToNumber(self, number):
service, bouquet = self.searchNumber(number)
self.selectAndStartService(service, bouquet)
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
""" ChannelSelection - handles the channelSelection dialog and the initial
channelChange actions which open the channelSelection dialog """
def __init__(self):
#instantiate forever
self.servicelist = self.session.instantiateDialog(ChannelSelection)
if config.misc.initialchannelselection.value:
self.onShown.append(self.firstRun)
self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
{
"keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
"keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
"keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
"keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
"historyBack": (self.historyBack, _("Switch to previous channel in history")),
"historyNext": (self.historyNext, _("Switch to next channel in history")),
"openServiceList": (self.openServiceList, _("Open service list")),
"openhistorybrowser": (self.openHistoryBrowser, _("open history browser")),
#"opendevicemanager": (self.openDeviceManager, _("open device manager")),
#"openaroraplugins": (self.openAroraPlugins, _("open Arora Browser")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
"openBouquetList": (self.openBouquetList, _("open bouquetlist")),
"keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
"keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
})
def openHistoryBrowser(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/ZapHistoryBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Zap-Historie Browser") or plugin.name == _("Zap-History Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Zap-History Browser plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openDeviceManager(self):
if fileExists("/usr/lib/enigma2/python/Plugins/SystemPlugins/DeviceManager/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Device Manager - Fast Mounted Remove"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The Device Manager plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def openAroraPlugins(self):
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/WebBrowser/plugin.pyo"):
for plugin in plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EVENTINFO]):
if plugin.name == _("Web Browser"):
self.runPlugin(plugin)
break
else:
self.session.open(MessageBox, _("The WebBrowser is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showPluginBrowser(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
self.secondInfoBarWasShown = False
from Screens.PluginBrowser import PluginBrowser
self.session.open(PluginBrowser)
def showTvChannelList(self, zap=False):
self.servicelist.setModeTv()
if zap:
self.servicelist.zap()
def showRadioChannelList(self, zap=False):
self.servicelist.setModeRadio()
if zap:
self.servicelist.zap()
def firstRun(self):
self.onShown.remove(self.firstRun)
config.misc.initialchannelselection.value = False
config.misc.initialchannelselection.save()
self.switchChannelDown()
def historyBack(self):
self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)
def historyBackCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyBack()
def historyNext(self):
self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)
def historyNextCheckTimeshiftCallback(self, answer):
if answer:
self.servicelist.historyNext()
def openBouquetList(self):
self.servicelist.showFavourites()
self.session.execDialog(self.servicelist)
def keyUpCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapDown()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelUp()
def keyDownCheck(self):
if config.usage.oldstyle_zap_controls.value:
self.zapUp()
elif config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelDown()
def keyLeftCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volDown()
else:
self.switchChannelUp()
else:
self.zapUp()
def keyRightCheck(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
VolumeControl.instance and VolumeControl.instance.volUp()
else:
self.switchChannelDown()
else:
self.zapDown()
def keyChannelUpCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapDown()
else:
self.openServiceList()
def keyChannelDownCheck(self):
if config.usage.zap_with_ch_buttons.value:
self.zapUp()
else:
self.openServiceList()
def getKeyUpHelptext(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to next channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
return value
def getKeyDownHelpText(self):
if config.usage.oldstyle_zap_controls.value:
value = _("Switch to previous channel")
else:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
return value
def getKeyLeftHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume down")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select previous channel")
else:
value = _("Switch to previous channel")
return value
def getKeyRightHelptext(self):
if config.usage.oldstyle_zap_controls.value:
if config.usage.volume_instead_of_channelselection.value:
value = _("Volume up")
else:
value = _("Open service list")
if not "keep" in config.usage.servicelist_cursor_behavior.value:
value += " " + _("and select next channel")
else:
value = _("Switch to next channel")
return value
def getKeyChannelUpHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to next channel") or _("Open service list")
def getKeyChannelDownHelptext(self):
return config.usage.zap_with_ch_buttons.value and _("Switch to previous channel") or _("Open service list")
def switchChannelUp(self):
if "keep" not in config.usage.servicelist_cursor_behavior.value:
self.servicelist.moveUp()
self.session.execDialog(self.servicelist)
def switchChannelDown(self):
if "keep" not in config.usage.servicelist_cursor_behavior.value:
self.servicelist.moveDown()
self.session.execDialog(self.servicelist)
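	# Added note: zapUp()/zapDown() below move the service-list cursor until
	# they reach a service that is currently playable (or get back to the
	# starting one); with quickzap_bouquet_change enabled the walk wraps into
	# the previous/next bouquet at the list edges.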
def zapUp(self):
if self.servicelist.inBouquet():
prev = self.servicelist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value:
if self.servicelist.atBegin():
self.servicelist.prevBouquet()
self.servicelist.moveUp()
cur = self.servicelist.getCurrentSelection()
if cur:
if self.servicelist.dopipzap:
isPlayable = self.session.pip.isPlayableForPipService(cur)
else:
isPlayable = isPlayableForCur(cur)
if cur and (cur.toString() == prev or isPlayable):
break
else:
self.servicelist.moveUp()
self.servicelist.zap(enable_pipzap = True)
def zapDown(self):
if self.servicelist.inBouquet():
prev = self.servicelist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
self.servicelist.nextBouquet()
else:
self.servicelist.moveDown()
cur = self.servicelist.getCurrentSelection()
if cur:
if self.servicelist.dopipzap:
isPlayable = self.session.pip.isPlayableForPipService(cur)
else:
isPlayable = isPlayableForCur(cur)
if cur and (cur.toString() == prev or isPlayable):
break
else:
self.servicelist.moveDown()
self.servicelist.zap(enable_pipzap = True)
def openFavouritesList(self):
self.servicelist.showFavourites()
self.openServiceList()
def openServiceList(self):
self.session.execDialog(self.servicelist)
class InfoBarMenu:
""" Handles a menu action, to open the (main) menu """
def __init__(self):
self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
{
"mainMenu": (self.mainMenu, _("Enter main menu...")),
})
self.session.infobar = None
def mainMenu(self):
print "loading mainmenu XML..."
menu = mdom.getroot()
assert menu.tag == "menu", "root element in menu must be 'menu'!"
self.session.infobar = self
# so we can access the currently active infobar from screens opened from within the mainmenu
# at the moment used from the SubserviceSelection
self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
def mainMenuClosed(self, *val):
self.session.infobar = None
class InfoBarSimpleEventView:
""" Opens the Eventview for now/next """
def __init__(self):
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"showEventInfo": (self.openEventView, _("Show event details")),
"showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
def showEventInfoWhenNotVisible(self):
if self.shown:
self.openEventView()
else:
self.toggleShow()
return 1
def openEventView(self):
epglist = [ ]
self.epglist = epglist
service = self.session.nav.getCurrentService()
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
info = service.info()
ptr=info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr=info.getEvent(1)
if ptr:
epglist.append(ptr)
if epglist:
self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0] = epglist[1]
epglist[1] = tmp
setEvent(epglist[0])
class SimpleServicelist:
def __init__(self, services):
self.setServices(services)
def setServices(self, services):
self.services = services
self.length = len(services)
self.current = 0
def selectService(self, service):
if not self.length:
self.current = -1
return False
else:
self.current = 0
while self.services[self.current].ref != service:
self.current += 1
if self.current >= self.length:
return False
return True
def nextService(self):
if not self.length:
return
if self.current+1 < self.length:
self.current += 1
else:
self.current = 0
def prevService(self):
if not self.length:
return
if self.current-1 > -1:
self.current -= 1
else:
self.current = self.length - 1
def currentService(self):
if not self.length or self.current >= self.length:
return None
return self.services[self.current]
class InfoBarEPG:
""" EPG - Opens an EPG list when the showEPGList action fires """
def __init__(self):
self.is_now_next = False
self.dlg_stack = [ ]
self.bouquetSel = None
self.eventView = None
self.epglist = []
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
})
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
"showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
"showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
#"showCurrentEvent": (self.openEventView, _("Show Current Info...")),
#"showSingleCurrentEPG": (self.openSingleServiceEPG, _("Show single channel EPG...")),
#"showBouquetEPG": (self.openMultiServiceEPG, _("Show Bouquet EPG...")),
##"showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")),
##"showEventGuidePlugin": (self.showEventGuidePlugins, _("List EPG functions...")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
def getEPGPluginList(self, getAll=False):
pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
from Components.ServiceEventTracker import InfoBarCount
if getAll or InfoBarCount == 1:
pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
return pluginlist
def showEventInfoWhenNotVisible(self):
if self.shown:
self.openEventView()
else:
self.toggleShow()
return 1
def zapToService(self, service, preview = False, zapback = False):
if self.servicelist.startServiceRef is None:
self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if service is not None:
if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
self.servicelist.clearPath()
if self.servicelist.bouquet_root != self.epg_bouquet:
self.servicelist.enterPath(self.servicelist.bouquet_root)
self.servicelist.enterPath(self.epg_bouquet)
self.servicelist.setCurrentSelection(service) #select the service in servicelist
if not zapback or preview:
self.servicelist.zap(enable_pipzap = True)
if (self.servicelist.dopipzap or zapback) and not preview:
self.servicelist.zapBack()
if not preview:
self.servicelist.startServiceRef = None
self.servicelist.startRoot = None
def getBouquetServices(self, bouquet):
services = [ ]
servicelist = eServiceCenter.getInstance().list(bouquet)
if not servicelist is None:
while True:
service = servicelist.getNext()
if not service.valid(): #check if end of list
break
if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
continue
services.append(ServiceReference(service))
return services
def openBouquetEPG(self, bouquet, withCallback=True):
services = self.getBouquetServices(bouquet)
if services:
self.epg_bouquet = bouquet
if withCallback:
self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
else:
self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)
def changeBouquetCB(self, direction, epg):
if self.bouquetSel:
if direction > 0:
self.bouquetSel.down()
else:
self.bouquetSel.up()
bouquet = self.bouquetSel.getCurrent()
services = self.getBouquetServices(bouquet)
if services:
self.epg_bouquet = bouquet
epg.setServices(services)
def selectBouquet(self, bouquetref, epg):
services = self.getBouquetServices(bouquetref)
if services:
self.epg_bouquet = bouquetref
self.serviceSel.setServices(services)
epg.setServices(services)
def setService(self, service):
if service:
self.serviceSel.selectService(service)
def closed(self, ret=False):
closedScreen = self.dlg_stack.pop()
if self.bouquetSel and closedScreen == self.bouquetSel:
self.bouquetSel = None
elif self.eventView and closedScreen == self.eventView:
self.eventView = None
if ret:
dlgs=len(self.dlg_stack)
if dlgs > 0:
self.dlg_stack[dlgs-1].close(dlgs > 1)
def openMultiServiceEPG(self, withCallback=True):
bouquets = self.servicelist.getBouquetList()
if bouquets is None:
cnt = 0
else:
cnt = len(bouquets)
if config.usage.multiepg_ask_bouquet.value:
self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
else:
self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)
def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
if cnt > 1: # show bouquet list
if withCallback:
self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
self.dlg_stack.append(self.bouquetSel)
else:
self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
elif cnt == 1:
self.openBouquetEPG(bouquets[0][1], withCallback)
def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
root = self.servicelist.getRoot()
rootstr = root.toCompareString()
current = 0
for bouquet in bouquets:
if bouquet[1].toCompareString() == rootstr:
break
current += 1
if current >= cnt:
current = 0
if cnt > 1: # create bouquet list for bouq+/-
self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
if cnt >= 1:
self.openBouquetEPG(root, withCallback)
def changeServiceCB(self, direction, epg):
if self.serviceSel:
if direction > 0:
self.serviceSel.nextService()
else:
self.serviceSel.prevService()
epg.setService(self.serviceSel.currentService())
def SingleServiceEPGClosed(self, ret=False):
self.serviceSel = None
def openSingleServiceEPG(self):
ref = self.servicelist.getCurrentSelection()
if ref:
if self.servicelist.getMutableList(): # bouquet in channellist
current_path = self.servicelist.getRoot()
services = self.getBouquetServices(current_path)
self.serviceSel = SimpleServicelist(services)
if self.serviceSel.selectService(ref):
self.epg_bouquet = current_path
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB, parent=self)
else:
self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
else:
self.session.open(EPGSelection, ref)
def runPlugin(self, plugin):
plugin(session = self.session, servicelist = self.servicelist)
def showEventInfoPlugins(self):
pluginlist = self.getEPGPluginList()
if pluginlist:
self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order", windowTitle=_("Events info menu"))
else:
self.openSingleServiceEPG()
def EventInfoPluginChosen(self, answer):
if answer is not None:
answer[1]()
def openSimilarList(self, eventid, refstr):
self.session.open(EPGSelection, refstr, None, eventid)
def getNowNext(self):
epglist = [ ]
service = self.session.nav.getCurrentService()
info = service and service.info()
ptr = info and info.getEvent(0)
if ptr and ptr.getEventName() != "":
epglist.append(ptr)
ptr = info and info.getEvent(1)
if ptr and ptr.getEventName() != "":
epglist.append(ptr)
self.epglist = epglist
def __evEventInfoChanged(self):
if self.is_now_next and len(self.dlg_stack) == 1:
self.getNowNext()
if self.eventView and self.epglist:
self.eventView.setEvent(self.epglist[0])
def showDefaultEPG(self):
self.openEventView()
def showSingleEPG(self):
self.openSingleServiceEPG()
def showMultiEPG(self):
self.openMultiServiceEPG()
def openEventView(self):
from Components.ServiceEventTracker import InfoBarCount
if InfoBarCount > 1:
epglist = [ ]
self.epglist = epglist
service = self.session.nav.getCurrentService()
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
info = service.info()
ptr=info.getEvent(0)
if ptr:
epglist.append(ptr)
ptr=info.getEvent(1)
if ptr:
epglist.append(ptr)
if epglist:
self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
else:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.getNowNext()
epglist = self.epglist
if not epglist:
self.is_now_next = False
epg = eEPGCache.getInstance()
ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
if ptr:
epglist.append(ptr)
ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
if ptr:
epglist.append(ptr)
else:
self.is_now_next = True
if epglist:
self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
self.dlg_stack.append(self.eventView)
if not epglist:
print "no epg for the service avail.. so we show multiepg instead of eventinfo"
self.openMultiServiceEPG(False)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0]=epglist[1]
epglist[1]=tmp
setEvent(epglist[0])
class InfoBarRdsDecoder:
"""provides RDS and Rass support/display"""
def __init__(self):
self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
if SystemInfo["hasOSDAnimation"]:
self.rds_display.setAnimationMode(0)
self.session.instantiateSummaryDialog(self.rds_display)
self.rass_interactive = None
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.__serviceStopped,
iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
})
self["RdsActions"] = ActionMap(["InfobarRdsActions"],
{
"startRassInteractive": self.startRassInteractive
},-1)
self["RdsActions"].setEnabled(False)
self.onLayoutFinish.append(self.rds_display.show)
self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
def RassInteractivePossibilityChanged(self, state):
self["RdsActions"].setEnabled(state)
def RassSlidePicChanged(self):
if not self.rass_interactive:
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
if decoder:
decoder.showRassSlidePicture()
def __serviceStopped(self):
if self.rass_interactive is not None:
rass_interactive = self.rass_interactive
self.rass_interactive = None
rass_interactive.close()
def startRassInteractive(self):
self.rds_display.hide()
self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
def RassInteractiveClosed(self, *val):
if self.rass_interactive is not None:
self.rass_interactive = None
self.RassSlidePicChanged()
self.rds_display.show()
class InfoBarSeek:
"""handles actions like seeking, pause"""
SEEK_STATE_PLAY = (0, 0, 0, ">")
SEEK_STATE_PAUSE = (1, 0, 0, "||")
SEEK_STATE_EOF = (1, 0, 0, "END")
def __init__(self, actionmap = "InfobarSeekActions"):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evEOF: self.__evEOF,
iPlayableService.evSOF: self.__evSOF,
})
self.fast_winding_hint_message_showed = False
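		# Added note: the inner action map below understands two synthetic
		# action names: "seek:<seconds>" seeks relative by that many seconds,
		# and "seekdef:<n>" looks up one of the user-defined skip widths
		# (config.seek.selfdefined_*). Times are converted to PTS ticks by
		# multiplying with 90000 (the 90 kHz MPEG clock).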
class InfoBarSeekActionMap(HelpableActionMap):
def __init__(self, screen, *args, **kwargs):
HelpableActionMap.__init__(self, screen, *args, **kwargs)
self.screen = screen
def action(self, contexts, action):
print "action:", action
if action[:5] == "seek:":
time = int(action[5:])
self.screen.doSeekRelative(time * 90000)
return 1
elif action[:8] == "seekdef:":
key = int(action[8:])
time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
-config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
-config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
self.screen.doSeekRelative(time * 90000)
return 1
else:
return HelpableActionMap.action(self, contexts, action)
self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
{
"playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
"pauseService": (self.pauseService, _("Pause playback")),
"unPauseService": (self.unPauseService, _("Continue playback")),
"okButton": (self.okButton, _("Continue playback")),
"seekFwd": (self.seekFwd, _("Seek forward")),
"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
"seekBack": (self.seekBack, _("Seek backward")),
"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
"jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
"jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
}, prio=-1)
# give them a little more priority to win over color buttons
self["SeekActions"].setEnabled(False)
self.seekstate = self.SEEK_STATE_PLAY
self.lastseekstate = self.SEEK_STATE_PLAY
self.onPlayStateChanged = [ ]
self.lockedBecauseOfSkipping = False
self.__seekableStatusChanged()
def makeStateForward(self, n):
return (0, n, 0, ">> %dx" % n)
def makeStateBackward(self, n):
return (0, -n, 0, "<< %dx" % n)
def makeStateSlowMotion(self, n):
return (0, 0, n, "/%d" % n)
def isStateForward(self, state):
return state[1] > 1
def isStateBackward(self, state):
return state[1] < 0
def isStateSlowMotion(self, state):
return state[1] == 0 and state[2] > 1
def getHigher(self, n, lst):
for x in lst:
if x > n:
return x
return False
def getLower(self, n, lst):
lst = lst[:]
lst.reverse()
for x in lst:
if x < n:
return x
return False
def showAfterSeek(self):
if isinstance(self, InfoBarShowHide):
if isStandardInfoBar(self) and self.timeshiftEnabled():
for c in self.onPlayStateChanged:
c(self.seekstate)
else:
self.doShow()
def up(self):
pass
def down(self):
pass
def getSeek(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
seek = service.seek()
if seek is None or not seek.isCurrentlySeekable():
return None
return seek
def isSeekable(self):
if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
return False
return True
def __seekableStatusChanged(self):
# print "seekable status changed!"
if not self.isSeekable():
self["SeekActions"].setEnabled(False)
# print "not seekable, return to play"
self.setSeekState(self.SEEK_STATE_PLAY)
else:
self["SeekActions"].setEnabled(True)
# print "seekable"
def __serviceStarted(self):
self.fast_winding_hint_message_showed = False
self.setSeekState(self.SEEK_STATE_PLAY)
self.__seekableStatusChanged()
def setSeekState(self, state):
service = self.session.nav.getCurrentService()
if service is None:
return False
if not self.isSeekable():
if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
state = self.SEEK_STATE_PLAY
pauseable = service.pause()
if pauseable is None:
print "not pauseable."
state = self.SEEK_STATE_PLAY
self.seekstate = state
if pauseable is not None:
if self.seekstate[0]:
print "resolved to PAUSE"
pauseable.pause()
elif self.seekstate[1]:
if not pauseable.setFastForward(self.seekstate[1]):
print "resolved to FAST FORWARD"
else:
self.seekstate = self.SEEK_STATE_PLAY
print "FAST FORWARD not possible: resolved to PLAY"
elif self.seekstate[2]:
if not pauseable.setSlowMotion(self.seekstate[2]):
print "resolved to SLOW MOTION"
else:
self.seekstate = self.SEEK_STATE_PAUSE
print "SLOW MOTION not possible: resolved to PAUSE"
else:
print "resolved to PLAY"
pauseable.unpause()
for c in self.onPlayStateChanged:
c(self.seekstate)
self.checkSkipShowHideLock()
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
return True
def playpauseService(self):
if self.seekstate != self.SEEK_STATE_PLAY:
self.unPauseService()
else:
self.pauseService()
def okButton(self):
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
elif self.seekstate == self.SEEK_STATE_PAUSE:
self.pauseService()
else:
self.unPauseService()
def pauseService(self):
if self.seekstate == self.SEEK_STATE_PAUSE:
if config.seek.on_pause.value == "play":
self.unPauseService()
elif config.seek.on_pause.value == "step":
self.doSeekRelative(1)
elif config.seek.on_pause.value == "last":
self.setSeekState(self.lastseekstate)
self.lastseekstate = self.SEEK_STATE_PLAY
else:
if self.seekstate != self.SEEK_STATE_EOF:
self.lastseekstate = self.seekstate
self.setSeekState(self.SEEK_STATE_PAUSE)
def unPauseService(self):
print "unpause"
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
self.setSeekState(self.SEEK_STATE_PLAY)
def doSeek(self, pts):
seekable = self.getSeek()
if seekable is None:
return
seekable.seekTo(pts)
def doSeekRelative(self, pts):
seekable = self.getSeek()
if seekable is None:
return
prevstate = self.seekstate
if self.seekstate == self.SEEK_STATE_EOF:
if prevstate == self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_PAUSE)
else:
self.setSeekState(self.SEEK_STATE_PLAY)
seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
self.showAfterSeek()
def seekFwd(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
			return 0 # treat as unhandled action
if self.seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_PAUSE:
if len(config.seek.speeds_slowmotion.value):
self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
else:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_EOF:
pass
elif self.isStateForward(self.seekstate):
speed = self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
self.setSeekState(self.makeStateForward(speed))
elif self.isStateBackward(self.seekstate):
speed = -self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getLower(speed, config.seek.speeds_backward.value)
if speed:
self.setSeekState(self.makeStateBackward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateSlowMotion(self.seekstate):
speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
self.setSeekState(self.makeStateSlowMotion(speed))
def seekBack(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
			return 0 # treat as unhandled action
seekstate = self.seekstate
if seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
elif seekstate == self.SEEK_STATE_EOF:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
self.doSeekRelative(-6)
elif seekstate == self.SEEK_STATE_PAUSE:
self.doSeekRelative(-1)
elif self.isStateForward(seekstate):
speed = seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getLower(speed, config.seek.speeds_forward.value)
if speed:
self.setSeekState(self.makeStateForward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateBackward(seekstate):
speed = -seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
self.setSeekState(self.makeStateBackward(speed))
elif self.isStateSlowMotion(seekstate):
speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
if speed:
self.setSeekState(self.makeStateSlowMotion(speed))
else:
self.setSeekState(self.SEEK_STATE_PAUSE)
def seekFwdManual(self):
self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
def fwdSeekTo(self, minutes):
print "Seek", minutes, "minutes forward"
self.doSeekRelative(minutes * 60 * 90000)
def seekBackManual(self):
self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
def rwdSeekTo(self, minutes):
print "rwdSeekTo"
self.doSeekRelative(-minutes * 60 * 90000)
def checkSkipShowHideLock(self):
wantlock = self.seekstate != self.SEEK_STATE_PLAY
if config.usage.show_infobar_on_skip.value:
if self.lockedBecauseOfSkipping and not wantlock:
self.unlockShow()
self.lockedBecauseOfSkipping = False
if wantlock and not self.lockedBecauseOfSkipping:
self.lockShow()
self.lockedBecauseOfSkipping = True
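	# Added note: positions and lengths below are in 90 kHz PTS ticks, so
	# (length - position) / 90 yields milliseconds; the result is then scaled
	# by the current playback speed (speed denominator / speed numerator).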
def calcRemainingTime(self):
seekable = self.getSeek()
if seekable is not None:
len = seekable.getLength()
try:
tmp = self.cueGetEndCutPosition()
if tmp:
len = (False, tmp)
except:
pass
pos = seekable.getPlayPosition()
speednom = self.seekstate[1] or 1
speedden = self.seekstate[2] or 1
if not len[0] and not pos[0]:
if len[1] <= pos[1]:
return 0
time = (len[1] - pos[1])*speedden/(90*speednom)
return time
return False
def __evEOF(self):
if self.seekstate == self.SEEK_STATE_EOF:
return
# if we are seeking forward, we try to end up ~1s before the end, and pause there.
seekstate = self.seekstate
if self.seekstate != self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_EOF)
if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-1)
if seekstate == self.SEEK_STATE_PLAY: # regular EOF
self.doEofInternal(True)
else:
self.doEofInternal(False)
def doEofInternal(self, playing):
pass # Defined in subclasses
def __evSOF(self):
self.setSeekState(self.SEEK_STATE_PLAY)
self.doSeek(0)
	# This is needed because some media players use InfoBarSeek but not InfoBarCueSheetSupport
def seekPreviousMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpPreviousMark()
def seekNextMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
class InfoBarPVRState:
def __init__(self, screen=PVRState, force_show = False):
self.onPlayStateChanged.append(self.__playStateChanged)
self.pvrStateDialog = self.session.instantiateDialog(screen)
if SystemInfo["hasOSDAnimation"]:
self.pvrStateDialog.setAnimationMode(0)
self.onShow.append(self._mayShow)
self.onHide.append(self.pvrStateDialog.hide)
self.force_show = force_show
def _mayShow(self):
if self.shown and self.seekstate != self.SEEK_STATE_PLAY:
self.pvrStateDialog.show()
def __playStateChanged(self, state):
playstateString = state[3]
self.pvrStateDialog["state"].setText(playstateString)
# if we return into "PLAY" state, ensure that the dialog gets hidden if there will be no infobar displayed
if not config.usage.show_infobar_on_skip.value and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show:
self.pvrStateDialog.hide()
else:
self._mayShow()
class TimeshiftLive(Screen):
def __init__(self, session):
Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
def __init__(self):
InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show=True)
self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
self.onHide.append(self.timeshiftLiveScreen.hide)
if isStandardInfoBar(self):
self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
self.secondInfoBarScreenSimple and self.secondInfoBarScreenSimple.onShow.append(self.timeshiftLiveScreen.hide)
self.timeshiftLiveScreen.hide()
self.__hideTimer = eTimer()
self.__hideTimer.callback.append(self.__hideTimeshiftState)
self.onFirstExecBegin.append(self.pvrStateDialog.show)
def _mayShow(self):
if self.timeshiftEnabled():
if isStandardInfoBar(self):
if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
self.secondInfoBarScreen.hide()
if self.secondInfoBarScreenSimple and self.secondInfoBarScreenSimple.shown:
self.secondInfoBarScreenSimple.hide()
if self.timeshiftActivated():
self.pvrStateDialog.show()
self.timeshiftLiveScreen.hide()
elif self.showTimeshiftState:
self.pvrStateDialog.hide()
self.timeshiftLiveScreen.show()
self.showTimeshiftState = False
if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
else:
self.__hideTimeshiftState()
def __hideTimeshiftState(self):
self.pvrStateDialog.hide()
self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
	# I don't really like this class.
	# It calls an otherwise unspecified "movie list" on up/down/movieList,
	# so it is little more than an action map.
def __init__(self):
self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
{
"movieList": (self.showMovies, _("Open the movie list")),
"up": (self.up, _("Open the movie list")),
"down": (self.down, _("Open the movie list"))
})
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again FILE record PLAY enable disable enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftActivateActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
# the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
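# As an illustrative sketch only, the state table above expressed as data
# (this constant is hypothetical and unused by the class below):
_TIMESHIFT_PHASE_SKETCH = {
# phase: (demux0 source, "TimeshiftActivateActions" enabled, "SeekActions" enabled)
"live playback": ("TUNER", False, False),
"timeshift active (pause/ff/rew)": ("FILE", False, True),
"end of buffer, back to live": ("TUNER", True, False),
}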
class InfoBarTimeshift:
ts_disabled = False
def __init__(self):
self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
{
"timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
"timeshiftStop": (self.stopTimeshift, _("Stop timeshift")) # currently undefined :), probably 'TV'
}, prio=1)
self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
{
"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
}, prio=-1) # priority over record
self["TimeshiftActivateActions"].setEnabled(False)
self.ts_rewind_timer = eTimer()
self.ts_rewind_timer.callback.append(self.rewindService)
self.ts_start_delay_timer = eTimer()
self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
self.ts_current_event_timer = eTimer()
self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
self.save_timeshift_file = False
self.timeshift_was_activated = False
self.showTimeshiftState = False
self.save_timeshift_only_current_event = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evEnd: self.__serviceEnd
})
def getTimeshift(self):
if self.ts_disabled:
return None
service = self.session.nav.getCurrentService()
return service and service.timeshift()
def timeshiftEnabled(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftEnabled()
def timeshiftActivated(self):
ts = self.getTimeshift()
return ts and ts.isTimeshiftActive()
def startTimeshift(self, pauseService = True):
print "enable timeshift"
ts = self.getTimeshift()
if ts is None:
if not pauseService and not int(config.usage.timeshift_start_delay.value):
self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
print "no ts interface"
return 0
if ts.isTimeshiftEnabled():
print "hu, timeshift already enabled?"
else:
if not ts.startTimeshift():
# we remove the "relative time" for now.
#self.pvrStateDialog["timeshift"].setRelative(time.time())
if pauseService:
# PAUSE.
#self.setSeekState(self.SEEK_STATE_PAUSE)
self.activateTimeshiftEnd(False)
self.showTimeshiftState = True
else:
self.showTimeshiftState = False
# enable the "TimeshiftEnableActions", which will override
# the startTimeshift actions
self.__seekableStatusChanged()
# get current timeshift filename and calculate new
self.save_timeshift_file = False
self.save_timeshift_in_movie_dir = False
self.setCurrentEventTimer()
self.current_timeshift_filename = ts.getTimeshiftFilename()
self.new_timeshift_filename = self.generateNewTimeshiftFileName()
else:
print "timeshift failed"
def startTimeshiftWithoutPause(self):
self.startTimeshift(False)
def stopTimeshift(self):
ts = self.getTimeshift()
if ts and ts.isTimeshiftEnabled():
if int(config.usage.timeshift_start_delay.value):
ts.switchToLive()
else:
self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
else:
return 0
def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
ts = self.getTimeshift()
if answer and ts:
ts.stopTimeshift()
self.pvrStateDialog.hide()
self.setCurrentEventTimer()
# disable actions
self.__seekableStatusChanged()
# activates timeshift, and seeks to (almost) the end
def activateTimeshiftEnd(self, back = True):
self.showTimeshiftState = True
ts = self.getTimeshift()
print "activateTimeshiftEnd"
if ts is None:
return
if ts.isTimeshiftActive():
print "!! activate timeshift called - but shouldn't this be a normal pause?"
self.pauseService()
else:
print "play, ..."
ts.activateTimeshift() # activate timeshift will automatically pause
self.setSeekState(self.SEEK_STATE_PAUSE)
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-90000) # seek approx. 1 sec before end
self.timeshift_was_activated = True
if back:
self.ts_rewind_timer.start(200, 1)
def rewindService(self):
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
# generates only filename without path
def generateNewTimeshiftFileName(self):
name = "timeshift record"
info = { }
self.getProgramInfoAndEvent(info, name)
serviceref = info["serviceref"]
service_name = ""
if isinstance(serviceref, eServiceReference):
service_name = ServiceReference(serviceref).getServiceName()
begin_date = strftime("%Y%m%d %H%M", localtime(time()))
filename = begin_date + " - " + service_name
if config.recording.filename_composition.value == "event":
filename = name + ' - ' + begin_date + "_" + service_name
elif config.recording.filename_composition.value == "veryshort":
filename = service_name + " - " + begin_date
elif config.recording.filename_composition.value == "short":
filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
elif config.recording.filename_composition.value == "long":
filename += " - " + info["name"] + " - " + info["description"]
else:
filename += " - " + info["name"] # standard
if config.recording.ascii_filenames.value:
filename = ASCIItranslit.legacyEncode(filename)
print "New timeshift filename: ", filename
return filename
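# Illustrative outputs of the compositions above, assuming service
# "Das Erste" and event "Tagesschau" at 2017-01-01 20:30 (made-up values):
# standard: "20170101 2030 - Das Erste - Tagesschau"
# event: "timeshift record - 20170101 2030_Das Erste" (uses the local 'name')
# veryshort: "Das Erste - 20170101 2030"
# short: "20170101 - Tagesschau"
# long: "20170101 2030 - Das Erste - Tagesschau - <description>"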
# same as activateTimeshiftEnd, but pauses afterwards.
def activateTimeshiftEndAndPause(self):
print "activateTimeshiftEndAndPause"
#state = self.seekstate
self.activateTimeshiftEnd(False)
def callServiceStarted(self):
self.__serviceStarted()
def __seekableStatusChanged(self):
self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
state = self.getSeek() is not None and self.timeshiftEnabled()
self["SeekActions"].setEnabled(state)
if not state:
self.setSeekState(self.SEEK_STATE_PLAY)
self.restartSubtitle()
def __serviceStarted(self):
self.pvrStateDialog.hide()
self.__seekableStatusChanged()
if self.ts_start_delay_timer.isActive():
self.ts_start_delay_timer.stop()
if int(config.usage.timeshift_start_delay.value):
self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)
def checkTimeshiftRunning(self, returnFunction):
if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
message = _("Stop timeshift?")
if not self.save_timeshift_file:
choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
else:
choice = [(_("Yes"), "stop"), (_("No"), "continue")]
message += "\n" + _("Reminder, you have chosen to save timeshift file.")
if self.save_timeshift_only_current_event:
remaining = self.currentEventTime()
if remaining > 0:
message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
else:
returnFunction(True)
def checkTimeshiftRunningCallback(self, returnFunction, answer):
if answer:
if "movie" in answer:
self.save_timeshift_in_movie_dir = True
if "save" in answer:
self.save_timeshift_file = True
ts = self.getTimeshift()
if ts:
ts.saveTimeshiftFile()
del ts
if "continue" not in answer:
self.saveTimeshiftFiles()
returnFunction(answer and answer != "continue")
# renames/moves timeshift files if requested
def __serviceEnd(self):
self.saveTimeshiftFiles()
self.setCurrentEventTimer()
self.timeshift_was_activated = False
def saveTimeshiftFiles(self):
if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
dirname = config.usage.timeshift_path.value
else:
dirname = defaultMoviePath()
filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"
fileList = []
fileList.append((self.current_timeshift_filename, filename))
if fileExists(self.current_timeshift_filename + ".sc"):
fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
if fileExists(self.current_timeshift_filename + ".cuts"):
fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))
moveFiles(fileList)
self.save_timeshift_file = False
self.setCurrentEventTimer()
def currentEventTime(self):
remaining = 0
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref:
epg = eEPGCache.getInstance()
event = epg.lookupEventTime(ref, -1, 0)
if event:
now = int(time())
start = event.getBeginTime()
duration = event.getDuration()
end = start + duration
remaining = end - now
return remaining
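# Example: if the running event began at 20:00 with a 45 minute duration
# and it is now 20:30, currentEventTime() returns 900 (seconds remaining).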
def saveTimeshiftFileForEvent(self):
if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
message = _("Current event is over.\nSelect an option to save the timeshift file.")
choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)
def saveTimeshiftFileForEventCallback(self, answer):
self.save_timeshift_only_current_event = False
if answer:
ts = self.getTimeshift()
if ts and answer in ("save", "restart", "stop"):
self.stopTimeshiftcheckTimeshiftRunningCallback(True)
if answer in ("save", "restart"):
ts.saveTimeshiftFile()
del ts
self.saveTimeshiftFiles()
if answer == "restart":
self.ts_start_delay_timer.start(1000, True)
self.save_timeshift_file = False
self.save_timeshift_in_movie_dir = False
def setCurrentEventTimer(self, duration=0):
self.ts_current_event_timer.stop()
self.save_timeshift_only_current_event = False
if duration > 0:
self.save_timeshift_only_current_event = True
self.ts_current_event_timer.startLongTimer(duration)
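# setCurrentEventTimer(remaining) arms a one-shot timer that fires when the
# current event ends, triggering saveTimeshiftFileForEvent(); the default
# duration=0 simply cancels any pending prompt.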
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
EXTENSION_SINGLE = 0
EXTENSION_LIST = 1
def __init__(self):
self.list = []
self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
{
"extensions": (self.showExtensionSelection, _("Show extensions...")),
}, 1) # lower priority
def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
self.list.append((type, extension, key))
def updateExtension(self, extension, key = None):
self.extensionsList.append(extension)
if key is not None:
if self.extensionKeys.has_key(key):
key = None
if key is None:
for x in self.availableKeys:
if not self.extensionKeys.has_key(x):
key = x
break
if key is not None:
self.extensionKeys[key] = len(self.extensionsList) - 1
def updateExtensions(self):
self.extensionsList = []
self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
self.extensionKeys = {}
for x in self.list:
if x[0] == self.EXTENSION_SINGLE:
self.updateExtension(x[1], x[2])
else:
for y in x[1]():
self.updateExtension(y[0], y[1])
def showExtensionSelection(self):
self.updateExtensions()
extensionsList = self.extensionsList[:]
keys = []
list = []
for x in self.availableKeys:
if self.extensionKeys.has_key(x):
entry = self.extensionKeys[x]
extension = self.extensionsList[entry]
if extension[2]():
name = str(extension[0]())
list.append((extension[0](), extension))
keys.append(x)
extensionsList.remove(extension)
else:
extensionsList.remove(extension)
list.extend([(x[0](), x) for x in extensionsList])
keys += [""] * len(extensionsList)
self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=list, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order", windowTitle=_("Extensions menu"))
def extensionCallback(self, answer):
if answer is not None:
answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
def __init__(self):
self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)
def getPluginName(self, name):
return name
def getPluginList(self):
l = []
for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
args = inspect.getargspec(p.__call__)[0]
if len(args) == 1 or len(args) == 2 and isinstance(self, InfoBarChannelSelection):
l.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
l.sort(key = lambda e: e[2]) # sort by name
return l
def runPlugin(self, plugin):
if isinstance(self, InfoBarChannelSelection):
plugin(session = self.session, servicelist = self.servicelist)
else:
plugin(session = self.session)
from Components.Task import job_manager
class InfoBarJobman:
def __init__(self):
self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)
def getJobList(self):
return [((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None) for job in job_manager.getPendingJobs()]
def getJobName(self, job):
return "%s: %s (%d%%)" % (job.getStatustext(), job.name, int(100*job.progress/float(job.end)))
def showJobView(self, job):
from Screens.TaskView import JobView
job_manager.in_background = False
self.session.openWithCallback(self.JobViewCB, JobView, job)
def JobViewCB(self, in_background):
job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
def __init__(self):
try:
self.session.pipshown
except:
self.session.pipshown = False
self.lastPiPService = None
if SystemInfo["PIPAvailable"]:
self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
{
"activatePiP": (self.activePiP, self.activePiPName),
})
if self.allowPiP:
self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
else:
self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.lastPiPServiceTimeoutTimer = eTimer()
self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
def pipShown(self):
return self.session.pipshown
def pipHandles0Action(self):
return self.pipShown() and config.usage.pip_zero_button.value != "standard"
def getShowHideName(self):
if self.session.pipshown:
return _("Disable Picture in Picture")
else:
return _("Activate Picture in Picture")
def getSwapName(self):
return _("Swap services")
def getMoveName(self):
return _("Picture in Picture Setup")
def getTogglePipzapName(self):
slist = self.servicelist
if slist and slist.dopipzap:
return _("Zap focus to main screen")
return _("Zap focus to Picture in Picture")
def togglePipzap(self):
if not self.session.pipshown:
self.showPiP()
slist = self.servicelist
if slist and self.session.pipshown:
slist.togglePipzap()
if slist.dopipzap:
currentServicePath = slist.getCurrentServicePath()
slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
self.session.pip.servicePath = currentServicePath
def showPiP(self):
self.lastPiPServiceTimeoutTimer.stop()
slist = self.servicelist
if self.session.pipshown:
if slist and slist.dopipzap:
self.togglePipzap()
if self.session.pipshown:
lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
if lastPiPServiceTimeout >= 0:
self.lastPiPService = self.session.pip.getCurrentService()
if lastPiPServiceTimeout:
self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
del self.session.pip
if SystemInfo["LCDMiniTV"]:
if config.lcd.modepip.value >= "1":
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.modeminitv.value)
f.close()
self.session.pipshown = False
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
else:
self.session.pip = self.session.instantiateDialog(PictureInPicture)
if SystemInfo["hasOSDAnimation"]:
self.session.pip.setAnimationMode(0)
self.session.pip.show()
newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceOrGroup() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
if SystemInfo["LCDMiniTV"]:
if config.lcd.modepip.value >= "1":
f = open("/proc/stb/lcd/mode", "w")
f.write(config.lcd.modepip.value)
f.close()
f = open("/proc/stb/vmpeg/1/dst_width", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_height", "w")
f.write("0")
f.close()
f = open("/proc/stb/vmpeg/1/dst_apply", "w")
f.write("1")
f.close()
else:
newservice = self.session.nav.getCurrentlyPlayingServiceOrGroup() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
else:
self.session.pipshown = False
del self.session.pip
if self.session.pipshown and hasattr(self, "screenSaverTimer"):
self.screenSaverTimer.stop()
self.lastPiPService = None
def clearLastPiPService(self):
self.lastPiPService = None
def activePiP(self):
if self.servicelist and self.servicelist.dopipzap or not self.session.pipshown:
self.showPiP()
else:
self.togglePipzap()
def activePiPName(self):
if self.servicelist and self.servicelist.dopipzap:
return _("Disable Picture in Picture")
if self.session.pipshown:
return _("Zap focus to Picture in Picture")
else:
return _("Activate Picture in Picture")
def swapPiP(self):
if self.pipShown():
swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
pipref = self.session.pip.getCurrentService()
if swapservice and pipref and pipref.toString() != swapservice.toString():
slist = self.servicelist
if slist:
currentServicePath = slist.getCurrentServicePath()
currentBouquet = slist.getRoot()
slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
self.session.pip.playService(swapservice)
self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
if slist:
self.session.pip.servicePath = currentServicePath
self.session.pip.servicePath[1] = currentBouquet
if slist and slist.dopipzap:
slist.setCurrentSelection(self.session.pip.getCurrentService())
def movePiP(self):
if self.pipShown():
self.session.open(PiPSetup, pip = self.session.pip)
def pipDoHandle0Action(self):
use = config.usage.pip_zero_button.value
if "swap" == use:
self.swapPiP()
elif "swapstop" == use:
self.swapPiP()
self.showPiP()
elif "stop" == use:
self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
"""Instant Record - handles the instantRecord action in order to
start/stop instant records"""
def __init__(self):
self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
{
"instantRecord": (self.instantRecord, _("Instant recording...")),
})
self.SelectedInstantServiceRef = None
if isStandardInfoBar(self):
self.recording = []
else:
from Screens.InfoBar import InfoBar
InfoBarInstance = InfoBar.instance
if InfoBarInstance:
self.recording = InfoBarInstance.recording
def moveToTrash(self, entry):
print "instantRecord stop and delete recording: ", entry.name
import Tools.Trashcan
trash = Tools.Trashcan.createTrashFolder(entry.Filename)
from MovieSelection import moveServiceFiles
moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
def stopCurrentRecording(self, entry = -1):
def confirm(answer=False):
if answer:
self.session.nav.RecordTimer.removeEntry(self.recording[entry])
if self.deleteRecording:
self.moveToTrash(self.recording[entry])
self.recording.remove(self.recording[entry])
if entry is not None and entry != -1:
msg = _("Stop recording:")
if self.deleteRecording:
msg = _("Stop and delete recording:")
msg += "\n"
msg += " - " + self.recording[entry].name + "\n"
self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
def stopAllCurrentRecordings(self, list):
def confirm(answer=False):
if answer:
for entry in list:
self.session.nav.RecordTimer.removeEntry(entry[0])
self.recording.remove(entry[0])
if self.deleteRecording:
self.moveToTrash(entry[0])
msg = _("Stop recordings:")
if self.deleteRecording:
msg = _("Stop and delete recordings:")
msg += "\n"
for entry in list:
msg += " - " + entry[0].name + "\n"
self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
def getProgramInfoAndEvent(self, info, name):
info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
# try to get event info
event = None
try:
epg = eEPGCache.getInstance()
event = epg.lookupEventTime(info["serviceref"], -1, 0)
if event is None:
if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
else:
service = self.session.nav.getCurrentService()
event = service and service.info().getEvent(0)
except:
pass
info["event"] = event
info["name"] = name
info["description"] = ""
info["eventid"] = None
if event is not None:
curEvent = parseEvent(event)
info["name"] = curEvent[2]
info["description"] = curEvent[3]
info["eventid"] = curEvent[4]
info["end"] = curEvent[1]
def startInstantRecording(self, limitEvent = False):
begin = int(time())
end = begin + 3600 # dummy
name = "instant record"
info = { }
self.getProgramInfoAndEvent(info, name)
serviceref = info["serviceref"]
event = info["event"]
if event is not None:
if limitEvent:
end = info["end"]
else:
if limitEvent:
self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
if isinstance(serviceref, eServiceReference):
serviceref = ServiceReference(serviceref)
recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
recording.dontSave = True
if event is None or limitEvent == False:
recording.autoincrease = True
recording.setAutoincreaseEnd()
simulTimerList = self.session.nav.RecordTimer.record(recording)
if simulTimerList is None: # no conflict
recording.autoincrease = False
self.recording.append(recording)
else:
if len(simulTimerList) > 1: # with other recording
name = simulTimerList[1].name
name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
print "[TIMER] conflicts with", name_date
recording.autoincrease = True # start with max available length, then increment
if recording.setAutoincreaseEnd():
self.session.nav.RecordTimer.record(recording)
self.recording.append(recording)
self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
else:
self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
else:
self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
recording.autoincrease = False
def isInstantRecordRunning(self):
print "self.recording:", self.recording
if self.recording:
for x in self.recording:
if x.isRunning():
return True
return False
def recordQuestionCallback(self, answer):
print "pre:\n", self.recording
if answer is None or answer[1] == "no":
return
list = []
recording = self.recording[:]
for x in recording:
if x not in self.session.nav.RecordTimer.timer_list:
self.recording.remove(x)
elif x.dontSave and x.isRunning():
list.append((x, False))
self.deleteRecording = False
if answer[1] == "changeduration":
if len(self.recording) == 1:
self.changeDuration(0)
else:
self.session.openWithCallback(self.changeDuration, TimerSelection, list)
elif answer[1] == "addrecordingtime":
if len(self.recording) == 1:
self.addRecordingTime(0)
else:
self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
elif answer[1] == "changeendtime":
if len(self.recording) == 1:
self.setEndtime(0)
else:
self.session.openWithCallback(self.setEndtime, TimerSelection, list)
elif answer[1] == "timer":
import TimerEdit
self.session.open(TimerEdit.TimerEditList)
elif answer[1] == "stop":
if len(self.recording) == 1:
self.stopCurrentRecording(0)
else:
self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
elif answer[1] == "stopdelete":
self.deleteRecording = True
if len(self.recording) == 1:
self.stopCurrentRecording(0)
else:
self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
elif answer[1] == "stopall":
self.stopAllCurrentRecordings(list)
elif answer[1] == "stopdeleteall":
self.deleteRecording = True
self.stopAllCurrentRecordings(list)
elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
if answer[1] == "manualduration":
self.changeDuration(len(self.recording)-1)
elif answer[1] == "manualendtime":
self.setEndtime(len(self.recording)-1)
elif "timeshift" in answer[1]:
ts = self.getTimeshift()
if ts:
ts.saveTimeshiftFile()
self.save_timeshift_file = True
if "movie" in answer[1]:
self.save_timeshift_in_movie_dir = True
if "event" in answer[1]:
remaining = self.currentEventTime()
if remaining > 0:
self.setCurrentEventTimer(remaining-15)
print "after:\n", self.recording
def setEndtime(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
dlg.setTitle(_("Please change recording endtime"))
def TimeDateInputClosed(self, ret):
if len(ret) > 1:
if ret[0]:
print "stopping recording at", strftime("%F %T", localtime(ret[1]))
if self.recording[self.selectedEntry].end != ret[1]:
self.recording[self.selectedEntry].autoincrease = False
self.recording[self.selectedEntry].end = ret[1]
self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
def changeDuration(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5 ", maxSize=True, type=Input.NUMBER)
def addRecordingTime(self, entry):
if entry is not None and entry >= 0:
self.selectedEntry = entry
self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want to add to the recording?"), text="5 ", maxSize=True, type=Input.NUMBER)
def inputAddRecordingTime(self, value):
if value:
print "added", int(value), "minutes for recording."
entry = self.recording[self.selectedEntry]
if int(value) != 0:
entry.autoincrease = False
entry.end += 60 * int(value)
self.session.nav.RecordTimer.timeChanged(entry)
def inputCallback(self, value):
if value:
print "stopping recording after", int(value), "minutes."
entry = self.recording[self.selectedEntry]
if int(value) != 0:
entry.autoincrease = False
entry.end = int(time()) + 60 * int(value)
self.session.nav.RecordTimer.timeChanged(entry)
def isTimerRecordRunning(self):
identical = timers = 0
for timer in self.session.nav.RecordTimer.timer_list:
if timer.isRunning() and not timer.justplay:
timers += 1
if self.recording:
for x in self.recording:
if x.isRunning() and x == timer:
identical += 1
return timers > identical
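# i.e. a timer recording counts as "running" only if at least one active
# record timer is not among our own instant recordings.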
def instantRecord(self, serviceRef=None):
self.SelectedInstantServiceRef = serviceRef
pirr = preferredInstantRecordPath()
if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
if not pirr:
pirr = ""
self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
"\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
return
if isStandardInfoBar(self):
common = ((_("Add recording (stop after current event)"), "event"),)
if not SystemInfo["hasGBIpboxClient"]:
common += ((_("Add recording (indefinitely)"), "indefinitely"),
(_("Add recording (enter recording duration)"), "manualduration"),
(_("Add recording (enter recording endtime)"), "manualendtime"),)
else:
common = ()
if self.isInstantRecordRunning():
title = _("A recording is currently running.\nWhat do you want to do?")
if not SystemInfo["hasGBIpboxClient"]:
list = common + \
((_("Change recording (duration)"), "changeduration"),
(_("Change recording (add time)"), "addrecordingtime"),
(_("Change recording (endtime)"), "changeendtime"),)
else:
list = common
list += ((_("Stop recording"), "stop"),)
if config.usage.movielist_trashcan.value:
list += ((_("Stop and delete recording"), "stopdelete"),)
if len(self.recording) > 1:
list += ((_("Stop all current recordings"), "stopall"),)
if config.usage.movielist_trashcan.value:
list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
if self.isTimerRecordRunning():
list += ((_("Stop timer recording"), "timer"),)
list += ((_("Do nothing"), "no"),)
else:
title=_("Start recording?")
list = common
if self.isTimerRecordRunning():
list += ((_("Stop timer recording"), "timer"),)
if isStandardInfoBar(self):
list += ((_("Do not record"), "no"),)
if isStandardInfoBar(self) and self.timeshiftEnabled():
list = list + ((_("Save timeshift file"), "timeshift"),
(_("Save timeshift file in movie directory"), "timeshift_movie"))
if self.currentEventTime() > 0:
list += ((_("Save timeshift only for current event"), "timeshift_event"),)
if list:
self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
else:
return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
def __init__(self):
self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
{
"audioSelection": (self.audioSelection, _("Audio options...")),
})
def audioSelection(self):
from Screens.AudioSelection import AudioSelection
self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
def audioSelected(self, ret=None):
print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
def __init__(self):
self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
{
"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
})
self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
{
"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
}, -1)
self["SubserviceQuickzapAction"].setEnabled(False)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
})
self.onClose.append(self.__removeNotifications)
self.bsel = None
def __removeNotifications(self):
self.session.nav.event.remove(self.checkSubservicesAvail)
def checkSubservicesAvail(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
if not subservices or subservices.getNumberOfSubservices() == 0:
self["SubserviceQuickzapAction"].setEnabled(False)
def nextSubservice(self):
self.changeSubservice(+1)
def prevSubservice(self):
self.changeSubservice(-1)
def playSubservice(self, ref):
if ref.getUnsignedData(6) == 0:
ref.setName("")
self.session.nav.playService(ref, checkParentalControl=False, adjust=False)
def changeSubservice(self, direction):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
n = subservices and subservices.getNumberOfSubservices()
if n and n > 0:
selection = -1
ref = self.session.nav.getCurrentlyPlayingServiceReference()
idx = 0
while idx < n:
if subservices.getSubservice(idx).toString() == ref.toString():
selection = idx
break
idx += 1
if selection != -1:
selection += direction
if selection >= n:
selection = 0
elif selection < 0:
selection = n - 1
newservice = subservices.getSubservice(selection)
if newservice.valid():
del subservices
del service
self.playSubservice(newservice)
def subserviceSelection(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
self.bouquets = self.servicelist.getBouquetList()
n = subservices and subservices.getNumberOfSubservices()
selection = 0
if n and n > 0:
ref = self.session.nav.getCurrentlyPlayingServiceReference()
tlist = []
idx = 0
cnt_parent = 0
while idx < n:
i = subservices.getSubservice(idx)
if i.toString() == ref.toString():
selection = idx
tlist.append((i.getName(), i))
if i.getUnsignedData(6):
cnt_parent += 1
idx += 1
if cnt_parent and self.bouquets and len(self.bouquets):
keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
if config.usage.multibouquet.value:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
selection += 3
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
selection += 2
self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
def subserviceSelected(self, service):
del self.bouquets
if service is not None:
if isinstance(service[1], str):
if service[1] == "quickzap":
from Screens.SubservicesQuickzap import SubservicesQuickzap
self.session.open(SubservicesQuickzap, service[2])
else:
self["SubserviceQuickzapAction"].setEnabled(True)
self.playSubservice(service[1])
def addSubserviceToBouquetCallback(self, service):
if service and len(service) > 1 and isinstance(service[1], eServiceReference):
self.selectedSubservice = service
if self.bouquets is None:
cnt = 0
else:
cnt = len(self.bouquets)
if cnt > 1: # show bouquet list
self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
elif cnt == 1: # add to only one existing bouquet
self.addSubserviceToBouquet(self.bouquets[0][1])
self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO, timeout=5)
def bouquetSelClosed(self, confirmed):
self.bsel = None
del self.selectedSubservice
if confirmed:
self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO, timeout=5)
def addSubserviceToBouquet(self, dest):
self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
if self.bsel:
self.bsel.close(True)
else:
del self.selectedSubservice
from Components.Sources.HbbtvApplication import HbbtvApplication
gHbbtvApplication = HbbtvApplication()
class InfoBarRedButton:
def __init__(self):
self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
{
"activateRedButton": (self.activateRedButton, _("Red button...")),
})
self["HbbtvApplication"] = gHbbtvApplication
self.onHBBTVActivation = [ ]
self.onRedButtonActivation = [ ]
self.onReadyForAIT = [ ]
self.__et = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evHBBTVInfo: self.detectedHbbtvApplication,
iPlayableService.evUpdatedInfo: self.updateInformation
})
def updateAIT(self, orgId=0):
for x in self.onReadyForAIT:
try:
x(orgId)
except Exception, ErrMsg:
print ErrMsg
#self.onReadyForAIT.remove(x)
def updateInformation(self):
try:
self["HbbtvApplication"].setApplicationName("")
self.updateAIT()
except Exception, ErrMsg:
pass
def detectedHbbtvApplication(self):
service = self.session.nav.getCurrentService()
info = service and service.info()
try:
for x in info.getInfoObject(iServiceInformation.sHBBTVUrl):
print x
if x[0] in (-1, 1):
self.updateAIT(x[3])
self["HbbtvApplication"].setApplicationName(x[1])
break
except Exception, ErrMsg:
pass
def activateRedButton(self):
service = self.session.nav.getCurrentService()
info = service and service.info()
if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
for x in self.onHBBTVActivation:
x()
elif False: # TODO: other red button services
for x in self.onRedButtonActivation:
x()
class InfoBarTimerButton:
def __init__(self):
self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
{
"timerSelection": (self.timerSelection, _("Timer selection...")),
})
def timerSelection(self):
from Screens.TimerEdit import TimerEditList
self.session.open(TimerEditList)
class InfoBarVmodeButton:
def __init__(self):
self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions",
{
"vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
})
def vmodeSelection(self):
self.session.open(VideoMode)
class VideoMode(Screen):
def __init__(self,session):
Screen.__init__(self, session)
self["videomode"] = Label()
self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
{
"vmodeSelection": self.selectVMode
})
self.Timer = eTimer()
self.Timer.callback.append(self.quit)
self.selectVMode()
def selectVMode(self):
policy = config.av.policy_43
if self.isWideScreen():
policy = config.av.policy_169
idx = policy.choices.index(policy.value)
idx = (idx + 1) % len(policy.choices)
policy.value = policy.choices[idx]
self["videomode"].setText(policy.value)
self.Timer.start(1000, True)
def isWideScreen(self):
from Components.Converter.ServiceInfo import WIDESCREEN
service = self.session.nav.getCurrentService()
info = service and service.info()
return info.getInfo(iServiceInformation.sAspect) in WIDESCREEN
def quit(self):
self.Timer.stop()
self.close()
class InfoBarAdditionalInfo:
def __init__(self):
self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
self["TimeshiftPossible"] = self["RecordingPossible"]
self["ExtensionsAvailable"] = Boolean(fixed=1)
# TODO: these properties should be queried from the input device keymap
self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
self["ShowAudioOnYellow"] = Boolean(fixed=0)
self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
def __init__(self):
self.onExecBegin.append(self.checkNotifications)
Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
self.onClose.append(self.__removeNotification)
def __removeNotification(self):
Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)
def checkNotificationsIfExecing(self):
if self.execing:
self.checkNotifications()
def checkNotifications(self):
notifications = Notifications.notifications
if notifications:
n = notifications[0]
del notifications[0]
cb = n[0]
if n[3].has_key("onSessionOpenCallback"):
n[3]["onSessionOpenCallback"]()
del n[3]["onSessionOpenCallback"]
if cb:
dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
elif not Notifications.current_notifications and n[4] == "ZapError":
if n[3].has_key("timeout"):
del n[3]["timeout"]
n[3]["enable_input"] = False
dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
self.hide()
dlg.show()
self.notificationDialog = dlg
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
else:
dlg = self.session.open(n[1], *n[2], **n[3])
# remember that this notification is currently active
d = (n[4], dlg)
Notifications.current_notifications.append(d)
dlg.onClose.append(boundFunction(self.__notificationClosed, d))
def closeNotificationInstantiateDialog(self):
if hasattr(self, "notificationDialog"):
self.session.deleteDialog(self.notificationDialog)
del self.notificationDialog
eActionMap.getInstance().unbindAction('', self.keypressNotification)
def keypressNotification(self, key, flag):
if flag:
self.closeNotificationInstantiateDialog()
def __notificationClosed(self, d):
Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.serviceHasEnded
})
def serviceHasEnded(self):
print "service end!"
try:
self.setSeekState(self.SEEK_STATE_PLAY)
except:
pass
class InfoBarCueSheetSupport:
CUT_TYPE_IN = 0
CUT_TYPE_OUT = 1
CUT_TYPE_MARK = 2
CUT_TYPE_LAST = 3
ENABLE_RESUME_SUPPORT = False
def __init__(self, actionmap = "InfobarCueSheetActions"):
self["CueSheetActions"] = HelpableActionMap(self, actionmap,
{
"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
}, prio=1)
self.cut_list = [ ]
self.is_closing = False
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evCuesheetChanged: self.downloadCuesheet,
})
def __serviceStarted(self):
if self.is_closing:
return
print "new service started! trying to download cuts!"
self.downloadCuesheet()
if self.ENABLE_RESUME_SUPPORT:
for (pts, what) in self.cut_list:
if what == self.CUT_TYPE_LAST:
last = pts
break
else:
last = getResumePoint(self.session)
if last is None:
return
# only resume if at least 10 seconds ahead, or <10 seconds before the end.
seekable = self.__getSeekable()
if seekable is None:
return # Should not happen?
length = seekable.getLength() or (None,0)
print "seekable.getLength() returns:", length
# Hmm, this implies we don't resume if the length is unknown...
if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
self.resume_point = last
l = last / 90000
if "ask" in config.usage.on_movie_start.value or not length[1]:
Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
elif config.usage.on_movie_start.value == "resume":
# TRANSLATORS: The string "Resuming playback" flashes for a moment
# TRANSLATORS: at the start of a movie, when the user has selected
# TRANSLATORS: "Resume from last position" as start behavior.
# TRANSLATORS: The purpose is to notify the user that the movie starts
# TRANSLATORS: in the middle somewhere and not from the beginning.
# TRANSLATORS: (Some translators seem to have interpreted it as a
# TRANSLATORS: question or a choice, but it is a statement.)
Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)
def playLastCB(self, answer):
if answer == True:
self.doSeek(self.resume_point)
self.hideAfterResume()
def hideAfterResume(self):
if isinstance(self, InfoBarShowHide):
self.hide()
def __getSeekable(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
return service.seek()
def cueGetCurrentPosition(self):
seek = self.__getSeekable()
if seek is None:
return None
r = seek.getPlayPosition()
if r[0]:
return None
return long(r[1])
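# getPlayPosition() returns (error, pts); a non-zero error flag above means
# the position is unavailable.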
def cueGetEndCutPosition(self):
ret = False
isin = True
for cp in self.cut_list:
if cp[1] == self.CUT_TYPE_OUT:
if isin:
isin = False
ret = cp[0]
elif cp[1] == self.CUT_TYPE_IN:
isin = True
return ret
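# Example: for cut_list [(0, CUT_TYPE_IN), (4500000, CUT_TYPE_OUT)] the
# playback enters an "out" region at pts 4500000 (50 s), so that value is
# returned; without any pending "out" cut, False is returned.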
def jumpPreviousNextMark(self, cmp, start=False):
current_pos = self.cueGetCurrentPosition()
if current_pos is None:
return False
mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
if mark is not None:
pts = mark[0]
else:
return False
self.doSeek(pts)
return True
def jumpPreviousMark(self):
# we add 5 seconds, so if the play position is <5s after
# the mark, the mark before will be used
self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)
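# Worked example: with marks at 60 s and 120 s and the position at 123 s,
# the 120 s mark falls inside the 5 s grace window and is skipped, so we
# jump to 60 s; at 126 s the same key jumps back to the 120 s mark.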
def jumpNextMark(self):
if not self.jumpPreviousNextMark(lambda x: x-90000):
self.doSeek(-1)
def getNearestCutPoint(self, pts, cmp=abs, start=False):
# can be optimized
beforecut = True
nearest = None
bestdiff = -1
instate = True
if start:
bestdiff = cmp(0 - pts)
if bestdiff >= 0:
nearest = [0, False]
for cp in self.cut_list:
if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
beforecut = False
if cp[1] == self.CUT_TYPE_IN: # Start is here, disregard previous marks
diff = cmp(cp[0] - pts)
if start and diff >= 0:
nearest = cp
bestdiff = diff
else:
nearest = None
bestdiff = -1
if cp[1] == self.CUT_TYPE_IN:
instate = True
elif cp[1] == self.CUT_TYPE_OUT:
instate = False
elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
diff = cmp(cp[0] - pts)
if instate and diff >= 0 and (nearest is None or bestdiff > diff):
nearest = cp
bestdiff = diff
return nearest
def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
current_pos = self.cueGetCurrentPosition()
if current_pos is None:
print "not seekable"
return
nearest_cutpoint = self.getNearestCutPoint(current_pos)
if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
if onlyreturn:
return nearest_cutpoint
if not onlyadd:
self.removeMark(nearest_cutpoint)
elif not onlyremove and not onlyreturn:
self.addMark((current_pos, self.CUT_TYPE_MARK))
if onlyreturn:
return None
def addMark(self, point):
insort(self.cut_list, point)
self.uploadCuesheet()
self.showAfterCuesheetOperation()
def removeMark(self, point):
self.cut_list.remove(point)
self.uploadCuesheet()
self.showAfterCuesheetOperation()
def showAfterCuesheetOperation(self):
if isinstance(self, InfoBarShowHide):
self.doShow()
def __getCuesheet(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
return service.cueSheet()
def uploadCuesheet(self):
cue = self.__getCuesheet()
if cue is None:
print "no cue available to upload, no cuesheet interface"
return
cue.setCutList(self.cut_list)
def downloadCuesheet(self):
cue = self.__getCuesheet()
if cue is None:
print "no cue available for download, no cuesheet interface"
self.cut_list = [ ]
else:
self.cut_list = cue.getCutList()
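# cut_list entries are (pts, type) tuples in 90 kHz ticks, where type is one
# of the CUT_TYPE_* constants above.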
class InfoBarSummary(Screen):
skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
<convert type="EventTime">Progress</convert>
</widget>
</screen>"""
# for picon: (path="piconlcd" will use LCD picons)
# <widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
# <convert type="ServiceName">Reference</convert>
# </widget>
class InfoBarSummarySupport:
def __init__(self):
pass
def createSummary(self):
return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
<convert type="ServicePosition">Position</convert>
</widget>
</screen>"""
class InfoBarMoviePlayerSummarySupport:
def __init__(self):
pass
def createSummary(self):
return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
def __init__(self):
self.teletext_plugin = None
for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
self.teletext_plugin = p
if self.teletext_plugin is not None:
self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
{
"startTeletext": (self.startTeletext, _("View teletext..."))
})
else:
print "no teletext plugin found!"
def startTeletext(self):
self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
def __init__(self):
object.__init__(self)
self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
{
"subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
})
self.selected_subtitle = None
if isStandardInfoBar(self):
self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
if SystemInfo["hasOSDAnimation"]:
self.subtitle_window.setAnimationMode(0)
else:
from Screens.InfoBar import InfoBar
self.subtitle_window = InfoBar.instance.subtitle_window
self.subtitle_window.hide()
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evStart: self.__serviceChanged,
iPlayableService.evEnd: self.__serviceChanged,
iPlayableService.evUpdatedInfo: self.__updatedInfo
})
def getCurrentServiceSubtitle(self):
service = self.session.nav.getCurrentService()
return service and service.subtitle()
def subtitleSelection(self):
subtitle = self.getCurrentServiceSubtitle()
subtitlelist = subtitle and subtitle.getSubtitleList()
if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
from Screens.AudioSelection import SubtitleSelection
self.session.open(SubtitleSelection, self)
else:
return 0
def __serviceChanged(self):
if self.selected_subtitle:
self.selected_subtitle = None
self.subtitle_window.hide()
def __updatedInfo(self):
if not self.selected_subtitle:
subtitle = self.getCurrentServiceSubtitle()
cachedsubtitle = subtitle and subtitle.getCachedSubtitle()
if cachedsubtitle:
self.enableSubtitle(cachedsubtitle)
def enableSubtitle(self, selectedSubtitle):
subtitle = self.getCurrentServiceSubtitle()
self.selected_subtitle = selectedSubtitle
if subtitle and self.selected_subtitle:
subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
self.subtitle_window.show()
else:
if subtitle:
subtitle.disableSubtitles(self.subtitle_window.instance)
self.subtitle_window.hide()
def restartSubtitle(self):
if self.selected_subtitle:
self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evTuneFailed: self.__tuneFailed,
iPlayableService.evTunedIn: self.__serviceStarted,
iPlayableService.evStart: self.__serviceStarted
})
self.__serviceStarted()
def __serviceStarted(self):
self.closeNotificationInstantiateDialog()
self.last_error = None
if not config.usage.hide_zap_errors.value:
Notifications.RemovePopup(id = "ZapError")
def __tuneFailed(self):
if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
service = self.session.nav.getCurrentService()
info = service and service.info()
error = info and info.getInfo(iServiceInformation.sDVBState)
if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
self.session.nav.currentlyPlayingServiceReference = None
self.session.nav.currentlyPlayingServiceOrGroup = None
if error == self.last_error:
error = None
else:
self.last_error = error
error = {
eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
eDVBServicePMTHandler.eventNewProgramInfo: None,
eDVBServicePMTHandler.eventTuned: None,
eDVBServicePMTHandler.eventSOF: None,
eDVBServicePMTHandler.eventEOF: None,
eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
}.get(error) # .get() returns None when the key does not exist in the dict
if error and not config.usage.hide_zap_errors.value:
self.closeNotificationInstantiateDialog()
if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
def __init__(self):
self.inactivityTimer = eTimer()
self.inactivityTimer.callback.append(self.inactivityTimeout)
self.restartInactiveTimer()
self.sleepTimer = eTimer()
self.sleepStartTime = 0
self.sleepTimer.callback.append(self.sleepTimerTimeout)
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)
def keypress(self, key, flag):
if flag:
self.restartInactiveTimer()
def restartInactiveTimer(self):
time = abs(int(config.usage.inactivity_timer.value))
if time:
self.inactivityTimer.startLongTimer(time)
else:
self.inactivityTimer.stop()
def inactivityTimeout(self):
if config.usage.inactivity_timer_blocktime.value:
curtime = localtime(time())
if curtime.tm_year > 1970: #check if the current time is valid
duration = blocktime = extra_time = False
if config.usage.inactivity_timer_blocktime_by_weekdays.value:
weekday = curtime.tm_wday
if config.usage.inactivity_timer_blocktime_day[weekday].value:
blocktime = True
begintime = tuple(config.usage.inactivity_timer_blocktime_begin_day[weekday].value)
endtime = tuple(config.usage.inactivity_timer_blocktime_end_day[weekday].value)
extra_time = config.usage.inactivity_timer_blocktime_extra_day[weekday].value
begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin_day[weekday].value)
endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end_day[weekday].value)
else:
blocktime = True
begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
extra_time = config.usage.inactivity_timer_blocktime_extra.value
begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
if blocktime and (begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime)):
duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
elif extra_time and (begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
duration = (endtime_extra[0]*3600 + endtime_extra[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
if duration:
if duration < 0:
duration += 24*3600
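# A block window may wrap past midnight, e.g. 23:00-06:00: at 23:30 the
# naive end-minus-now difference is negative, so a full day is added.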
self.inactivityTimer.startLongTimer(duration)
return
if Screens.Standby.inStandby:
self.inactivityTimeoutCallback(True)
else:
message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)
def inactivityTimeoutCallback(self, answer):
if answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
def sleepTimerState(self):
if self.sleepTimer.isActive():
return (self.sleepStartTime - time()) / 60
return 0
def setSleepTimer(self, sleepTime):
print "[InfoBarPowersaver] set sleeptimer", sleepTime
if sleepTime:
m = abs(sleepTime / 60)
message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
self.sleepTimer.startLongTimer(sleepTime)
self.sleepStartTime = time() + sleepTime
else:
message = _("The sleep timer has been disabled.")
self.sleepTimer.stop()
Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)
def sleepTimerTimeout(self):
if not Screens.Standby.inStandby:
list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
message = _("Your receiver will got to stand by due to the sleeptimer.")
message += "\n" + _("Do you want this?")
self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)
def sleepTimerTimeoutCallback(self, answer):
if answer == "extend":
print "[InfoBarPowersaver] extend sleeptimer"
self.setSleepTimer(900)
elif answer:
self.goStandby()
else:
print "[InfoBarPowersaver] abort"
self.setSleepTimer(0)
def goStandby(self):
if not Screens.Standby.inStandby:
print "[InfoBarPowersaver] goto standby"
self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
def HDMIIn(self):
slist = self.servicelist
if slist.dopipzap:
curref = self.session.pip.getCurrentService()
if curref and curref.type != 8192:
self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
else:
self.session.pip.playService(slist.servicelist.getCurrent())
else:
curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if curref and curref.type != 8192:
if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
setResumePoint(self.session)
self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
elif isStandardInfoBar(self):
self.session.nav.playService(slist.servicelist.getCurrent())
else:
self.session.nav.playService(self.cur_service)
|
kajgan/stbgui
|
lib/python/Screens/InfoBarGenerics.py
|
Python
|
gpl-2.0
| 127,100
|
"""
Class to represent the whole setup (a bunch of nodes)
"""
import logging
import yaml
from stitches.connection import Connection
class Structure(object):
"""
Stateful object to represent whole setup
"""
def __init__(self):
self.logger = logging.getLogger('stitches.structure')
self.Instances = {}
self.config = {}
def __del__(self):
"""
Close all connections
"""
for role in self.Instances.keys():
for connection in self.Instances[role]:
connection.sftp.close()
connection.cli.close()
def reconnect_all(self):
"""
Re-establish connection to all instances
"""
for role in self.Instances.keys():
for connection in self.Instances[role]:
connection.reconnect()
def add_instance(self,
role,
instance,
username='root',
key_filename=None,
output_shell=False):
"""
Add instance to the setup
@param role: instance's role
@type role: str
@param instance: host parameters we would like to establish connection
to
@type instance: dict
@param username: user name for creating ssh connection
@type username: str
@param key_filename: file name with ssh private key
@type key_filename: str
@param output_shell: write output from this connection to standard
output
@type output_shell: bool
"""
if role not in self.Instances:
self.Instances[role] = []
self.logger.debug('Adding ' + role + ' with private_hostname ' +
instance['private_hostname'] +
', public_hostname ' + instance['public_hostname'])
self.Instances[role].append(Connection(instance,
username,
key_filename,
output_shell=output_shell))
def setup_from_yamlfile(self, yamlfile, output_shell=False):
"""
Setup from yaml config
@param yamlfile: path to yaml config file
@type yamlfile: str
@param output_shell: write output from this connection to standard
output
@type output_shell: bool
"""
self.logger.debug('Loading config from ' + yamlfile)
with open(yamlfile, 'r') as yamlfd:
yamlconfig = yaml.safe_load(yamlfd) # safe_load: plain config data needs no arbitrary object construction
for instance in yamlconfig['Instances']:
self.add_instance(instance['role'].upper(),
instance,
output_shell=output_shell)
if 'Config' in yamlconfig.keys():
self.logger.debug('Config found: ' + str(yamlconfig['Config']))
self.config = yamlconfig['Config'].copy()
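# A minimal usage sketch (the file name and the 'SERVER' role are hypothetical;
# each instance dict must carry 'role', 'private_hostname' and 'public_hostname'
# keys, as consumed above):
#
#   structure = Structure()
#   structure.setup_from_yamlfile('setup.yaml', output_shell=True)
#   for connection in structure.Instances.get('SERVER', []):
#       connection.reconnect()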
|
RedHatQE/python-stitches
|
stitches/structure.py
|
Python
|
gpl-3.0
| 3,076
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
__all__ = [
'pot', 'translations', 'get_translations', 'iso639', 'iso3166',
'build', 'mathjax',
'gui',
'develop', 'install',
'kakasi', 'coffee', 'resources',
'check',
'sdist',
'manual', 'tag_release',
'pypi_register', 'pypi_upload', 'upload_to_server',
'upload_installers',
'upload_user_manual', 'upload_demo', 'reupload',
'linux32', 'linux64', 'linux', 'linux_freeze',
'osx32_freeze', 'osx', 'rsync', 'push',
'win32_freeze', 'win32', 'win64', 'win',
'stage1', 'stage2', 'stage3', 'stage4', 'stage5', 'publish', 'publish_betas',
]
from setup.translations import POT, GetTranslations, Translations, ISO639, ISO3166
pot = POT()
translations = Translations()
get_translations = GetTranslations()
iso639 = ISO639()
iso3166 = ISO3166()
from setup.extensions import Build
build = Build()
from setup.mathjax import MathJax
mathjax = MathJax()
from setup.install import Develop, Install, Sdist
develop = Develop()
install = Install()
sdist = Sdist()
from setup.gui import GUI
gui = GUI()
from setup.check import Check
check = Check()
from setup.resources import Resources, Kakasi, Coffee
resources = Resources()
kakasi = Kakasi()
coffee = Coffee()
from setup.publish import Manual, TagRelease, Stage1, Stage2, \
Stage3, Stage4, Stage5, Publish, PublishBetas
manual = Manual()
tag_release = TagRelease()
stage1 = Stage1()
stage2 = Stage2()
stage3 = Stage3()
stage4 = Stage4()
stage5 = Stage5()
publish = Publish()
publish_betas = PublishBetas()
from setup.upload import (UploadUserManual, UploadDemo, UploadInstallers,
UploadToServer, ReUpload)
upload_user_manual = UploadUserManual()
upload_demo = UploadDemo()
upload_to_server = UploadToServer()
upload_installers = UploadInstallers()
reupload = ReUpload()
from setup.installer import Rsync, Push
rsync = Rsync()
push = Push()
from setup.installer.linux import Linux, Linux32, Linux64
linux = Linux()
linux32 = Linux32()
linux64 = Linux64()
from setup.installer.linux.freeze2 import LinuxFreeze
linux_freeze = LinuxFreeze()
from setup.installer.osx import OSX
osx = OSX()
from setup.installer.osx.app.main import OSX32_Freeze
osx32_freeze = OSX32_Freeze()
from setup.installer.windows import Win, Win32, Win64
win = Win()
win32 = Win32()
win64 = Win64()
from setup.installer.windows.freeze import Win32Freeze
win32_freeze = Win32Freeze()
from setup.pypi import PyPIRegister, PyPIUpload
pypi_register = PyPIRegister()
pypi_upload = PyPIUpload()
commands = {}
for x in __all__:
commands[x] = locals()[x]
command_names = dict(zip(commands.values(), commands.keys()))
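# A minimal sketch of how the two maps resolve either way (command names are
# the strings listed in __all__ above):
#
#   cmd = commands['build']             # the Build() instance registered above
#   assert command_names[cmd] == 'build'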
|
user-none/calibre
|
setup/commands.py
|
Python
|
gpl-3.0
| 2,911
|
import unittest
from zen import *
import networkx
import random
class AllPairsDijkstraPathLength_TestCase(unittest.TestCase):
def test_apdp_undirected_w_weights(self):
G = Graph()
G.add_edge(1,2,weight=4)
G.add_edge(2,3,weight=1)
G.add_edge(1,4,weight=2)
G.add_edge(4,5,weight=1)
G.add_edge(5,3,weight=1)
D = all_pairs_dijkstra_path_length_(G,ignore_weights=False)
self.assertEqual(D[0,0],0)
self.assertEqual(D[0,1],4)
self.assertEqual(D[0,2],4)
def test_apdp_undirected_ignore_weights(self):
G = Graph()
G.add_edge(1,2,weight=4)
G.add_edge(2,3,weight=1)
G.add_edge(1,4,weight=2)
G.add_edge(4,5,weight=1)
G.add_edge(5,3,weight=1)
D = all_pairs_dijkstra_path_length_(G,ignore_weights=True)
self.assertEqual(D[0,0],0)
self.assertEqual(D[0,1],1)
self.assertEqual(D[0,2],2)
def test_apdp_undirected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D = all_pairs_dijkstra_path_length_(G)
self.assertEqual(D[0,0],0)
self.assertEqual(D[0,1],1)
self.assertEqual(D[0,2],2)
self.assertEqual(D[0,3],2)
self.assertEqual(D[1,2],1)
self.assertEqual(D[1,3],1)
self.assertEqual(D[2,3],2)
def test_apdp_directed(self):
G = DiGraph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D = all_pairs_dijkstra_path_length_(G)
self.assertEqual(D[0,0],0)
self.assertEqual(D[0,1],1)
self.assertEqual(D[0,2],2)
self.assertEqual(D[0,3],2)
self.assertEqual(D[1,2],1)
self.assertEqual(D[1,3],1)
self.assertEqual(D[2,3],float('infinity'))
def test_disconnected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_node(4)
D = all_pairs_dijkstra_path_length_(G)
self.assertEqual(D[0,3],float('infinity'))
self.assertEqual(D[1,3],float('infinity'))
self.assertEqual(D[2,3],float('infinity'))
class AllPairsDijkstraPath_TestCase(unittest.TestCase):
def test_apdp_undirected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D,P = all_pairs_dijkstra_path_(G)
self.assertEqual(D[0,0],0)
self.assertEqual(D[0,1],1)
self.assertEqual(D[0,2],2)
self.assertEqual(D[0,3],2)
self.assertEqual(D[1,2],1)
self.assertEqual(D[1,3],1)
self.assertEqual(D[2,3],2)
self.assertEqual(P[0,0],-1)
self.assertEqual(P[0,1],0)
self.assertEqual(P[0,2],1)
self.assertEqual(P[0,3],1)
self.assertEqual(P[1,2],1)
self.assertEqual(P[1,3],1)
self.assertEqual(P[2,3],1)
def test_apdp_directed(self):
G = DiGraph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D,P = all_pairs_dijkstra_path_(G)
self.assertEqual(D[0,0],0)
self.assertEqual(D[0,1],1)
self.assertEqual(D[0,2],2)
self.assertEqual(D[0,3],2)
self.assertEqual(D[1,2],1)
self.assertEqual(D[1,3],1)
self.assertEqual(D[2,3],float('infinity'))
self.assertEqual(P[0,0],-1)
self.assertEqual(P[0,1],0)
self.assertEqual(P[0,2],1)
self.assertEqual(P[0,3],1)
self.assertEqual(P[1,2],1)
self.assertEqual(P[1,3],1)
self.assertEqual(P[2,3],-1)
def test_disconnected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_node(4)
D,P = all_pairs_dijkstra_path_(G)
self.assertEqual(D[0,3],float('infinity'))
self.assertEqual(D[1,3],float('infinity'))
self.assertEqual(D[2,3],float('infinity'))
self.assertEqual(P[0,3],-1)
self.assertEqual(P[1,3],-1)
self.assertEqual(P[2,3],-1)
class AllPairsDijkstraPathLengthTestCase(unittest.TestCase):
def test_apdp_undirected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D = all_pairs_dijkstra_path_length(G)
self.assertEqual(D[1][1],0)
self.assertEqual(D[1][2],1)
self.assertEqual(D[1][3],2)
self.assertEqual(D[1][4],2)
self.assertEqual(D[2][3],1)
self.assertEqual(D[2][4],1)
self.assertEqual(D[3][4],2)
def test_apdp_directed(self):
G = DiGraph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D = all_pairs_dijkstra_path_length(G)
self.assertEqual(D[1][1],0)
self.assertEqual(D[1][2],1)
self.assertEqual(D[1][3],2)
self.assertEqual(D[1][4],2)
self.assertEqual(D[2][3],1)
self.assertEqual(D[2][4],1)
self.assertEqual(D[3][4],float('infinity'))
def test_disconnected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_node(4)
D = all_pairs_dijkstra_path_length(G)
self.assertEqual(D[1][4],float('infinity'))
self.assertEqual(D[2][4],float('infinity'))
self.assertEqual(D[3][4],float('infinity'))
class AllPairsDijkstraPathTestCase(unittest.TestCase):
def test_apdp_undirected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D = all_pairs_dijkstra_path(G)
self.assertEqual(D[1][1][0],0)
self.assertEqual(D[1][2][0],1)
self.assertEqual(D[1][3][0],2)
self.assertEqual(D[1][4][0],2)
self.assertEqual(D[2][3][0],1)
self.assertEqual(D[2][4][0],1)
self.assertEqual(D[3][4][0],2)
self.assertEqual(D[1][1][1],None)
self.assertEqual(D[1][2][1],1)
self.assertEqual(D[1][3][1],2)
self.assertEqual(D[1][4][1],2)
self.assertEqual(D[2][3][1],2)
self.assertEqual(D[2][4][1],2)
self.assertEqual(D[3][4][1],2)
def test_apdp_directed(self):
G = DiGraph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(2,4)
D = all_pairs_dijkstra_path(G)
self.assertEqual(D[1][1][0],0)
self.assertEqual(D[1][2][0],1)
self.assertEqual(D[1][3][0],2)
self.assertEqual(D[1][4][0],2)
self.assertEqual(D[2][3][0],1)
self.assertEqual(D[2][4][0],1)
self.assertEqual(D[3][4][0],float('infinity'))
self.assertEqual(D[1][1][1],None)
self.assertEqual(D[1][2][1],1)
self.assertEqual(D[1][3][1],2)
self.assertEqual(D[1][4][1],2)
self.assertEqual(D[2][3][1],2)
self.assertEqual(D[2][4][1],2)
self.assertEqual(D[3][4][1],None)
def test_disconnected(self):
G = Graph()
G.add_edge(1,2)
G.add_edge(2,3)
G.add_node(4)
D = all_pairs_dijkstra_path(G)
self.assertEqual(D[1][4][0],float('infinity'))
self.assertEqual(D[2][4][0],float('infinity'))
self.assertEqual(D[3][4][0],float('infinity'))
self.assertEqual(D[1][4][1],None)
self.assertEqual(D[2][4][1],None)
self.assertEqual(D[3][4][1],None)
class DijkstraPathLengthTestCase(unittest.TestCase):
def test_sssp_undirected(self):
# following example from CLRS book page 596
G = Graph()
G.add_edge('o', 'a', None, 2)
G.add_edge('a', 'f', None, 12)
G.add_edge('f', 't', None, 3)
G.add_edge('t', 'e', None, 7)
G.add_edge('e', 'c', None, 4)
G.add_edge('c', 'o', None, 4)
G.add_edge('o', 'b', None, 5)
G.add_edge('a', 'b', None, 2)
G.add_edge('a', 'd', None, 7)
G.add_edge('b', 'd', None, 4)
G.add_edge('b', 'e', None, 3)
G.add_edge('t', 'd', None, 5)
G.add_edge('e', 'd', None, 1)
G.add_edge('b', 'c', None, 1)
G.add_edge('x', 'y', None, 1)
D = dijkstra_path_length(G, 'o')
self.assertEqual(D['o'], 0)
self.assertEqual(D['a'], 2)
self.assertEqual(D['b'], 4)
#self.assertEqual(D['d'], (8, 'e'))
self.assertEqual(D['c'], 4)
self.assertEqual(D['e'], 7)
self.assertEqual(D['t'], 13)
self.assertEqual(D['f'], 14)
self.assertEqual(D['x'], float('infinity'))
self.assertEqual(D['y'], float('infinity'))
def test_source_is_end(self):
G = DiGraph()
G.add_edge('s', 't', None, 10)
d = dijkstra_path_length(G, 's', 's')
self.assertEquals(0, d)
def test_unreachable(self):
G = DiGraph()
G.add_edge('s', 't', None, 10)
G.add_edge('x', 'z', None, 2)
d = dijkstra_path_length(G, 's', 'x')
self.assertEquals(float('infinity'), d)
def test_sssp_directed(self):
# following example from CLRS book page 596
G = DiGraph()
G.add_edge('s', 't', None, 10)
G.add_edge('s', 'y', None, 5)
G.add_edge('t', 'x', None, 1)
G.add_edge('t', 'y', None, 2)
G.add_edge('y', 't', None, 3)
G.add_edge('y', 'x', None, 9)
G.add_edge('y', 'z', None, 2)
G.add_edge('z', 's', None, 7)
G.add_edge('z', 'x', None, 6)
G.add_edge('x', 'z', None, 4)
G.add_edge('a', 'b', None, 4)
D = dijkstra_path_length(G, 's')
self.assertEqual(D['s'], 0)
self.assertEqual(D['t'], 8)
self.assertEqual(D['y'], 5)
self.assertEqual(D['x'], 9)
self.assertEqual(D['z'], 7)
self.assertEqual(D['a'], float('infinity'))
self.assertEqual(D['b'], float('infinity'))
def test_spsp_directed(self):
# following example from CLRS book page 596
G = DiGraph()
G.add_edge('s', 't', None, 10)
G.add_edge('s', 'y', None, 5)
G.add_edge('t', 'x', None, 1)
G.add_edge('t', 'y', None, 2)
G.add_edge('y', 't', None, 3)
G.add_edge('y', 'x', None, 9)
G.add_edge('y', 'z', None, 2)
G.add_edge('z', 's', None, 7)
G.add_edge('z', 'x', None, 6)
G.add_edge('x', 'z', None, 4)
G.add_edge('a', 'b', None, 4)
d = dijkstra_path_length(G, 's', 't') # x should not be found
self.assertEquals(8, d)
def test_simple_directed_(self):
G = Graph()
G.add_edge(0,1)
G.add_edge(1,2)
G.add_edge(2,3)
D = dijkstra_path_length_(G,0,3)
self.assertEquals(D[3],3)
return
class DijkstraPathTestCase(unittest.TestCase):
def test_sssp_undirected(self):
# following example from CLRS book page 596
G = Graph()
G.add_edge('o', 'a', None, 2)
G.add_edge('a', 'f', None, 12)
G.add_edge('f', 't', None, 3)
G.add_edge('t', 'e', None, 7)
G.add_edge('e', 'c', None, 4)
G.add_edge('c', 'o', None, 4)
G.add_edge('o', 'b', None, 5)
G.add_edge('a', 'b', None, 2)
G.add_edge('a', 'd', None, 7)
G.add_edge('b', 'd', None, 4)
G.add_edge('b', 'e', None, 3)
G.add_edge('t', 'd', None, 5)
G.add_edge('e', 'd', None, 1)
G.add_edge('b', 'c', None, 1)
G.add_edge('x', 'y', None, 1)
D = dijkstra_path(G, 'o')
self.assertEqual(D['o'], (0, None))
self.assertEqual(D['a'], (2, 'o'))
self.assertEqual(D['b'], (4, 'a'))
#self.assertEqual(D['d'], (8, 'e'))
self.assertEqual(D['c'], (4, 'o'))
self.assertEqual(D['e'], (7, 'b'))
self.assertEqual(D['t'], (13, 'd'))
self.assertEqual(D['f'], (14, 'a'))
self.assertEqual(D['x'], (float('infinity'),None))
self.assertEqual(D['y'], (float('infinity'),None))
def test_source_is_end(self):
G = DiGraph()
G.add_edge('s', 't', None, 10)
d, p = dijkstra_path(G, 's', 's')
self.assertEquals(0, d)
self.assertEquals([], p)
def test_unreachable(self):
G = DiGraph()
G.add_edge('s', 't', None, 10)
G.add_edge('x', 'z', None, 2)
d, p = dijkstra_path(G, 's', 'x')
self.assertEquals(float('infinity'), d)
self.assertEquals(None, p)
def test_sssp_directed(self):
# following example from CLRS book page 596
G = DiGraph()
G.add_edge('s', 't', None, 10)
G.add_edge('s', 'y', None, 5)
G.add_edge('t', 'x', None, 1)
G.add_edge('t', 'y', None, 2)
G.add_edge('y', 't', None, 3)
G.add_edge('y', 'x', None, 9)
G.add_edge('y', 'z', None, 2)
G.add_edge('z', 's', None, 7)
G.add_edge('z', 'x', None, 6)
G.add_edge('x', 'z', None, 4)
G.add_edge('a', 'b', None, 4)
D = dijkstra_path(G, 's')
self.assertEqual(D['s'], (0, None))
self.assertEqual(D['t'], (8, 'y'))
self.assertEqual(D['y'], (5, 's'))
self.assertEqual(D['x'], (9, 't'))
self.assertEqual(D['z'], (7, 'y'))
self.assertEqual(D['a'], (float('infinity'),None))
self.assertEqual(D['b'], (float('infinity'),None))
def test_spsp_directed(self):
# following example from CLRS book page 596
G = DiGraph()
G.add_edge('s', 't', None, 10)
G.add_edge('s', 'y', None, 5)
G.add_edge('t', 'x', None, 1)
G.add_edge('t', 'y', None, 2)
G.add_edge('y', 't', None, 3)
G.add_edge('y', 'x', None, 9)
G.add_edge('y', 'z', None, 2)
G.add_edge('z', 's', None, 7)
G.add_edge('z', 'x', None, 6)
G.add_edge('x', 'z', None, 4)
G.add_edge('a', 'b', None, 4)
d,p = dijkstra_path(G, 's', 't') # x should not be found
self.assertEquals(8, d)
self.assertEquals(['s', 'y', 't'], p)
def test_simple_directed_(self):
G = Graph()
G.add_edge(0,1)
G.add_edge(1,2)
G.add_edge(2,3)
R = dijkstra_path_(G,0,3)
self.assertEquals(len(R),2)
d,p = R
self.assertEquals(d[3],3)
return
if __name__ == '__main__':
unittest.main()
|
networkdynamics/zenlib
|
src/zen/tests/dijkstra.py
|
Python
|
bsd-3-clause
| 12,161
|
import numpy as np
from matplotlib.testing.decorators import image_comparison
import matplotlib.pyplot as plt
@image_comparison(baseline_images=['legend_auto1'], tol=1.5e-3, remove_text=True)
def test_legend_auto1():
'Test automatic legend placement'
fig = plt.figure()
ax = fig.add_subplot(111)
x = np.arange(100)
ax.plot(x, 50 - x, 'o', label='y=1')
ax.plot(x, x - 50, 'o', label='y=-1')
ax.legend(loc=0)
@image_comparison(baseline_images=['legend_auto2'], remove_text=True)
def test_legend_auto2():
'Test automatic legend placement'
fig = plt.figure()
ax = fig.add_subplot(111)
x = np.arange(100)
b1 = ax.bar(x, x, color='m')
b2 = ax.bar(x, x[::-1], color='g')
ax.legend([b1[0], b2[0]], ['up', 'down'], loc=0)
@image_comparison(baseline_images=['legend_various_labels'], remove_text=True)
def test_various_labels():
# tests all sorts of label types
fig = plt.figure()
ax = fig.add_subplot(121)
ax.plot(range(4), 'o', label=1)
ax.plot(np.linspace(4, 4.1), 'o', label=u'D\xe9velopp\xe9s')
ax.plot(range(4, 1, -1), 'o', label='__nolegend__')
ax.legend(numpoints=1)
@image_comparison(baseline_images=['fancy'], remove_text=True)
def test_fancy():
# using subplot triggers some offsetbox functionality untested elsewhere
plt.subplot(121)
plt.scatter(range(10), range(10, 0, -1), label='XX\nXX')
plt.plot([5] * 10, 'o--', label='XX')
plt.errorbar(range(10), range(10), xerr=0.5, yerr=0.5, label='XX')
plt.legend(loc="center left", bbox_to_anchor=[1.0, 0.5],
ncol=2, shadow=True, title="My legend", numpoints=1)
|
lthurlow/Network-Grapher
|
proj/external/matplotlib-1.2.1/lib/matplotlib/tests/test_legend.py
|
Python
|
mit
| 1,640
|
#!/usr/bin/env python
"""
Calculate MD5 hash of a file.
Copyright (c) 2014, Are Hansen
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are
permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or other
materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
"""
__author__ = 'Are Hansen'
__date__ = '2014, May 11'
__version__ = '0.0.1'
import argparse
import hashlib
import sys
from os import path, access, R_OK
def parse_args():
"""Defines the command line arguments. """
parser = argparse.ArgumentParser('Calculates the MD5 sum of a given file.')
grp1 = parser.add_argument_group('- File')
grp1.add_argument('-F', dest='md5file', help='Path to file', required=True)
args = parser.parse_args()
return args
def find_md5(target_file):
"""Calculate the md5sum of a file. """
md5_out = hashlib.md5(open(target_file, 'rb').read()).hexdigest() # binary mode so the digest matches md5sum on every platform
print('MD5 sum of {0}: {1}'.format(target_file, md5_out))
def process_args(args):
"""Process the command line arguments. """
md5file = args.md5file
if path.isfile(md5file) and access(md5file, R_OK):
find_md5(md5file)
else:
print 'ERROR: Unable to either find and/or read {0}'.format(md5file)
sys.exit(1)
def main():
"""Do what Main does best... """
args = parse_args()
process_args(args)
if __name__ == '__main__':
main()
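# Example invocation (hypothetical path; output format matches find_md5 above):
#
#   $ ./filemd5.py -F /etc/hosts
#   MD5 sum of /etc/hosts: <32-character hex digest>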
|
ZombieNinjaPirate/Random-Python
|
filemd5.py
|
Python
|
gpl-3.0
| 2,374
|
import os
import numpy as np
from pymatgen.core.lattice import Lattice
from pymatgen.core.structure import Structure
from pymatgen.core.trajectory import Trajectory
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.io.vasp.outputs import Xdatcar
from pymatgen.util.testing import PymatgenTest
class TrajectoryTest(PymatgenTest):
def setUp(self):
xdatcar = Xdatcar(os.path.join(PymatgenTest.TEST_FILES_DIR, "Traj_XDATCAR"))
self.traj = Trajectory.from_file(os.path.join(PymatgenTest.TEST_FILES_DIR, "Traj_XDATCAR"))
self.structures = xdatcar.structures
def _check_traj_equality(self, traj_1, traj_2):
if np.sum(np.square(np.subtract(traj_1.lattice, traj_2.lattice))) > 0.0001:
return False
if traj_1.species != traj_2.species:
return False
return all(i == j for i, j in zip(self.traj, traj_2))
def test_single_index_slice(self):
self.assertTrue(all([self.traj[i] == self.structures[i] for i in range(0, len(self.structures), 19)]))
def test_slice(self):
sliced_traj = self.traj[2:99:3]
sliced_traj_from_structs = Trajectory.from_structures(self.structures[2:99:3])
if len(sliced_traj) == len(sliced_traj_from_structs):
self.assertTrue(all([sliced_traj[i] == sliced_traj_from_structs[i] for i in range(len(sliced_traj))]))
else:
self.assertTrue(False)
sliced_traj = self.traj[:-4:2]
sliced_traj_from_structs = Trajectory.from_structures(self.structures[:-4:2])
if len(sliced_traj) == len(sliced_traj_from_structs):
self.assertTrue(all([sliced_traj[i] == sliced_traj_from_structs[i] for i in range(len(sliced_traj))]))
else:
self.assertTrue(False)
def test_list_slice(self):
sliced_traj = self.traj[[10, 30, 70]]
sliced_traj_from_structs = Trajectory.from_structures([self.structures[i] for i in [10, 30, 70]])
if len(sliced_traj) == len(sliced_traj_from_structs):
self.assertTrue(all([sliced_traj[i] == sliced_traj_from_structs[i] for i in range(len(sliced_traj))]))
else:
self.assertTrue(False)
def test_conversion(self):
# Convert to displacements and back. Check structures
self.traj.to_displacements()
self.traj.to_positions()
self.assertTrue(all([struct == self.structures[i] for i, struct in enumerate(self.traj)]))
def test_copy(self):
traj_copy = self.traj.copy()
self.assertTrue(all([i == j for i, j in zip(self.traj, traj_copy)]))
def test_site_properties(self):
lattice = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
species = ["Si", "Si"]
frac_coords = [
[[0, 0, 0], [0.5, 0.5, 0.5]],
[[0.1, 0.1, 0.1], [0.6, 0.6, 0.6]],
[[0.2, 0.2, 0.2], [0.7, 0.7, 0.7]],
]
site_properties = [
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[False, False, False], [False, False, False]],
"magmom": [6, 6],
},
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
},
]
traj = Trajectory(lattice, species, frac_coords, site_properties=site_properties)
# compare the overall site properties list
self.assertEqual(traj.site_properties, site_properties)
# compare the site properties after slicing
self.assertEqual(traj[0].site_properties, site_properties[0])
self.assertEqual(traj[1:].site_properties, site_properties[1:])
def test_frame_properties(self):
lattice = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
species = ["Si", "Si"]
frac_coords = [
[[0, 0, 0], [0.5, 0.5, 0.5]],
[[0.1, 0.1, 0.1], [0.6, 0.6, 0.6]],
[[0.2, 0.2, 0.2], [0.7, 0.7, 0.7]],
]
site_properties = [
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[False, False, False], [False, False, False]],
"magmom": [6, 6],
},
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
},
]
frame_properties = {"energy_per_atom": [-3.0001, -3.0971, -3.0465]}
traj = Trajectory(
lattice,
species,
frac_coords,
site_properties=site_properties,
frame_properties=frame_properties,
)
# compare the overall site properties list
self.assertEqual(traj.frame_properties, frame_properties)
# compare the site properties after slicing
expected_output = {"energy_per_atom": [-3.0971, -3.0465]}
self.assertEqual(traj[1:].frame_properties, expected_output)
def test_extend(self):
traj = self.traj.copy()
# Case of compatible trajectories
compatible_traj = Trajectory.from_file(os.path.join(PymatgenTest.TEST_FILES_DIR, "Traj_Combine_Test_XDATCAR_1"))
traj.extend(compatible_traj)
full_traj = Trajectory.from_file(os.path.join(PymatgenTest.TEST_FILES_DIR, "Traj_Combine_Test_XDATCAR_Full"))
compatible_success = self._check_traj_equality(self.traj, full_traj)
# Case of incompatible trajectories
traj = self.traj.copy()
incompatible_traj = Trajectory.from_file(
os.path.join(PymatgenTest.TEST_FILES_DIR, "Traj_Combine_Test_XDATCAR_2")
)
incompatible_test_success = False
try:
traj.extend(incompatible_traj)
except Exception:
incompatible_test_success = True
self.assertTrue(compatible_success and incompatible_test_success)
def test_extend_no_site_props(self):
lattice = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
species = ["Si", "Si"]
frac_coords = [
[[0, 0, 0], [0.5, 0.5, 0.5]],
[[0.1, 0.1, 0.1], [0.6, 0.6, 0.6]],
[[0.2, 0.2, 0.2], [0.7, 0.7, 0.7]],
]
# Trajectory with no site_properties
traj_1 = Trajectory(lattice, species, frac_coords)
traj_2 = Trajectory(lattice, species, frac_coords)
# Test combining two trajectories with no site properties
traj_combined = traj_1.copy()
traj_combined.extend(traj_2)
self.assertEqual(traj_combined.site_properties, None)
def test_extend_equivalent_site_props(self):
lattice = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
species = ["Si", "Si"]
frac_coords = [
[[0, 0, 0], [0.5, 0.5, 0.5]],
[[0.1, 0.1, 0.1], [0.6, 0.6, 0.6]],
[[0.2, 0.2, 0.2], [0.7, 0.7, 0.7]],
]
# Trajectories with constant site properties
site_properties_1 = [
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
}
]
traj_1 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_1)
site_properties_2 = [
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
}
]
traj_2 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_2)
# Test combining two trajectories with similar site_properties
traj_combined = traj_1.copy()
traj_combined.extend(traj_2)
self.assertEqual(traj_combined.site_properties, site_properties_1)
def test_extend_inequivalent_site_props(self):
lattice = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
species = ["Si", "Si"]
frac_coords = [
[[0, 0, 0], [0.5, 0.5, 0.5]],
[[0.1, 0.1, 0.1], [0.6, 0.6, 0.6]],
[[0.2, 0.2, 0.2], [0.7, 0.7, 0.7]],
]
# Trajectories with constant site properties
site_properties_1 = [
{
"selective_dynamics": [[False, False, False], [False, False, False]],
"magmom": [5, 5],
}
]
traj_1 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_1)
site_properties_2 = [
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
}
]
traj_2 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_2)
# Test combining two trajectories with similar site_properties
traj_combined = traj_1.copy()
traj_combined.extend(traj_2)
expected_site_props = [
{
"selective_dynamics": [[False, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[False, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[False, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, True], [False, False, False]],
"magmom": [5, 5],
},
]
self.assertEqual(traj_combined.site_properties, expected_site_props)
# Trajectory with const site_properties and trajectory with changing site properties
site_properties_1 = [
{
"selective_dynamics": [[True, False, False], [False, False, False]],
"magmom": [5, 5],
}
]
traj_1 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_1)
site_properties_2 = [
{
"selective_dynamics": [[False, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, False], [False, False, False]],
"magmom": [5, 5],
},
]
traj_2 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_2)
# Test combining two trajectories with similar site_properties
traj_combined = traj_1.copy()
traj_combined.extend(traj_2)
expected_site_props = [
{
"selective_dynamics": [[True, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[False, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, False], [False, False, False]],
"magmom": [5, 5],
},
]
self.assertEqual(traj_combined.site_properties, expected_site_props)
# The other way around
traj_combined = traj_2.copy()
traj_combined.extend(traj_1)
expected_site_props = [
{
"selective_dynamics": [[False, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, False], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, False], [False, False, False]],
"magmom": [5, 5],
},
]
self.assertEqual(traj_combined.site_properties, expected_site_props)
# Trajectory with no and trajectory with changing site properties
site_properties_1 = None
traj_1 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_1)
site_properties_2 = [
{
"selective_dynamics": [[False, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, False], [False, False, False]],
"magmom": [5, 5],
},
]
traj_2 = Trajectory(lattice, species, frac_coords, site_properties=site_properties_2)
# Test combining two trajectories with similar site_properties
traj_combined = traj_1.copy()
traj_combined.extend(traj_2)
expected_site_props = [
None,
None,
None,
{
"selective_dynamics": [[False, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, False], [False, False, False]],
"magmom": [5, 5],
},
]
self.assertEqual(traj_combined.site_properties, expected_site_props)
# The other way around
traj_combined = traj_2.copy()
traj_combined.extend(traj_1)
expected_site_props = [
{
"selective_dynamics": [[False, True, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, False, True], [False, False, False]],
"magmom": [5, 5],
},
{
"selective_dynamics": [[True, True, False], [False, False, False]],
"magmom": [5, 5],
},
None,
None,
None,
]
self.assertEqual(traj_combined.site_properties, expected_site_props)
def test_extend_no_frame_props(self):
lattice = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
species = ["Si", "Si"]
frac_coords = [
[[0, 0, 0], [0.5, 0.5, 0.5]],
[[0.1, 0.1, 0.1], [0.6, 0.6, 0.6]],
[[0.2, 0.2, 0.2], [0.7, 0.7, 0.7]],
]
# Trajectory with no site_properties
traj_1 = Trajectory(lattice, species, frac_coords)
traj_2 = Trajectory(lattice, species, frac_coords)
# Test combining two trajectories with no site properties
traj_combined = traj_1.copy()
traj_combined.extend(traj_2)
self.assertEqual(traj_combined.frame_properties, None)
def test_extend_frame_props(self):
lattice = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
species = ["Si", "Si"]
frac_coords = [
[[0, 0, 0], [0.5, 0.5, 0.5]],
[[0.1, 0.1, 0.1], [0.6, 0.6, 0.6]],
[[0.2, 0.2, 0.2], [0.7, 0.7, 0.7]],
]
# Trajectories with constant site properties
frame_properties_1 = {"energy": [-3, -3.9, -4.1]}
traj_1 = Trajectory(lattice, species, frac_coords, frame_properties=frame_properties_1)
frame_properties_2 = {"energy": [-4.2, -4.25, -4.3]}
traj_2 = Trajectory(lattice, species, frac_coords, frame_properties=frame_properties_2)
# Test combining two trajectories with similar site_properties
traj_combined = traj_1.copy()
traj_combined.extend(traj_2)
expected_frame_properties = {"energy": [-3, -3.9, -4.1, -4.2, -4.25, -4.3]}
self.assertEqual(traj_combined.frame_properties, expected_frame_properties)
# Mismatched frame properties
frame_properties_3 = {"energy": [-4.2, -4.25, -4.3], "pressure": [2, 2.5, 2.5]}
traj_3 = Trajectory(lattice, species, frac_coords, frame_properties=frame_properties_3)
traj_combined = traj_1.copy()
traj_combined.extend(traj_3)
expected_frame_properties = {
"energy": [-3, -3.9, -4.1, -4.2, -4.25, -4.3],
"pressure": [None, None, None, 2, 2.5, 2.5],
}
self.assertEqual(traj_combined.frame_properties, expected_frame_properties)
def test_length(self):
self.assertTrue(len(self.traj) == len(self.structures))
def test_displacements(self):
poscar = Poscar.from_file(os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR"))
structures = [poscar.structure]
displacements = np.zeros((11, *np.shape(structures[-1].frac_coords)))
for i in range(10):
displacement = np.random.random_sample(np.shape(structures[-1].frac_coords)) / 20
new_coords = displacement + structures[-1].frac_coords
structures.append(Structure(structures[-1].lattice, structures[-1].species, new_coords))
displacements[i + 1, :, :] = displacement
traj = Trajectory.from_structures(structures, constant_lattice=True)
traj.to_displacements()
self.assertTrue(np.allclose(traj.frac_coords, displacements))
def test_variable_lattice(self):
structure = self.structures[0]
# Generate structures with different lattices
structures = []
for i in range(10):
new_lattice = np.dot(structure.lattice.matrix, np.diag(1 + np.random.random_sample(3) / 20))
temp_struct = structure.copy()
temp_struct.lattice = Lattice(new_lattice)
structures.append(temp_struct)
traj = Trajectory.from_structures(structures, constant_lattice=False)
# Check if lattices were properly stored
self.assertTrue(
all(np.allclose(struct.lattice.matrix, structures[i].lattice.matrix) for i, struct in enumerate(traj))
)
def test_to_from_dict(self):
d = self.traj.as_dict()
traj = Trajectory.from_dict(d)
self.assertEqual(type(traj), Trajectory)
def test_xdatcar_write(self):
self.traj.write_Xdatcar(filename="traj_test_XDATCAR")
# Load trajectory from written xdatcar and compare to original
written_traj = Trajectory.from_file("traj_test_XDATCAR")
self._check_traj_equality(self.traj, written_traj)
os.remove("traj_test_XDATCAR")
if __name__ == "__main__":
import unittest
unittest.main()
|
gmatteo/pymatgen
|
pymatgen/core/tests/test_trajectory.py
|
Python
|
mit
| 19,694
|
"""
Evaluate the value of an arithmetic expression in Reverse Polish Notation.
Valid operators are +, -, *, /. Each operand may be an integer or another expression.
Some examples:
["2", "1", "+", "3", "*"] -> ((2 + 1) * 3) -> 9
["4", "13", "5", "/", "+"] -> (4 + (13 / 5)) -> 6
"""
class Solution:
# @param tokens, a list of string
# @return an integer
def evalRPN(self, tokens):
stack = []
for token in tokens:
if self.isOperator(token):
b = stack.pop()
a = stack.pop()
c = self.calc(a, b, token)
stack.append(c)
else:
stack.append(int(token))
return stack.pop()
def isOperator(self, op):
if op == "+" or op == "-" or op == "*" or op == "/":
return True
return False
def calc(self, a, b, op):
if op == "+":
return a+b
if op == "-":
return a-b
if op == "*":
return a*b
if op == "/": #python weired division
da = -1 * a if a < 0 else a
db = -1 * b if b < 0 else b
r = da/db
if (a < 0 and b > 0) or (a > 0 and b < 0):
r = -1 * r
return r
s = Solution()
l = ["-78","-33","196","+","-19","-","115","+","-","-99","/","-18","8","*","-86","-","-","16","/","26","-14","-","-","47","-","101","-","163","*","143","-","0","-","171","+","120","*","-60","+","156","/","173","/","-24","11","+","21","/","*","44","*","180","70","-40","-","*","86","132","-84","+","*","-","38","/","/","21","28","/","+","83","/","-31","156","-","+","28","/","95","-","120","+","8","*","90","-","-94","*","-73","/","-62","/","93","*","196","-","-59","+","187","-","143","/","-79","-89","+","-"]
print s.evalRPN(l)
|
Ahmed--Mohsen/leetcode
|
evaluate_reverse_polish_notation.py
|
Python
|
mit
| 1,580
|
# -*- coding: utf-8 -*-
# Copyright 2012 Loris Corazza, Sakis Christakidis
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import task,reactor
from twisted.enterprise import adbapi
from collections import deque
import os
import time
class DB(object):
def __init__(self, dir,port):
name='stats'+str(port)+'.db'
dbname= os.path.join(dir,name)
self.dbpool=adbapi.ConnectionPool('sqlite3',dbname, check_same_thread=False)
d=self.deleteDB()
d.addCallback(self.createDB)
def getKeys(self):
expr='SELECT DISTINCT comp,sid,name FROM stat'
return self.dbpool.runQuery((expr))
def deleteDB(self):
return self.dbpool.runOperation('DROP TABLE IF EXISTS stat')
def createDB(self,d):
return self.dbpool.runOperation('CREATE TABLE stat(id INTEGER PRIMARY KEY AUTOINCREMENT, comp TEXT, sid INTEGER, name TEXT, value REAL, x REAL, time REAL, lpb INTEGER) ')
def update(self,stats):
d=self.dbpool.runInteraction(self._commitRecord,stats)
def _commitRecord(self,txn,args):
for arg in args:
txn.execute('INSERT INTO stat(comp, sid, name, value, x, time, lpb) VALUES(?,?,?,?,?,?,?)',arg)
def getRecords(self,expr):
d=self._getRecords(expr)
d.addCallback(self.updateId)
return d
def _getRecords(self,expr):
expr = 'SELECT * FROM stat '+expr
return self.dbpool.runQuery((expr))
def updateId(self,stats):
ret={}
for s in stats:
key=(s[1],s[2],s[3])
if (key) not in ret:
ret[key]=[]
ret[key].append([s[4],s[5],s[6],s[7]])
return ret
def stop(self):
self.dbpool.disconnect()
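# A minimal usage sketch (hypothetical values; assumes a running Twisted
# reactor, since adbapi executes the queries in a thread pool and returns
# Deferreds):
#
#   db = DB('/tmp', 9000)
#   db.update([('overlay', 1, 'bw', 512.0, 0.0, time.time(), 0)])
#   d = db.getRecords("WHERE comp='overlay'")
#   d.addCallback(lambda records: reactor.stop())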
|
schristakidis/p2ner
|
p2ner/components/stats/dbstats/dbstats/db.py
|
Python
|
apache-2.0
| 2,264
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v8.services",
marshal="google.ads.googleads.v8",
manifest={"GetTopicConstantRequest",},
)
class GetTopicConstantRequest(proto.Message):
r"""Request message for
[TopicConstantService.GetTopicConstant][google.ads.googleads.v8.services.TopicConstantService.GetTopicConstant].
Attributes:
resource_name (str):
Required. Resource name of the Topic to
fetch.
"""
resource_name = proto.Field(proto.STRING, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
googleads/google-ads-python
|
google/ads/googleads/v8/services/types/topic_constant_service.py
|
Python
|
apache-2.0
| 1,217
|
# This module contains functions used to manipulate the AstroData object
import numpy as np
from astrodata import Errors
from astrodata.adutils import logutils
def remove_single_length_dimension(adinput=None):
"""
If there is only one single length dimension in the pixel data, the
remove_single_length_dimension function will remove the single length
dimension. In addition, this function removes any keywords associated with
that dimension. Used by the standardizeStructure primitive in
primitives_F2.py.
"""
# Instantiate the log
log = logutils.get_logger(__name__)
if adinput is None:
raise Errors.InputError()
for ext in adinput:
# Ensure that there is only one single length dimension in the pixel
# data
if ext.data.shape.count(1) == 1:
# Determine the position of the single length dimension in the
# tuple of array dimensions output by ext.data.shape
for i, data_length in enumerate(ext.data.shape):
if ext.data.shape[i] == 1:
position = i
# numpy arrays use 0-based indexing and the axes are ordered from
# slow to fast. So, if the position of the single length dimension
# is located in e.g., ext.data.shape[0], the dimension number of
# the FITS pixel data array is ext.data.ndim + 1 (since FITS pixel
# data arrays use 1-based indexing).
position_list = [x for x in range(ext.data.ndim)]
position_list.reverse()
dimension_number = position_list[position] + 1
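# Worked example: for ext.data.shape == (1, 2048, 2048), position == 0,
# position_list reversed is [2, 1, 0], so dimension_number == 2 + 1 == 3,
# i.e. the single length dimension is FITS axis NAXIS3.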
# The np.squeeze method only removes a dimension from the array if
# the dimension has a length equal to 1
log.status("Removing the third dimension from %s" %
adinput.filename)
ext.data = np.squeeze(ext.data)
# Set the NAXIS keyword appropriately now that a dimension has been
# removed
ext.set_key_value("NAXIS", ext.data.ndim)
# Remove the keywords relating to the dimension that has been
# removed (IRAF seems to add WCSDIM=3, CTYPE3='LINEAR ', CD3_3=1.,
# LTM1_1=1., LTM2_2=1., LTM3_3=1., WAXMAP01='1 0 2 0 0 0 ',
# WAT0_001='system=image', WAT1_001='wtype=tan axtype=ra' and
# WAT2_001= 'wtype=tan axtype=dec' when doing e.g., imcopy
# f2.fits[*,*,1], so perhaps these should be removed as well?)
keywords = ("NAXIS%(dn)s, AXISLAB%(dn)s" % {"dn":dimension_number})
keyword_list = keywords.split(",")
for keyword in keyword_list:
del ext.header[keyword]
else:
log.warning("No changes will be made to %s, since there was not "
"only one single length dimension in the pixel data" %
adinput.filename)
# This should be a log.debug call, but that doesn't appear to work
# right now, so using log.fullinfo
log.fullinfo("Updated dimensions of %s[%s,%d] = %s" % (
adinput.filename, ext.extname(), ext.extver(), ext.data.shape))
return adinput
|
pyrrho314/recipesystem
|
trunk/gempy/adlibrary/manipulate_ad.py
|
Python
|
mpl-2.0
| 3,293
|
from copy import copy
import random
prev = 1805
curr = 2150
MIN_C = 0
MAX_C = 4999
direction = 1 if curr - prev > 0 else -1
arr = [2069, 1212, 2296, 2800, 544, 1618, 356, 1523, 4965, 3681]
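# Disk-head scheduling simulation: starting from head position 'curr' (last
# move direction given by 'direction'), each function below prints the service
# order for the pending cylinder requests in 'arr' and the total head movement
# for one algorithm (FCFS, SSTF, SCAN, LOOK, C-SCAN, C-LOOK).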
def prt_move_msg(f, t):
print('move from cylinder {} to cylinder {} '.format(f, t))
def search_min_and_return_index(array):
min = array[0]
index = 0
for i, v in enumerate(array):
if v < min:
index = i
min = v
return index
def search_index(array, v):
# Return the index of the first occurrence of v in array, or -1 if absent
for i, _v in enumerate(array):
if v == _v:
return i
return -1
def split_by_value(sorted_arr, v):
lo = []
hi = []
for i in sorted_arr:
if i > v:
hi.append(i)
elif i < v:
lo.append(i)
return lo, hi
def fcfs():
print(_fcfs(arr, curr))
def _fcfs(_arr, _curr):
total = 0
total += abs(_arr[0] - _curr)
prt_move_msg(_curr, _arr[0])
for i in range(len(_arr) - 1):
total += abs(_arr[i] - _arr[i + 1])
prt_move_msg(_arr[i], _arr[i + 1])
return total
def sstf():
total = 0
pcurr = curr
_arr = copy(arr)
for nouse in range(len(_arr)):
disarr = copy(_arr)
for i, v in enumerate(_arr):
disarr[i] = abs(v - pcurr)
min_index = search_min_and_return_index(disarr)
total += abs(_arr[min_index] - pcurr)
prt_move_msg(pcurr, _arr[min_index])
pcurr = _arr[min_index]
del _arr[min_index]
print(total)
def scan():
total = 0
parr = sorted(arr)
pcurr = curr
lo, hi = split_by_value(parr, curr)
rev = [0, 1] if direction > 0 else [1, 0]
hi.sort(reverse=rev[0])
lo.sort(reverse=rev[1])
hilo = [hi, lo] if direction == 1 else [lo, hi]
total += _fcfs(hilo[0], pcurr)
total += MAX_C - hi[-1]
prt_move_msg(hi[-1], MAX_C)
pcurr = MAX_C
total += _fcfs(hilo[1], pcurr)
print(total)
def look():
total = 0
parr = sorted(arr)
pcurr = curr
lo, hi = split_by_value(parr, curr)
rev = [0, 1] if direction > 0 else [1, 0]
hi.sort(reverse=rev[0])
lo.sort(reverse=rev[1])
hilo = [hi, lo] if direction == 1 else [lo, hi]
total += _fcfs(hilo[0], pcurr)
pcurr = hilo[0][-direction]
total += _fcfs(hilo[1], pcurr)
print(total)
def cscan():
total = 0
parr = sorted(arr)
pcurr = curr
lo, hi = split_by_value(parr, curr)
rev = 0 if direction > 0 else 1
hi.sort(reverse=rev)
lo.sort(reverse=rev)
hilo = [hi, lo] if direction == 1 else [lo, hi]
total += _fcfs(hilo[0], pcurr)
total += MAX_C - hi[-1]
prt_move_msg(hi[-1], MAX_C)
total += MAX_C - MIN_C
prt_move_msg(MAX_C, MIN_C)
pcurr = MIN_C
total += _fcfs(hilo[1], pcurr)
print(total)
def clook():
total = 0
parr = sorted(arr)
pcurr = curr
lo, hi = split_by_value(parr, curr)
rev = 0 if direction > 0 else 1
hi.sort(reverse=rev)
lo.sort(reverse=rev)
hilo = [hi, lo] if direction == 1 else [lo, hi]
total += _fcfs(hilo[0], pcurr)
pcurr = hilo[0][-1]
total += _fcfs(hilo[1], pcurr)
print(total)
def main():
global arr, prev, curr
while True:
arr=list()
prev=int(input('input initial position(0-4999): '))
curr=int(random.uniform(0,4999))
arr.append(curr)
for i in range(999):
arr.append(int(random.uniform(0,4999)))
while True:
s = input('method: ')
if s == 'fcfs':
fcfs()
elif s == 'sstf':
sstf()
elif s == 'scan':
scan()
elif s == 'look':
look()
elif s == 'cscan':
cscan()
elif s == 'clook':
clook()
elif s == 'exit':
break
if __name__ == '__main__':
main()
|
CubicPill/wtfcodes
|
py/disk_sched_calc.py
|
Python
|
mit
| 3,899
|
"""SCons.Tool.sgic++
Tool-specific initialization for MIPSpro C++ on SGI.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sgic++.py rel_2.4.0:3365:9259ea1c13d7 2015/09/21 14:03:43 bdbaddog"
import SCons.Util
cplusplus = __import__('c++', globals(), locals(), [])
def generate(env):
"""Add Builders and construction variables for SGI MIPS C++ to an Environment."""
cplusplus.generate(env)
env['CXX'] = 'CC'
env['CXXFLAGS'] = SCons.Util.CLVar('-LANG:std')
env['SHCXX'] = '$CXX'
env['SHOBJSUFFIX'] = '.o'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
def exists(env):
return env.Detect('CC')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
Uli1/mapnik
|
scons/scons-local-2.4.0/SCons/Tool/sgic++.py
|
Python
|
lgpl-2.1
| 2,025
|
codes = {
307: 'Temporary Redirect',
303: 'See Other',
302: 'Found',
301: 'Moved Permanently'
}
def authRequired(realm):
return {
'status': 401,
'reason': 'Authentication Required',
'headers': [
('Content-type','text/plain'),
('WWW-Authenticate', 'Basic realm="%s"' % realm)
],
'body': 'Authentication required.'
}
def redirect(url, status=303):
if status not in codes:
raise ValueError('redirect called with unknown status value')
return {
'status': status,
'reason': codes[status],
'headers': [
('Content-type', 'text/plain'),
('Location', url)
],
'body': 'Moved to %s' % url
}
def wrongMethod():
return {
'status': 405,
'reason': 'Method Not Allowed',
'headers': [('Content-type', 'text/plain')],
'body': 'The request was issued with a method not allowed for this resource.'
}
def css(body):
return ok('text/css', body)
def plain(body):
return ok('text/plain', body)
def html(body):
return ok('text/html', body)
def ok(ctype, body):
return {
'status': 200,
'reason': 'OK',
'headers': [('Content-type',ctype)],
'body': body
}
def notFound():
return {
'status': 404,
'reason': 'Not Found',
'headers': [('Content-type','text/plain')],
'body': 'The requested resource cannot be found.'
}
notfound = notFound
def forbidden():
return {
'status': 403,
'reason': 'Forbidden',
'headers': [('Content-type','text/plain')],
'body': 'You do not have access to the requested resource.'
}
def is_a_resp(x):
if type(x)!=dict:
return False
if not x.has_key('status'):
return False
if not x.has_key('reason'):
return False
if not x.has_key('body'):
return False
if not x.has_key('headers'):
return False
return True
def error_verbose(code=None, report=None):
r = {
'status': 500,
'reason': 'Internal Server Error',
'headers': [('Content-type','text/plain')],
'body': '500 Internal Server Error. Error code: %s.' % str(code)
}
r['body'] += '\n\n-------------------------------------------\n'
r['body'] += 'Error Detail:\n\n'
r['body'] += '\n'.join(report)
return r
def error_vague(code=None):
r = {
'status': 500,
'reason': 'Internal Server Error',
'headers': [('Content-type','text/plain')],
'body': '500 Internal Server Error. Error code: %s.' % str(code)
}
return r
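# A minimal usage sketch (the handler and request shape are hypothetical; each
# helper just builds a response dict in the shape that is_a_resp checks):
#
#   def handler(req):
#       if not req.get('user'):
#           return authRequired('swaf')
#       return redirect('/home', status=302)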
|
xelphene/swaf
|
swaf/resp.py
|
Python
|
gpl-3.0
| 2,300
|
from collections.abc import Iterator # Iterator lives in collections.abc since Python 3.3
print(isinstance([], Iterator))
print(isinstance((x for x in range(10)), Iterator))
print(isinstance({}, Iterator))
it = iter([1, 2, 3, 4, 5])
# Loop:
while True:
try:
# Get the next value:
x = next(it)
print(x)
except StopIteration:
# Exit the loop when StopIteration is raised
break
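# The while/next() loop above is what a "for x in it:" loop does internally:
# keep calling next() until StopIteration is raised.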
|
IIIIIIIIll/sdy_notes_liaoxf
|
LiaoXueFeng/Advanced_properties/iterator.py
|
Python
|
gpl-3.0
| 376
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import ntpath
import os
import posixpath
import re
import subprocess
import sys
from collections import OrderedDict
import gyp.common
import gyp.easy_xml as easy_xml
import gyp.generator.ninja as ninja_generator
import gyp.MSVSNew as MSVSNew
import gyp.MSVSProject as MSVSProject
import gyp.MSVSSettings as MSVSSettings
import gyp.MSVSToolFile as MSVSToolFile
import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
from gyp.common import OrderedSet
PY3 = bytes != str
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
# IncrediBuild BuildConsole will parse the solution file, but then
# silently skip building the target causing hard to track down errors.
# Note that this only happens with the BuildConsole, and does not occur
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters.
VALID_MSVS_GUID_CHARS = re.compile(r"^[A-F0-9\-]+$")
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
generator_default_variables = {
"DRIVER_PREFIX": "",
"DRIVER_SUFFIX": ".sys",
"EXECUTABLE_PREFIX": "",
"EXECUTABLE_SUFFIX": ".exe",
"STATIC_LIB_PREFIX": "",
"SHARED_LIB_PREFIX": "",
"STATIC_LIB_SUFFIX": ".lib",
"SHARED_LIB_SUFFIX": ".dll",
"INTERMEDIATE_DIR": "$(IntDir)",
"SHARED_INTERMEDIATE_DIR": "$(OutDir)/obj/global_intermediate",
"OS": "win",
"PRODUCT_DIR": "$(OutDir)",
"LIB_DIR": "$(OutDir)lib",
"RULE_INPUT_ROOT": "$(InputName)",
"RULE_INPUT_DIRNAME": "$(InputDir)",
"RULE_INPUT_EXT": "$(InputExt)",
"RULE_INPUT_NAME": "$(InputFileName)",
"RULE_INPUT_PATH": "$(InputPath)",
"CONFIGURATION_NAME": "$(ConfigurationName)",
}
# The msvs specific sections that hold paths
generator_additional_path_sections = [
"msvs_cygwin_dirs",
"msvs_props",
]
generator_additional_non_configuration_keys = [
"msvs_cygwin_dirs",
"msvs_cygwin_shell",
"msvs_large_pdb",
"msvs_shard",
"msvs_external_builder",
"msvs_external_builder_out_dir",
"msvs_external_builder_build_cmd",
"msvs_external_builder_clean_cmd",
"msvs_external_builder_clcompile_cmd",
"msvs_enable_winrt",
"msvs_requires_importlibrary",
"msvs_enable_winphone",
"msvs_application_type_revision",
"msvs_target_platform_version",
"msvs_target_platform_minversion",
]
generator_filelist_paths = None
# List of precompiled header related keys.
precomp_keys = [
"msvs_precompiled_header",
"msvs_precompiled_source",
]
cached_username = None
cached_domain = None
# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
# 64-bit.
def _GetDomainAndUserName():
if sys.platform not in ("win32", "cygwin"):
return ("DOMAIN", "USERNAME")
global cached_username
global cached_domain
if not cached_domain or not cached_username:
domain = os.environ.get("USERDOMAIN")
username = os.environ.get("USERNAME")
if not domain or not username:
call = subprocess.Popen(
["net", "config", "Workstation"], stdout=subprocess.PIPE
)
config = call.communicate()[0]
if PY3:
config = config.decode("utf-8")
username_re = re.compile(r"^User name\s+(\S+)", re.MULTILINE)
username_match = username_re.search(config)
if username_match:
username = username_match.group(1)
domain_re = re.compile(r"^Logon domain\s+(\S+)", re.MULTILINE)
domain_match = domain_re.search(config)
if domain_match:
domain = domain_match.group(1)
cached_domain = domain
cached_username = username
return (cached_domain, cached_username)
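# Illustrative sketch (sample output assumed, not captured from a real
# machine): "net config Workstation" prints lines such as
#     User name                    alice
#     Logon domain                 CORP
# which the two regexes above would parse into ("CORP", "alice").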
fixpath_prefix = None
def _NormalizedSource(source):
"""Normalize the path.
But not if that gets rid of a variable, as this may expand to something
larger than one directory.
Arguments:
source: The path to be normalized.
Returns:
The normalized path.
"""
normalized = os.path.normpath(source)
if source.count("$") == normalized.count("$"):
source = normalized
return source
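# Illustrative examples (assumed inputs, POSIX separators shown):
#   _NormalizedSource('a/./b/../c.cc') -> 'a/c.cc', since normalizing
#     does not change the number of '$' characters (zero).
#   _NormalizedSource('$(IntDir)/../x.cc') -> unchanged, because normpath
#     would collapse the macro away and drop a '$'.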
def _FixPath(path):
"""Convert paths to a form that will make sense in a vcproj file.
Arguments:
path: The path to convert, may contain / etc.
Returns:
The path with all slashes made into backslashes.
"""
if (
fixpath_prefix
and path
and not os.path.isabs(path)
and not path[0] == "$"
and not _IsWindowsAbsPath(path)
):
path = os.path.join(fixpath_prefix, path)
path = path.replace("/", "\\")
path = _NormalizedSource(path)
if path and path[-1] == "\\":
path = path[:-1]
return path
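# Illustrative example (assumed prefix): with fixpath_prefix == r'..\..',
# _FixPath('src/foo.cc') yields r'..\..\src\foo.cc', while
# _FixPath('$(IntDir)/gen/foo.cc') only flips slashes to
# r'$(IntDir)\gen\foo.cc', since paths starting with '$' are never joined
# onto the prefix.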
def _IsWindowsAbsPath(path):
"""
On Cygwin systems Python needs a little help determining if a path
is an absolute Windows path or not, so that
it does not treat those as relative, which results in bad paths like:
'..\\C:\\<some path>\\some_source_code_file.cc'
"""
return path.startswith("c:") or path.startswith("C:")
def _FixPaths(paths):
"""Fix each of the paths of the list."""
return [_FixPath(i) for i in paths]
def _ConvertSourcesToFilterHierarchy(
sources, prefix=None, excluded=None, list_excluded=True, msvs_version=None
):
"""Converts a list split source file paths into a vcproj folder hierarchy.
Arguments:
sources: A list of source file paths split.
prefix: A list of source file path layers meant to apply to each of sources.
excluded: A set of excluded files.
msvs_version: A MSVSVersion object.
Returns:
A hierarchy of filenames and MSVSProject.Filter objects that matches the
layout of the source tree.
For example:
_ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
prefix=['joe'])
-->
[MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
"""
if not prefix:
prefix = []
result = []
excluded_result = []
folders = OrderedDict()
# Gather files into the final result, excluded, or folders.
for s in sources:
if len(s) == 1:
filename = _NormalizedSource("\\".join(prefix + s))
if filename in excluded:
excluded_result.append(filename)
else:
result.append(filename)
elif msvs_version and not msvs_version.UsesVcxproj():
# For MSVS 2008 and earlier, we need to process all files before walking
# the sub folders.
if not folders.get(s[0]):
folders[s[0]] = []
folders[s[0]].append(s[1:])
else:
contents = _ConvertSourcesToFilterHierarchy(
[s[1:]],
prefix + [s[0]],
excluded=excluded,
list_excluded=list_excluded,
msvs_version=msvs_version,
)
contents = MSVSProject.Filter(s[0], contents=contents)
result.append(contents)
# Add a folder for excluded files.
if excluded_result and list_excluded:
excluded_folder = MSVSProject.Filter(
"_excluded_files", contents=excluded_result
)
result.append(excluded_folder)
if msvs_version and msvs_version.UsesVcxproj():
return result
# Populate all the folders.
for f in folders:
contents = _ConvertSourcesToFilterHierarchy(
folders[f],
prefix=prefix + [f],
excluded=excluded,
list_excluded=list_excluded,
msvs_version=msvs_version,
)
contents = MSVSProject.Filter(f, contents=contents)
result.append(contents)
return result
def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
if not value:
return
_ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
# TODO(bradnelson): ugly hack, fix this more generally!!!
if "Directories" in setting or "Dependencies" in setting:
if type(value) == str:
value = value.replace("/", "\\")
else:
value = [i.replace("/", "\\") for i in value]
if not tools.get(tool_name):
tools[tool_name] = dict()
tool = tools[tool_name]
if "CompileAsWinRT" == setting:
return
if tool.get(setting):
if only_if_unset:
return
if type(tool[setting]) == list and type(value) == list:
tool[setting] += value
else:
raise TypeError(
'Appending "%s" to a non-list setting "%s" for tool "%s" is '
"not allowed, previous value: %s"
% (value, setting, tool_name, str(tool[setting]))
)
else:
tool[setting] = value
def _ConfigTargetVersion(config_data):
return config_data.get("msvs_target_version", "Windows7")
def _ConfigPlatform(config_data):
return config_data.get("msvs_configuration_platform", "Win32")
def _ConfigBaseName(config_name, platform_name):
if config_name.endswith("_" + platform_name):
return config_name[0 : -len(platform_name) - 1]
else:
return config_name
def _ConfigFullName(config_name, config_data):
platform_name = _ConfigPlatform(config_data)
return "%s|%s" % (_ConfigBaseName(config_name, platform_name), platform_name)
def _ConfigWindowsTargetPlatformVersion(config_data, version):
target_ver = config_data.get("msvs_windows_target_platform_version")
if target_ver and re.match(r"^\d+", target_ver):
return target_ver
config_ver = config_data.get("msvs_windows_sdk_version")
vers = [config_ver] if config_ver else version.compatible_sdks
for ver in vers:
for key in [
r"HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s",
r"HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s",
]:
sdk_dir = MSVSVersion._RegistryGetValue(key % ver, "InstallationFolder")
if not sdk_dir:
continue
version = MSVSVersion._RegistryGetValue(key % ver, "ProductVersion") or ""
# Find a matching entry in sdk_dir\include.
expected_sdk_dir = r"%s\include" % sdk_dir
names = sorted(
[
x
for x in (
os.listdir(expected_sdk_dir)
if os.path.isdir(expected_sdk_dir)
else []
)
if x.startswith(version)
],
reverse=True,
)
if names:
return names[0]
else:
print(
"Warning: No include files found for detected "
"Windows SDK version %s" % (version),
file=sys.stderr,
)
def _BuildCommandLineForRuleRaw(
spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env
):
if [x for x in cmd if "$(InputDir)" in x]:
input_dir_preamble = (
"set INPUTDIR=$(InputDir)\n"
"if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n"
"set INPUTDIR=%INPUTDIR:~0,-1%\n"
)
else:
input_dir_preamble = ""
if cygwin_shell:
# Find path to cygwin.
cygwin_dir = _FixPath(spec.get("msvs_cygwin_dirs", ["."])[0])
# Prepare command.
direct_cmd = cmd
direct_cmd = [
i.replace("$(IntDir)", '`cygpath -m "${INTDIR}"`') for i in direct_cmd
]
direct_cmd = [
i.replace("$(OutDir)", '`cygpath -m "${OUTDIR}"`') for i in direct_cmd
]
direct_cmd = [
i.replace("$(InputDir)", '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd
]
if has_input_path:
direct_cmd = [
i.replace("$(InputPath)", '`cygpath -m "${INPUTPATH}"`')
for i in direct_cmd
]
direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
# direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
direct_cmd = " ".join(direct_cmd)
# TODO(quote): regularize quoting path names throughout the module
cmd = ""
if do_setup_env:
cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
cmd += "set CYGWIN=nontsec&& "
if direct_cmd.find("NUMBER_OF_PROCESSORS") >= 0:
cmd += "set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& "
if direct_cmd.find("INTDIR") >= 0:
cmd += "set INTDIR=$(IntDir)&& "
if direct_cmd.find("OUTDIR") >= 0:
cmd += "set OUTDIR=$(OutDir)&& "
if has_input_path and direct_cmd.find("INPUTPATH") >= 0:
cmd += "set INPUTPATH=$(InputPath) && "
cmd += 'bash -c "%(cmd)s"'
cmd = cmd % {"cygwin_dir": cygwin_dir, "cmd": direct_cmd}
return input_dir_preamble + cmd
else:
# Convert cat --> type to mimic unix.
if cmd[0] == "cat":
command = ["type"]
else:
command = [cmd[0].replace("/", "\\")]
# Add call before command to ensure that commands can be tied together one
# after the other without aborting in Incredibuild, since IB makes a bat
# file out of the raw command string, and some commands (like python) are
# actually batch files themselves.
command.insert(0, "call")
# Fix the paths
# TODO(quote): This is a really ugly heuristic, and will miss path fixing
# for arguments like "--arg=path" or "/opt:path".
# If the argument starts with a slash or dash, it's probably a command line
# switch
arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments]
arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
if quote_cmd:
# Support a mode for using cmd directly.
# Convert any paths to native form (first element is used directly).
# TODO(quote): regularize quoting path names throughout the module
arguments = ['"%s"' % i for i in arguments]
# Collapse into a single command.
return input_dir_preamble + " ".join(command + arguments)
def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
# Currently this weird argument munging is used to duplicate the way a
# python script would need to be run as part of the chrome tree.
# Eventually we should add some sort of rule_default option to set this
# per project. For now the behavior chrome needs is the default.
mcs = rule.get("msvs_cygwin_shell")
if mcs is None:
mcs = int(spec.get("msvs_cygwin_shell", 1))
elif isinstance(mcs, str):
mcs = int(mcs)
quote_cmd = int(rule.get("msvs_quote_cmd", 1))
return _BuildCommandLineForRuleRaw(
spec, rule["action"], mcs, has_input_path, quote_cmd, do_setup_env=do_setup_env
)
def _AddActionStep(actions_dict, inputs, outputs, description, command):
"""Merge action into an existing list of actions.
Care must be taken so that actions which have overlapping inputs either don't
get assigned to the same input, or get collapsed into one.
Arguments:
actions_dict: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
inputs: list of inputs
outputs: list of outputs
description: description of the action
command: command line to execute
"""
# Require there to be at least one input (call sites will ensure this).
assert inputs
action = {
"inputs": inputs,
"outputs": outputs,
"description": description,
"command": command,
}
# Pick where to stick this action.
# While less than optimal in terms of build time, attach them to the first
# input for now.
chosen_input = inputs[0]
# Add it there.
if chosen_input not in actions_dict:
actions_dict[chosen_input] = []
actions_dict[chosen_input].append(action)
def _AddCustomBuildToolForMSVS(
p, spec, primary_input, inputs, outputs, description, cmd
):
"""Add a custom build tool to execute something.
Arguments:
p: the target project
spec: the target project dict
primary_input: input file to attach the build tool to
inputs: list of inputs
outputs: list of outputs
description: description of the action
cmd: command line to execute
"""
inputs = _FixPaths(inputs)
outputs = _FixPaths(outputs)
tool = MSVSProject.Tool(
"VCCustomBuildTool",
{
"Description": description,
"AdditionalDependencies": ";".join(inputs),
"Outputs": ";".join(outputs),
"CommandLine": cmd,
},
)
# Add to the properties of primary input for each config.
for config_name, c_data in spec["configurations"].items():
p.AddFileConfig(
_FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool]
)
def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
"""Add actions accumulated into an actions_dict, merging as needed.
Arguments:
p: the target project
spec: the target project dict
actions_dict: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
"""
for primary_input in actions_dict:
inputs = OrderedSet()
outputs = OrderedSet()
descriptions = []
commands = []
for action in actions_dict[primary_input]:
inputs.update(OrderedSet(action["inputs"]))
outputs.update(OrderedSet(action["outputs"]))
descriptions.append(action["description"])
commands.append(action["command"])
# Add the custom build step for one input file.
description = ", and also ".join(descriptions)
command = "\r\n".join(commands)
_AddCustomBuildToolForMSVS(
p,
spec,
primary_input=primary_input,
inputs=inputs,
outputs=outputs,
description=description,
cmd=command,
)
def _RuleExpandPath(path, input_file):
"""Given the input file to which a rule applied, string substitute a path.
Arguments:
path: a path to string expand
input_file: the file to which the rule applied.
Returns:
The string substituted path.
"""
path = path.replace(
"$(InputName)", os.path.splitext(os.path.split(input_file)[1])[0]
)
path = path.replace("$(InputDir)", os.path.dirname(input_file))
path = path.replace(
"$(InputExt)", os.path.splitext(os.path.split(input_file)[1])[1]
)
path = path.replace("$(InputFileName)", os.path.split(input_file)[1])
path = path.replace("$(InputPath)", input_file)
return path
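# Illustrative examples (assumed input file 'dir/foo.idl'):
#   _RuleExpandPath('$(InputName).h', 'dir/foo.idl')  -> 'foo.h'
#   _RuleExpandPath('$(InputDir)/out/$(InputFileName)', 'dir/foo.idl')
#       -> 'dir/out/foo.idl'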
def _FindRuleTriggerFiles(rule, sources):
"""Find the list of files which a particular rule applies to.
Arguments:
rule: the rule in question
sources: the set of all known source files for this project
Returns:
The list of sources that trigger a particular rule.
"""
return rule.get("rule_sources", [])
def _RuleInputsAndOutputs(rule, trigger_file):
"""Find the inputs and outputs generated by a rule.
Arguments:
rule: the rule in question.
trigger_file: the main trigger for this rule.
Returns:
The pair of (inputs, outputs) involved in this rule.
"""
raw_inputs = _FixPaths(rule.get("inputs", []))
raw_outputs = _FixPaths(rule.get("outputs", []))
inputs = OrderedSet()
outputs = OrderedSet()
inputs.add(trigger_file)
for i in raw_inputs:
inputs.add(_RuleExpandPath(i, trigger_file))
for o in raw_outputs:
outputs.add(_RuleExpandPath(o, trigger_file))
return (inputs, outputs)
def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
"""Generate a native rules file.
Arguments:
p: the target project
rules: the set of rules to include
output_dir: the directory in which the project/gyp resides
spec: the project dict
options: global generator options
"""
rules_filename = "%s%s.rules" % (spec["target_name"], options.suffix)
rules_file = MSVSToolFile.Writer(
os.path.join(output_dir, rules_filename), spec["target_name"]
)
# Add each rule.
for r in rules:
rule_name = r["rule_name"]
rule_ext = r["extension"]
inputs = _FixPaths(r.get("inputs", []))
outputs = _FixPaths(r.get("outputs", []))
# Skip a rule with no action and no inputs.
if "action" not in r and not r.get("rule_sources", []):
continue
cmd = _BuildCommandLineForRule(spec, r, has_input_path=True, do_setup_env=True)
rules_file.AddCustomBuildRule(
name=rule_name,
description=r.get("message", rule_name),
extensions=[rule_ext],
additional_dependencies=inputs,
outputs=outputs,
cmd=cmd,
)
# Write out rules file.
rules_file.WriteIfChanged()
# Add rules file to project.
p.AddToolFile(rules_filename)
def _Cygwinify(path):
path = path.replace("$(OutDir)", "$(OutDirCygwin)")
path = path.replace("$(IntDir)", "$(IntDirCygwin)")
return path
def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to_add):
"""Generate an external makefile to do a set of rules.
Arguments:
rules: the list of rules to include
output_dir: path containing project and gyp files
spec: project specification data
sources: set of sources known
options: global generator options
actions_to_add: The list of actions we will add to.
"""
filename = "%s_rules%s.mk" % (spec["target_name"], options.suffix)
mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
# Find cygwin style versions of some paths.
mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
# Gather stuff needed to emit all: target.
all_inputs = OrderedSet()
all_outputs = OrderedSet()
all_output_dirs = OrderedSet()
first_outputs = []
for rule in rules:
trigger_files = _FindRuleTriggerFiles(rule, sources)
for tf in trigger_files:
inputs, outputs = _RuleInputsAndOutputs(rule, tf)
all_inputs.update(OrderedSet(inputs))
all_outputs.update(OrderedSet(outputs))
# Only use one target from each rule as the dependency for
# 'all' so we don't try to build each rule multiple times.
first_outputs.append(list(outputs)[0])
# Get the unique output directories for this rule.
output_dirs = [os.path.split(i)[0] for i in outputs]
for od in output_dirs:
all_output_dirs.add(od)
first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
# Write out all: target, including mkdir for each output directory.
mk_file.write("all: %s\n" % " ".join(first_outputs_cyg))
for od in all_output_dirs:
if od:
mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
mk_file.write("\n")
# Define how each output is generated.
for rule in rules:
trigger_files = _FindRuleTriggerFiles(rule, sources)
for tf in trigger_files:
# Get all the inputs and outputs for this rule for this trigger file.
inputs, outputs = _RuleInputsAndOutputs(rule, tf)
inputs = [_Cygwinify(i) for i in inputs]
outputs = [_Cygwinify(i) for i in outputs]
# Prepare the command line for this rule.
cmd = [_RuleExpandPath(c, tf) for c in rule["action"]]
cmd = ['"%s"' % i for i in cmd]
cmd = " ".join(cmd)
# Add it to the makefile.
mk_file.write("%s: %s\n" % (" ".join(outputs), " ".join(inputs)))
mk_file.write("\t%s\n\n" % cmd)
# Close up the file.
mk_file.close()
# Add makefile to list of sources.
sources.add(filename)
# Add a build action to call makefile.
cmd = [
"make",
"OutDir=$(OutDir)",
"IntDir=$(IntDir)",
"-j",
"${NUMBER_OF_PROCESSORS_PLUS_1}",
"-f",
filename,
]
cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
# Insert makefile as 0'th input, so it gets the action attached there,
# as this is easier to understand in the IDE.
all_inputs = list(all_inputs)
all_inputs.insert(0, filename)
_AddActionStep(
actions_to_add,
inputs=_FixPaths(all_inputs),
outputs=_FixPaths(all_outputs),
description="Running external rules for %s" % spec["target_name"],
command=cmd,
)
def _EscapeEnvironmentVariableExpansion(s):
"""Escapes % characters.
Escapes any % characters so that Windows-style environment variable
expansions will leave them alone.
See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
to understand why we have to do this.
Args:
s: The string to be escaped.
Returns:
The escaped string.
""" # noqa: E731,E123,E501
s = s.replace("%", "%%")
return s
quote_replacer_regex = re.compile(r'(\\*)"')
def _EscapeCommandLineArgumentForMSVS(s):
"""Escapes a Windows command-line argument.
So that the Win32 CommandLineToArgv function will turn the escaped result back
into the original string.
See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
("Parsing C++ Command-Line Arguments") to understand why we have to do
this.
Args:
s: the string to be escaped.
Returns:
the escaped string.
"""
def _Replace(match):
# For a literal quote, CommandLineToArgv requires an odd number of
# backslashes preceding it, and it produces half as many literal backslashes
# (rounded down). So we need to produce 2n+1 backslashes.
return 2 * match.group(1) + '\\"'
# Escape all quotes so that they are interpreted literally.
s = quote_replacer_regex.sub(_Replace, s)
# Now add unescaped quotes so that any whitespace is interpreted literally.
s = '"' + s + '"'
return s
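# Illustrative example (assumed argument): the string
#     he said "hi"
# escapes to
#     "he said \"hi\""
# which CommandLineToArgv parses back into the original single argument.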
delimiters_replacer_regex = re.compile(r"(\\*)([,;]+)")
def _EscapeVCProjCommandLineArgListItem(s):
"""Escapes command line arguments for MSVS.
The VCProj format stores string lists in a single string using commas and
semi-colons as separators, which must be quoted if they are to be
interpreted literally. However, command-line arguments may already have
quotes, and the VCProj parser is ignorant of the backslash escaping
convention used by CommandLineToArgv, so the command-line quotes and the
VCProj quotes may not be the same quotes. So to store a general
command-line argument in a VCProj list, we need to parse the existing
quoting according to VCProj's convention and quote any delimiters that are
not already quoted by that convention. The quotes that we add will also be
seen by CommandLineToArgv, so if backslashes precede them then we also have
to escape those backslashes according to the CommandLineToArgv
convention.
Args:
s: the string to be escaped.
Returns:
the escaped string.
"""
def _Replace(match):
# For a non-literal quote, CommandLineToArgv requires an even number of
# backslashes preceding it, and it produces half as many literal
# backslashes. So we need to produce 2n backslashes.
return 2 * match.group(1) + '"' + match.group(2) + '"'
segments = s.split('"')
# The unquoted segments are at the even-numbered indices.
for i in range(0, len(segments), 2):
segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
# Concatenate back into a single string
s = '"'.join(segments)
if len(segments) % 2 == 0:
# String ends while still quoted according to VCProj's convention. This
# means the delimiter and the next list item that follow this one in the
# .vcproj file will be misinterpreted as part of this item. There is nothing
# we can do about this. Adding an extra quote would correct the problem in
# the VCProj but cause the same problem on the final command-line. Moving
# the item to the end of the list does work, but that's only possible if
# there's only one such item. Let's just warn the user.
print(
"Warning: MSVS may misinterpret the odd number of " + "quotes in " + s,
file=sys.stderr,
)
return s
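# Illustrative example (assumed list item): '-DLIST=a,b' becomes
# '-DLIST=a","b'; the comma is wrapped in VCProj-level quotes so the
# .vcproj string list does not split it into two items.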
def _EscapeCppDefineForMSVS(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = _EscapeEnvironmentVariableExpansion(s)
s = _EscapeCommandLineArgumentForMSVS(s)
s = _EscapeVCProjCommandLineArgListItem(s)
# cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
s = s.replace("#", "\\%03o" % ord("#"))
return s
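# Illustrative example (assumed define): 'FOO="a b"' escapes to
#     "FOO=\"a b\""
# and any '#' in a define is octal-encoded as \043 so cl.exe does not
# rewrite it to '='.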
quote_replacer_regex2 = re.compile(r'(\\+)"')
def _EscapeCommandLineArgumentForMSBuild(s):
"""Escapes a Windows command-line argument for use by MSBuild."""
def _Replace(match):
# Use integer division: plain '/' yields a float under Python 3, and
# multiplying a string by a float raises TypeError.
return (len(match.group(1)) // 2 * 4) * "\\" + '\\"'
# Escape all quotes so that they are interpreted literally.
s = quote_replacer_regex2.sub(_Replace, s)
return s
def _EscapeMSBuildSpecialCharacters(s):
escape_dictionary = {
"%": "%25",
"$": "%24",
"@": "%40",
"'": "%27",
";": "%3B",
"?": "%3F",
"*": "%2A",
}
result = "".join([escape_dictionary.get(c, c) for c in s])
return result
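# Illustrative example (assumed input): _EscapeMSBuildSpecialCharacters(
# "$(Var);*") returns "%24(Var)%3B%2A", per the escape_dictionary above.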
def _EscapeCppDefineForMSBuild(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = _EscapeEnvironmentVariableExpansion(s)
s = _EscapeCommandLineArgumentForMSBuild(s)
s = _EscapeMSBuildSpecialCharacters(s)
# cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
s = s.replace("#", "\\%03o" % ord("#"))
return s
def _GenerateRulesForMSVS(
p, output_dir, options, spec, sources, excluded_sources, actions_to_add
):
"""Generate all the rules for a particular project.
Arguments:
p: the project
output_dir: directory to emit rules to
options: global options passed to the generator
spec: the specification for this project
sources: the set of all known source files in this project
excluded_sources: the set of sources excluded from normal processing
actions_to_add: deferred list of actions to add in
"""
rules = spec.get("rules", [])
rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
# Handle rules that use a native rules file.
if rules_native:
_GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
# Handle external rules (non-native rules).
if rules_external:
_GenerateExternalRules(
rules_external, output_dir, spec, sources, options, actions_to_add
)
_AdjustSourcesForRules(rules, sources, excluded_sources, False)
def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
# Add outputs generated by each rule (if applicable).
for rule in rules:
# Add in the outputs from this rule.
trigger_files = _FindRuleTriggerFiles(rule, sources)
for trigger_file in trigger_files:
# Remove trigger_file from excluded_sources to let the rule be triggered
# (e.g. rule trigger ax_enums.idl is added to excluded_sources
# because it's also in an action's inputs in the same project)
excluded_sources.discard(_FixPath(trigger_file))
# Done if not processing outputs as sources.
if int(rule.get("process_outputs_as_sources", False)):
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
inputs = OrderedSet(_FixPaths(inputs))
outputs = OrderedSet(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
if not is_msbuild:
excluded_sources.update(inputs)
sources.update(outputs)
def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
"""Take inputs with actions attached out of the list of exclusions.
Arguments:
excluded_sources: list of source files not to be built.
actions_to_add: dict of actions keyed on source file they're attached to.
Returns:
excluded_sources with files that have actions attached removed.
"""
must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
return [s for s in excluded_sources if s not in must_keep]
def _GetDefaultConfiguration(spec):
return spec["configurations"][spec["default_configuration"]]
def _GetGuidOfProject(proj_path, spec):
"""Get the guid for the project.
Arguments:
proj_path: Path of the vcproj or vcxproj file to generate.
spec: The target dictionary containing the properties of the target.
Returns:
the guid.
Raises:
ValueError: if the specified GUID is invalid.
"""
# Pluck out the default configuration.
default_config = _GetDefaultConfiguration(spec)
# Decide the guid of the project.
guid = default_config.get("msvs_guid")
if guid:
if VALID_MSVS_GUID_CHARS.match(guid) is None:
raise ValueError(
'Invalid MSVS guid: "%s". Must match regex: "%s".'
% (guid, VALID_MSVS_GUID_CHARS.pattern)
)
guid = "{%s}" % guid
guid = guid or MSVSNew.MakeGuid(proj_path)
return guid
def _GetMsbuildToolsetOfProject(proj_path, spec, version):
"""Get the platform toolset for the project.
Arguments:
proj_path: Path of the vcproj or vcxproj file to generate.
spec: The target dictionary containing the properties of the target.
version: The MSVSVersion object.
Returns:
the platform toolset string or None.
"""
# Pluck out the default configuration.
default_config = _GetDefaultConfiguration(spec)
toolset = default_config.get("msbuild_toolset")
if not toolset and version.DefaultToolset():
toolset = version.DefaultToolset()
if spec["type"] == "windows_driver":
toolset = "WindowsKernelModeDriver10.0"
return toolset
def _GenerateProject(project, options, version, generator_flags, spec):
"""Generates a vcproj file.
Arguments:
project: the MSVSProject object.
options: global generator options.
version: the MSVSVersion object.
generator_flags: dict of generator-specific flags.
spec: the target dictionary.
Returns:
A list of source files that cannot be found on disk.
"""
default_config = _GetDefaultConfiguration(project.spec)
# Skip emitting anything if told to with msvs_existing_vcproj option.
if default_config.get("msvs_existing_vcproj"):
return []
if version.UsesVcxproj():
return _GenerateMSBuildProject(project, options, version, generator_flags, spec)
else:
return _GenerateMSVSProject(project, options, version, generator_flags)
def _GenerateMSVSProject(project, options, version, generator_flags):
"""Generates a .vcproj file. It may create .rules and .user files too.
Arguments:
project: The project object we will generate the file for.
options: Global options passed to the generator.
version: The VisualStudioVersion object.
generator_flags: dict of generator-specific flags.
"""
spec = project.spec
gyp.common.EnsureDirExists(project.path)
platforms = _GetUniquePlatforms(spec)
p = MSVSProject.Writer(
project.path, version, spec["target_name"], project.guid, platforms
)
# Get directory project file is in.
project_dir = os.path.split(project.path)[0]
gyp_path = _NormalizedSource(project.build_file)
relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
config_type = _GetMSVSConfigurationType(spec, project.build_file)
for config_name, config in spec["configurations"].items():
_AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
# Prepare list of sources and excluded sources.
gyp_file = os.path.split(project.build_file)[1]
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file)
# Add rules.
actions_to_add = {}
_GenerateRulesForMSVS(
p, project_dir, options, spec, sources, excluded_sources, actions_to_add
)
list_excluded = generator_flags.get("msvs_list_excluded_files", True)
sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy(
spec, options, project_dir, sources, excluded_sources, list_excluded, version
)
# Add in files.
missing_sources = _VerifySourcesExist(sources, project_dir)
p.AddFiles(sources)
_AddToolFilesToMSVS(p, spec)
_HandlePreCompiledHeaders(p, sources, spec)
_AddActions(actions_to_add, spec, relative_path_of_gyp_file)
_AddCopies(actions_to_add, spec)
_WriteMSVSUserFile(project.path, version, spec)
# NOTE: this stanza must appear after all actions have been decided.
# Don't exclude sources with actions attached, or they won't run.
excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add)
_ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded)
_AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
# Write it out.
p.WriteIfChanged()
return missing_sources
def _GetUniquePlatforms(spec):
"""Returns the list of unique platforms for this spec, e.g ['win32', ...].
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
The MSVSUserFile object created.
"""
# Gather list of unique platforms.
platforms = OrderedSet()
for configuration in spec["configurations"]:
platforms.add(_ConfigPlatform(spec["configurations"][configuration]))
platforms = list(platforms)
return platforms
def _CreateMSVSUserFile(proj_path, version, spec):
"""Generates a .user file for the user running this Gyp program.
Arguments:
proj_path: The path of the project file being created. The .user file
shares the same path (with an appropriate suffix).
version: The VisualStudioVersion object.
spec: The target dictionary containing the properties of the target.
Returns:
The MSVSUserFile object created.
"""
(domain, username) = _GetDomainAndUserName()
vcuser_filename = ".".join([proj_path, domain, username, "user"])
user_file = MSVSUserFile.Writer(vcuser_filename, version, spec["target_name"])
return user_file
def _GetMSVSConfigurationType(spec, build_file):
"""Returns the configuration type for this project.
It's a number defined by Microsoft. Raises GypError if the type is missing or unknown.
Args:
spec: The target dictionary containing the properties of the target.
build_file: The path of the gyp file.
Returns:
An integer, the configuration type.
"""
try:
config_type = {
"executable": "1", # .exe
"shared_library": "2", # .dll
"loadable_module": "2", # .dll
"static_library": "4", # .lib
"windows_driver": "5", # .sys
"none": "10", # Utility type
}[spec["type"]]
except KeyError:
if spec.get("type"):
raise GypError(
"Target type %s is not a valid target type for "
"target %s in %s." % (spec["type"], spec["target_name"], build_file)
)
else:
raise GypError(
"Missing type field for target %s in %s."
% (spec["target_name"], build_file)
)
return config_type
def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
"""Adds a configuration to the MSVS project.
Many settings in a vcproj file are specific to a configuration. This
function adds the main configuration-specific part of the vcproj file.
Arguments:
p: The target project being generated.
spec: The target dictionary containing the properties of the target.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
config: The dictionary that defines the special processing to be done
for this configuration.
"""
# Get the information for this configuration
include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(config)
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
defines = _GetDefines(config)
defines = [_EscapeCppDefineForMSVS(d) for d in defines]
disabled_warnings = _GetDisabledWarnings(config)
prebuild = config.get("msvs_prebuild")
postbuild = config.get("msvs_postbuild")
def_file = _GetModuleDefinition(spec)
precompiled_header = config.get("msvs_precompiled_header")
# Prepare the list of tools as a dictionary.
tools = dict()
# Add in user specified msvs_settings.
msvs_settings = config.get("msvs_settings", {})
MSVSSettings.ValidateMSVSSettings(msvs_settings)
# Prevent default library inheritance from the environment.
_ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", ["$(NOINHERIT)"])
for tool in msvs_settings:
settings = config["msvs_settings"][tool]
for setting in settings:
_ToolAppend(tools, tool, setting, settings[setting])
# Add the information to the appropriate tool
_ToolAppend(tools, "VCCLCompilerTool", "AdditionalIncludeDirectories", include_dirs)
_ToolAppend(tools, "VCMIDLTool", "AdditionalIncludeDirectories", midl_include_dirs)
_ToolAppend(
tools,
"VCResourceCompilerTool",
"AdditionalIncludeDirectories",
resource_include_dirs,
)
# Add in libraries.
_ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", libraries)
_ToolAppend(tools, "VCLinkerTool", "AdditionalLibraryDirectories", library_dirs)
if out_file:
_ToolAppend(tools, vc_tool, "OutputFile", out_file, only_if_unset=True)
# Add defines.
_ToolAppend(tools, "VCCLCompilerTool", "PreprocessorDefinitions", defines)
_ToolAppend(tools, "VCResourceCompilerTool", "PreprocessorDefinitions", defines)
# Change program database directory to prevent collisions.
_ToolAppend(
tools,
"VCCLCompilerTool",
"ProgramDataBaseFileName",
"$(IntDir)$(ProjectName)\\vc80.pdb",
only_if_unset=True,
)
# Add disabled warnings.
_ToolAppend(tools, "VCCLCompilerTool", "DisableSpecificWarnings", disabled_warnings)
# Add Pre-build.
_ToolAppend(tools, "VCPreBuildEventTool", "CommandLine", prebuild)
# Add Post-build.
_ToolAppend(tools, "VCPostBuildEventTool", "CommandLine", postbuild)
# Turn on precompiled headers if appropriate.
if precompiled_header:
precompiled_header = os.path.split(precompiled_header)[1]
_ToolAppend(tools, "VCCLCompilerTool", "UsePrecompiledHeader", "2")
_ToolAppend(
tools, "VCCLCompilerTool", "PrecompiledHeaderThrough", precompiled_header
)
_ToolAppend(tools, "VCCLCompilerTool", "ForcedIncludeFiles", precompiled_header)
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec["type"] == "loadable_module":
_ToolAppend(tools, "VCLinkerTool", "IgnoreImportLibrary", "true")
# Set the module definition file if any.
if def_file:
_ToolAppend(tools, "VCLinkerTool", "ModuleDefinitionFile", def_file)
_AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
def _GetIncludeDirs(config):
"""Returns the list of directories to be used for #include directives.
Arguments:
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of directory paths.
"""
# TODO(bradnelson): include_dirs should really be flexible enough not to
# require this sort of thing.
include_dirs = config.get("include_dirs", []) + config.get(
"msvs_system_include_dirs", []
)
midl_include_dirs = config.get("midl_include_dirs", []) + config.get(
"msvs_system_include_dirs", []
)
resource_include_dirs = config.get("resource_include_dirs", include_dirs)
include_dirs = _FixPaths(include_dirs)
midl_include_dirs = _FixPaths(midl_include_dirs)
resource_include_dirs = _FixPaths(resource_include_dirs)
return include_dirs, midl_include_dirs, resource_include_dirs
def _GetLibraryDirs(config):
"""Returns the list of directories to be used for library search paths.
Arguments:
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of directory paths.
"""
library_dirs = config.get("library_dirs", [])
library_dirs = _FixPaths(library_dirs)
return library_dirs
def _GetLibraries(spec):
"""Returns the list of libraries for this configuration.
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
The list of directory paths.
"""
libraries = spec.get("libraries", [])
# Strip out -l, as it is not used on windows (but is needed so we can pass
# in libraries that are assumed to be in the default library path).
# Also remove duplicate entries, leaving only the last duplicate, while
# preserving order.
found = OrderedSet()
unique_libraries_list = []
for entry in reversed(libraries):
library = re.sub(r"^\-l", "", entry)
if not os.path.splitext(library)[1]:
library += ".lib"
if library not in found:
found.add(library)
unique_libraries_list.append(library)
unique_libraries_list.reverse()
return unique_libraries_list
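# Illustrative example (assumed spec): libraries of
# ['-lws2_32', 'kernel32.lib', 'ws2_32.lib'] yield
# ['kernel32.lib', 'ws2_32.lib']: the '-l' prefix is stripped, '.lib' is
# appended where missing, and only the last duplicate survives.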
def _GetOutputFilePathAndTool(spec, msbuild):
"""Returns the path and tool to use for this target.
Figures out the path of the file this spec will create and the name of
the VC tool that will create it.
Arguments:
spec: The target dictionary containing the properties of the target.
msbuild: True when generating for MSBuild; affects the default suffix.
Returns:
A triple of (file path, name of the vc tool, name of the msbuild tool)
"""
# Select a name for the output file.
out_file = ""
vc_tool = ""
msbuild_tool = ""
output_file_map = {
"executable": ("VCLinkerTool", "Link", "$(OutDir)", ".exe"),
"shared_library": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"),
"loadable_module": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"),
"windows_driver": ("VCLinkerTool", "Link", "$(OutDir)", ".sys"),
"static_library": ("VCLibrarianTool", "Lib", "$(OutDir)lib\\", ".lib"),
}
output_file_props = output_file_map.get(spec["type"])
if output_file_props and int(spec.get("msvs_auto_output_file", 1)):
vc_tool, msbuild_tool, out_dir, suffix = output_file_props
if spec.get("standalone_static_library", 0):
out_dir = "$(OutDir)"
out_dir = spec.get("product_dir", out_dir)
product_extension = spec.get("product_extension")
if product_extension:
suffix = "." + product_extension
elif msbuild:
suffix = "$(TargetExt)"
prefix = spec.get("product_prefix", "")
product_name = spec.get("product_name", "$(ProjectName)")
out_file = ntpath.join(out_dir, prefix + product_name + suffix)
return out_file, vc_tool, msbuild_tool
def _GetOutputTargetExt(spec):
"""Returns the extension for this target, including the dot
If product_extension is specified, returns '.' + product_extension to avoid
warning MSB8012; otherwise returns None. Ignores any target_extension
settings in the input files.
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
A string with the extension, or None
"""
target_extension = spec.get("product_extension")
if target_extension:
return "." + target_extension
return None
def _GetDefines(config):
"""Returns the list of preprocessor definitions for this configuration.
Arguments:
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of preprocessor definitions.
"""
defines = []
for d in config.get("defines", []):
if type(d) == list:
fd = "=".join([str(dpart) for dpart in d])
else:
fd = str(d)
defines.append(fd)
return defines
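# Illustrative example (assumed config): {'defines': ['NDEBUG',
# ['VERSION', 3]]} yields ['NDEBUG', 'VERSION=3'].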
def _GetDisabledWarnings(config):
return [str(i) for i in config.get("msvs_disabled_warnings", [])]
def _GetModuleDefinition(spec):
def_file = ""
if spec["type"] in [
"shared_library",
"loadable_module",
"executable",
"windows_driver",
]:
def_files = [s for s in spec.get("sources", []) if s.endswith(".def")]
if len(def_files) == 1:
def_file = _FixPath(def_files[0])
elif def_files:
raise ValueError(
"Multiple module definition files in one target, target %s lists "
"multiple .def files: %s" % (spec["target_name"], " ".join(def_files))
)
return def_file
def _ConvertToolsToExpectedForm(tools):
"""Convert tools to a form expected by Visual Studio.
Arguments:
tools: A dictionary of settings; the tool name is the key.
Returns:
A list of Tool objects.
"""
tool_list = []
for tool, settings in tools.items():
# Collapse settings with lists.
settings_fixed = {}
for setting, value in settings.items():
if type(value) == list:
if (
tool == "VCLinkerTool" and setting == "AdditionalDependencies"
) or setting == "AdditionalOptions":
settings_fixed[setting] = " ".join(value)
else:
settings_fixed[setting] = ";".join(value)
else:
settings_fixed[setting] = value
# Add in this tool.
tool_list.append(MSVSProject.Tool(tool, settings_fixed))
return tool_list
def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
"""Add to the project file the configuration specified by config.
Arguments:
p: The target project being generated.
spec: the target project dict.
tools: A dictionary of settings; the tool name is the key.
config: The dictionary that defines the special processing to be done
for this configuration.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
"""
attributes = _GetMSVSAttributes(spec, config, config_type)
# Add in this configuration.
tool_list = _ConvertToolsToExpectedForm(tools)
p.AddConfig(_ConfigFullName(config_name, config), attrs=attributes, tools=tool_list)
def _GetMSVSAttributes(spec, config, config_type):
# Prepare configuration attributes.
prepared_attrs = {}
source_attrs = config.get("msvs_configuration_attributes", {})
for a in source_attrs:
prepared_attrs[a] = source_attrs[a]
# Add props files.
vsprops_dirs = config.get("msvs_props", [])
vsprops_dirs = _FixPaths(vsprops_dirs)
if vsprops_dirs:
prepared_attrs["InheritedPropertySheets"] = ";".join(vsprops_dirs)
# Set configuration type.
prepared_attrs["ConfigurationType"] = config_type
output_dir = prepared_attrs.get(
"OutputDirectory", "$(SolutionDir)$(ConfigurationName)"
)
prepared_attrs["OutputDirectory"] = _FixPath(output_dir) + "\\"
if "IntermediateDirectory" not in prepared_attrs:
intermediate = "$(ConfigurationName)\\obj\\$(ProjectName)"
prepared_attrs["IntermediateDirectory"] = _FixPath(intermediate) + "\\"
else:
intermediate = _FixPath(prepared_attrs["IntermediateDirectory"]) + "\\"
intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
prepared_attrs["IntermediateDirectory"] = intermediate
return prepared_attrs
def _AddNormalizedSources(sources_set, sources_array):
sources_set.update(_NormalizedSource(s) for s in sources_array)
def _PrepareListOfSources(spec, generator_flags, gyp_file):
"""Prepare list of sources and excluded sources.
Besides the sources specified directly in the spec, adds the gyp file so
that a change to it will cause a re-compile. Also adds appropriate sources
for actions and copies. Assumes later stage will un-exclude files which
have custom build steps attached.
Arguments:
spec: The target dictionary containing the properties of the target.
generator_flags: Dict of generator-specific flags.
gyp_file: The name of the gyp file.
Returns:
A pair of (list of sources, list of excluded sources).
The sources will be relative to the gyp file.
"""
sources = OrderedSet()
_AddNormalizedSources(sources, spec.get("sources", []))
excluded_sources = OrderedSet()
# Add in the gyp file.
if not generator_flags.get("standalone"):
sources.add(gyp_file)
# Add in 'action' inputs and outputs.
for a in spec.get("actions", []):
inputs = a["inputs"]
inputs = [_NormalizedSource(i) for i in inputs]
# Add all inputs to sources and excluded sources.
inputs = OrderedSet(inputs)
sources.update(inputs)
if not spec.get("msvs_external_builder"):
excluded_sources.update(inputs)
if int(a.get("process_outputs_as_sources", False)):
_AddNormalizedSources(sources, a.get("outputs", []))
# Add in 'copies' inputs and outputs.
for cpy in spec.get("copies", []):
_AddNormalizedSources(sources, cpy.get("files", []))
return (sources, excluded_sources)
def _AdjustSourcesAndConvertToFilterHierarchy(
spec, options, gyp_dir, sources, excluded_sources, list_excluded, version
):
"""Adjusts the list of sources and excluded sources.
Also converts the sets to lists.
Arguments:
spec: The target dictionary containing the properties of the target.
options: Global generator options.
gyp_dir: The directory containing the gyp file being processed.
sources: A set of sources to be included for this project.
excluded_sources: A set of sources to be excluded for this project.
list_excluded: Whether excluded files should still be listed in the project.
version: A MSVSVersion object.
Returns:
A trio of (list of sources, list of excluded sources,
path of excluded IDL file)
"""
# Exclude excluded sources coming into the generator.
excluded_sources.update(OrderedSet(spec.get("sources_excluded", [])))
# Add excluded sources into sources for good measure.
sources.update(excluded_sources)
# Convert to proper windows form.
# NOTE: sources goes from being a set to a list here.
# NOTE: excluded_sources goes from being a set to a list here.
sources = _FixPaths(sources)
# Convert to proper windows form.
excluded_sources = _FixPaths(excluded_sources)
excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
precompiled_related = _GetPrecompileRelatedFiles(spec)
# Find the excluded ones, minus the precompiled header related ones.
fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
# Convert to folders and the right slashes.
sources = [i.split("\\") for i in sources]
sources = _ConvertSourcesToFilterHierarchy(
sources,
excluded=fully_excluded,
list_excluded=list_excluded,
msvs_version=version,
)
# Prune filters with a single child to flatten ugly directory structures
# such as ../../src/modules/module1 etc.
if version.UsesVcxproj():
while (
all([isinstance(s, MSVSProject.Filter) for s in sources])
and len(set([s.name for s in sources])) == 1
):
assert all([len(s.contents) == 1 for s in sources])
sources = [s.contents[0] for s in sources]
else:
while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
sources = sources[0].contents
return sources, excluded_sources, excluded_idl
def _IdlFilesHandledNonNatively(spec, sources):
# If any non-native rules use 'idl' as an extension exclude idl files.
# Gather a list here to use later.
using_idl = False
for rule in spec.get("rules", []):
if rule["extension"] == "idl" and int(rule.get("msvs_external_rule", 0)):
using_idl = True
break
if using_idl:
excluded_idl = [i for i in sources if i.endswith(".idl")]
else:
excluded_idl = []
return excluded_idl
def _GetPrecompileRelatedFiles(spec):
# Gather a list of precompiled header related sources.
precompiled_related = []
for _, config in spec["configurations"].items():
for k in precomp_keys:
f = config.get(k)
if f:
precompiled_related.append(_FixPath(f))
return precompiled_related
def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded):
exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
for file_name, excluded_configs in exclusions.items():
if not list_excluded and len(excluded_configs) == len(spec["configurations"]):
# If we're not listing excluded files, then they won't appear in the
# project, so don't try to configure them to be excluded.
pass
else:
for config_name, config in excluded_configs:
p.AddFileConfig(
file_name,
_ConfigFullName(config_name, config),
{"ExcludedFromBuild": "true"},
)
def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
exclusions = {}
# Exclude excluded sources from being built.
for f in excluded_sources:
excluded_configs = []
for config_name, config in spec["configurations"].items():
precomped = [_FixPath(config.get(i, "")) for i in precomp_keys]
# Don't do this for ones that are precompiled header related.
if f not in precomped:
excluded_configs.append((config_name, config))
exclusions[f] = excluded_configs
# If any non-native rules use 'idl' as an extension exclude idl files.
# Exclude them now.
for f in excluded_idl:
excluded_configs = []
for config_name, config in spec["configurations"].items():
excluded_configs.append((config_name, config))
exclusions[f] = excluded_configs
return exclusions
def _AddToolFilesToMSVS(p, spec):
# Add in tool files (rules).
tool_files = OrderedSet()
for _, config in spec["configurations"].items():
for f in config.get("msvs_tool_files", []):
tool_files.add(f)
for f in tool_files:
p.AddToolFile(f)
def _HandlePreCompiledHeaders(p, sources, spec):
# Pre-compiled header source stubs need a different compiler flag
# (generate precompiled header) and any source file not of the same
# kind (i.e. C vs. C++) as the precompiled header source stub needs
# to have use of precompiled headers disabled.
extensions_excluded_from_precompile = []
for config_name, config in spec["configurations"].items():
source = config.get("msvs_precompiled_source")
if source:
source = _FixPath(source)
# UsePrecompiledHeader=1 means: create the precompiled header from this stub.
tool = MSVSProject.Tool("VCCLCompilerTool", {"UsePrecompiledHeader": "1"})
p.AddFileConfig(
source, _ConfigFullName(config_name, config), {}, tools=[tool]
)
basename, extension = os.path.splitext(source)
if extension == ".c":
extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"]
else:
extensions_excluded_from_precompile = [".c"]
def DisableForSourceTree(source_tree):
for source in source_tree:
if isinstance(source, MSVSProject.Filter):
DisableForSourceTree(source.contents)
else:
basename, extension = os.path.splitext(source)
if extension in extensions_excluded_from_precompile:
for config_name, config in spec["configurations"].items():
tool = MSVSProject.Tool(
"VCCLCompilerTool",
{
"UsePrecompiledHeader": "0",
"ForcedIncludeFiles": "$(NOINHERIT)",
},
)
p.AddFileConfig(
_FixPath(source),
_ConfigFullName(config_name, config),
{},
tools=[tool],
)
# Do nothing if there was no precompiled source.
if extensions_excluded_from_precompile:
DisableForSourceTree(sources)
def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
# Add actions.
actions = spec.get("actions", [])
# Don't setup_env every time. When all the actions are run together in one
# batch file in VS, the PATH will grow too long.
# Membership in this set means that the cygwin environment has been set up,
# and does not need to be set up again.
have_setup_env = set()
for a in actions:
# Attach actions to the gyp file if nothing else is there.
inputs = a.get("inputs") or [relative_path_of_gyp_file]
attached_to = inputs[0]
need_setup_env = attached_to not in have_setup_env
cmd = _BuildCommandLineForRule(
spec, a, has_input_path=False, do_setup_env=need_setup_env
)
have_setup_env.add(attached_to)
# Add the action.
_AddActionStep(
actions_to_add,
inputs=inputs,
outputs=a.get("outputs", []),
description=a.get("message", a["action_name"]),
command=cmd,
)
def _WriteMSVSUserFile(project_path, version, spec):
# Add run_as and test targets.
if "run_as" in spec:
run_as = spec["run_as"]
action = run_as.get("action", [])
environment = run_as.get("environment", [])
working_directory = run_as.get("working_directory", ".")
elif int(spec.get("test", 0)):
action = ["$(TargetPath)", "--gtest_print_time"]
environment = []
working_directory = "."
else:
return # Nothing to add
# Write out the user file.
user_file = _CreateMSVSUserFile(project_path, version, spec)
for config_name, c_data in spec["configurations"].items():
user_file.AddDebugSettings(
_ConfigFullName(config_name, c_data), action, environment, working_directory
)
user_file.WriteIfChanged()
def _AddCopies(actions_to_add, spec):
copies = _GetCopies(spec)
for inputs, outputs, cmd, description in copies:
_AddActionStep(
actions_to_add,
inputs=inputs,
outputs=outputs,
description=description,
command=cmd,
)
def _GetCopies(spec):
copies = []
# Add copies.
for cpy in spec.get("copies", []):
for src in cpy.get("files", []):
dst = os.path.join(cpy["destination"], os.path.basename(src))
# _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
# outputs, so do the same for our generated command line.
if src.endswith("/"):
src_bare = src[:-1]
base_dir = posixpath.split(src_bare)[0]
outer_dir = posixpath.split(src_bare)[1]
fixed_dst = _FixPath(dst)
full_dst = '"%s\\%s\\"' % (fixed_dst, outer_dir)
cmd = 'mkdir %s 2>nul & cd "%s" && xcopy /e /f /y "%s" %s' % (
full_dst,
_FixPath(base_dir),
outer_dir,
full_dst,
)
copies.append(
(
[src],
["dummy_copies", dst],
cmd,
"Copying %s to %s" % (src, fixed_dst),
)
)
else:
fix_dst = _FixPath(cpy["destination"])
cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
fix_dst,
_FixPath(src),
_FixPath(dst),
)
copies.append(([src], [dst], cmd, "Copying %s to %s" % (src, fix_dst)))
return copies
def _GetPathDict(root, path):
# |path| will eventually be empty (in the recursive calls) if it was initially
# relative; otherwise it will eventually end up as '\', 'D:\', etc.
if not path or path.endswith(os.sep):
return root
parent, folder = os.path.split(path)
parent_dict = _GetPathDict(root, parent)
if folder not in parent_dict:
parent_dict[folder] = dict()
return parent_dict[folder]
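# Illustrative example (assumed call): starting from root = {}, calling
# _GetPathDict(root, 'a/b') leaves root as {'a': {'b': {}}} and returns the
# innermost dict, ready for project entries to be hung off it.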
def _DictsToFolders(base_path, bucket, flat):
# Convert to folders recursively.
children = []
for folder, contents in bucket.items():
if type(contents) == dict:
folder_children = _DictsToFolders(
os.path.join(base_path, folder), contents, flat
)
if flat:
children += folder_children
else:
folder_children = MSVSNew.MSVSFolder(
os.path.join(base_path, folder),
name="(" + folder + ")",
entries=folder_children,
)
children.append(folder_children)
else:
children.append(contents)
return children
def _CollapseSingles(parent, node):
# Recursively explore the tree of dicts looking for projects which are
# the sole item in a folder which has the same name as the project. Bring
# such projects up one level.
if type(node) == dict and len(node) == 1 and next(iter(node)) == parent + ".vcproj":
return node[next(iter(node))]
if type(node) != dict:
return node
for child in node:
node[child] = _CollapseSingles(child, node[child])
return node
def _GatherSolutionFolders(sln_projects, project_objects, flat):
root = {}
# Convert into a tree of dicts on path.
for p in sln_projects:
gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
if p.endswith("#host"):
target += "_host"
gyp_dir = os.path.dirname(gyp_file)
path_dict = _GetPathDict(root, gyp_dir)
path_dict[target + ".vcproj"] = project_objects[p]
# Walk down from the top until we hit a folder that has more than one entry.
# In practice, this strips the top-level "src/" dir from the hierarchy in
# the solution.
while len(root) == 1 and type(root[next(iter(root))]) == dict:
root = root[next(iter(root))]
# Collapse singles.
root = _CollapseSingles("", root)
# Merge buckets until everything is a root entry.
return _DictsToFolders("", root, flat)
def _GetPathOfProject(qualified_target, spec, options, msvs_version):
default_config = _GetDefaultConfiguration(spec)
proj_filename = default_config.get("msvs_existing_vcproj")
if not proj_filename:
proj_filename = spec["target_name"]
if spec["toolset"] == "host":
proj_filename += "_host"
proj_filename = proj_filename + options.suffix + msvs_version.ProjectExtension()
build_file = gyp.common.BuildFile(qualified_target)
proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
fix_prefix = None
if options.generator_output:
project_dir_path = os.path.dirname(os.path.abspath(proj_path))
proj_path = os.path.join(options.generator_output, proj_path)
fix_prefix = gyp.common.RelativePath(
project_dir_path, os.path.dirname(proj_path)
)
return proj_path, fix_prefix
def _GetPlatformOverridesOfProject(spec):
# Prepare a dict indicating which project configurations are used for which
# solution configurations for this target.
config_platform_overrides = {}
for config_name, c in spec["configurations"].items():
config_fullname = _ConfigFullName(config_name, c)
platform = c.get("msvs_target_platform", _ConfigPlatform(c))
fixed_config_fullname = "%s|%s" % (
_ConfigBaseName(config_name, _ConfigPlatform(c)),
platform,
)
if spec["toolset"] == "host" and generator_supports_multiple_toolsets:
fixed_config_fullname = "%s|x64" % (config_name,)
config_platform_overrides[config_fullname] = fixed_config_fullname
return config_platform_overrides
def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
"""Create a MSVSProject object for the targets found in target list.
Arguments:
target_list: the list of targets to generate project objects for.
target_dicts: the dictionary of specifications.
options: global generator options.
msvs_version: the MSVSVersion object.
Returns:
A dict of created MSVSProject objects, keyed by qualified target.
"""
global fixpath_prefix
# Generate each project.
projects = {}
for qualified_target in target_list:
spec = target_dicts[qualified_target]
proj_path, fixpath_prefix = _GetPathOfProject(
qualified_target, spec, options, msvs_version
)
guid = _GetGuidOfProject(proj_path, spec)
overrides = _GetPlatformOverridesOfProject(spec)
build_file = gyp.common.BuildFile(qualified_target)
# Create object for this project.
target_name = spec["target_name"]
if spec["toolset"] == "host":
target_name += "_host"
obj = MSVSNew.MSVSProject(
proj_path,
name=target_name,
guid=guid,
spec=spec,
build_file=build_file,
config_platform_overrides=overrides,
fixpath_prefix=fixpath_prefix,
)
# Set project toolset if any (MS build only)
if msvs_version.UsesVcxproj():
obj.set_msbuild_toolset(
_GetMsbuildToolsetOfProject(proj_path, spec, msvs_version)
)
projects[qualified_target] = obj
# Set all the dependencies, but not if we are using an external builder like
# ninja
for project in projects.values():
if not project.spec.get("msvs_external_builder"):
deps = project.spec.get("dependencies", [])
deps = [projects[d] for d in deps]
project.set_dependencies(deps)
return projects
def _InitNinjaFlavor(params, target_list, target_dicts):
"""Initialize targets for the ninja flavor.
This sets up the necessary variables in the targets to generate msvs projects
that use ninja as an external builder. The variables in the spec are only set
if they have not been set. This allows individual specs to override the
default values initialized here.
Arguments:
params: Params provided to the generator.
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
"""
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec.get("msvs_external_builder"):
# The spec explicitly defined an external builder, so don't change it.
continue
path_to_ninja = spec.get("msvs_path_to_ninja", "ninja.exe")
spec["msvs_external_builder"] = "ninja"
if not spec.get("msvs_external_builder_out_dir"):
gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
gyp_dir = os.path.dirname(gyp_file)
configuration = "$(Configuration)"
if params.get("target_arch") == "x64":
configuration += "_x64"
if params.get("target_arch") == "arm64":
configuration += "_arm64"
spec["msvs_external_builder_out_dir"] = os.path.join(
gyp.common.RelativePath(params["options"].toplevel_dir, gyp_dir),
ninja_generator.ComputeOutputDir(params),
configuration,
)
if not spec.get("msvs_external_builder_build_cmd"):
spec["msvs_external_builder_build_cmd"] = [
path_to_ninja,
"-C",
"$(OutDir)",
"$(ProjectName)",
]
if not spec.get("msvs_external_builder_clean_cmd"):
spec["msvs_external_builder_clean_cmd"] = [
path_to_ninja,
"-C",
"$(OutDir)",
"-tclean",
"$(ProjectName)",
]
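# Illustrative sketch (hypothetical paths, not part of the original source):
# a spec that defined none of the msvs_external_builder_* keys leaves
# _InitNinjaFlavor with values along these lines (the "out" directory name
# comes from ninja_generator.ComputeOutputDir and may differ):
#   spec["msvs_external_builder"]           == "ninja"
#   spec["msvs_external_builder_out_dir"]   == "<gyp-dir-to-toplevel>/out/$(Configuration)"
#   spec["msvs_external_builder_build_cmd"] == ["ninja.exe", "-C", "$(OutDir)",
#                                               "$(ProjectName)"]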
def CalculateVariables(default_variables, params):
"""Generated variables that require params to be known."""
generator_flags = params.get("generator_flags", {})
# Select project file format version (if unset, default to auto detecting).
msvs_version = MSVSVersion.SelectVisualStudioVersion(
generator_flags.get("msvs_version", "auto")
)
# Stash msvs_version for later (so we don't have to probe the system twice).
params["msvs_version"] = msvs_version
# Set a variable so conditions can be based on msvs_version.
default_variables["MSVS_VERSION"] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
    # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
    # contains the actual word size of the system when running through WOW64).
if (
os.environ.get("PROCESSOR_ARCHITECTURE", "").find("64") >= 0
or os.environ.get("PROCESSOR_ARCHITEW6432", "").find("64") >= 0
):
default_variables["MSVS_OS_BITS"] = 64
else:
default_variables["MSVS_OS_BITS"] = 32
if gyp.common.GetFlavor(params) == "ninja":
default_variables["SHARED_INTERMEDIATE_DIR"] = "$(OutDir)gen"
def PerformBuild(data, configurations, params):
options = params["options"]
msvs_version = params["msvs_version"]
devenv = os.path.join(msvs_version.path, "Common7", "IDE", "devenv.com")
for build_file, build_file_dict in data.items():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != ".gyp":
continue
sln_path = build_file_root + options.suffix + ".sln"
if options.generator_output:
sln_path = os.path.join(options.generator_output, sln_path)
for config in configurations:
arguments = [devenv, sln_path, "/Build", config]
print("Building [%s]: %s" % (config, arguments))
subprocess.check_call(arguments)
def CalculateGeneratorInputInfo(params):
if params.get("flavor") == "ninja":
toplevel = params["options"].toplevel_dir
qualified_out_dir = os.path.normpath(
os.path.join(
toplevel,
ninja_generator.ComputeOutputDir(params),
"gypfiles-msvs-ninja",
)
)
global generator_filelist_paths
generator_filelist_paths = {
"toplevel": toplevel,
"qualified_out_dir": qualified_out_dir,
}
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate .sln and .vcproj files.
This is the entry point for this generator.
    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
      data: Dictionary containing per .gyp data.
      params: Dict of global generator options and parameters.
    """
global fixpath_prefix
options = params["options"]
# Get the project file format version back out of where we stashed it in
# GeneratorCalculatedVariables.
msvs_version = params["msvs_version"]
generator_flags = params.get("generator_flags", {})
# Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
(target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
# Optionally use the large PDB workaround for targets marked with
# 'msvs_large_pdb': 1.
(target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
target_list, target_dicts, generator_default_variables
)
# Optionally configure each spec to use ninja as the external builder.
if params.get("flavor") == "ninja":
_InitNinjaFlavor(params, target_list, target_dicts)
# Prepare the set of configurations.
configs = set()
for qualified_target in target_list:
spec = target_dicts[qualified_target]
for config_name, config in spec["configurations"].items():
config_name = _ConfigFullName(config_name, config)
configs.add(config_name)
if config_name == "Release|arm64":
configs.add("Release|x64")
configs = list(configs)
# Figure out all the projects that will be generated and their guids
project_objects = _CreateProjectObjects(
target_list, target_dicts, options, msvs_version
)
# Generate each project.
missing_sources = []
for project in project_objects.values():
fixpath_prefix = project.fixpath_prefix
        missing_sources.extend(
            _GenerateProject(
                project, options, msvs_version, generator_flags, project.spec
            )
        )
fixpath_prefix = None
for build_file in data:
# Validate build_file extension
target_only_configs = configs
if generator_supports_multiple_toolsets:
target_only_configs = [i for i in configs if i.endswith("arm64")]
if not build_file.endswith(".gyp"):
continue
sln_path = os.path.splitext(build_file)[0] + options.suffix + ".sln"
if options.generator_output:
sln_path = os.path.join(options.generator_output, sln_path)
# Get projects in the solution, and their dependents.
sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
# Create folder hierarchy.
root_entries = _GatherSolutionFolders(
sln_projects, project_objects, flat=msvs_version.FlatSolution()
)
# Create solution.
sln = MSVSNew.MSVSSolution(
sln_path,
entries=root_entries,
variants=target_only_configs,
websiteProperties=False,
version=msvs_version,
)
sln.Write()
if missing_sources:
error_message = "Missing input files:\n" + "\n".join(set(missing_sources))
if generator_flags.get("msvs_error_on_missing_sources", False):
raise GypError(error_message)
else:
print("Warning: " + error_message, file=sys.stdout)
def _GenerateMSBuildFiltersFile(
filters_path,
source_files,
rule_dependencies,
extension_to_rule_name,
platforms,
toolset,
):
"""Generate the filters file.
This file is used by Visual Studio to organize the presentation of source
files into folders.
    Arguments:
      filters_path: The path of the file to be created.
      source_files: The hierarchical structure of all the sources.
      rule_dependencies: The additional dependencies of the rules.
      extension_to_rule_name: A dictionary mapping file extensions to rules.
      platforms: The platforms the project is built for.
      toolset: 'target' or 'host', the toolset the sources are built with.
    """
filter_group = []
source_group = []
_AppendFiltersForMSBuild(
"",
source_files,
rule_dependencies,
extension_to_rule_name,
platforms,
toolset,
filter_group,
source_group,
)
if filter_group:
content = [
"Project",
{
"ToolsVersion": "4.0",
"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003",
},
["ItemGroup"] + filter_group,
["ItemGroup"] + source_group,
]
easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
elif os.path.exists(filters_path):
# We don't need this filter anymore. Delete the old filter file.
os.unlink(filters_path)
def _AppendFiltersForMSBuild(
parent_filter_name,
sources,
rule_dependencies,
extension_to_rule_name,
platforms,
toolset,
filter_group,
source_group,
):
"""Creates the list of filters and sources to be added in the filter file.
    Args:
      parent_filter_name: The name of the filter under which the sources are
          found.
      sources: The hierarchy of filters and sources to process.
      rule_dependencies: The additional dependencies of the rules.
      extension_to_rule_name: A dictionary mapping file extensions to rules.
      platforms: The platforms the project is built for.
      toolset: 'target' or 'host', the toolset the sources are built with.
      filter_group: The list to which filter entries will be appended.
      source_group: The list to which source entries will be appended.
    """
for source in sources:
if isinstance(source, MSVSProject.Filter):
# We have a sub-filter. Create the name of that sub-filter.
if not parent_filter_name:
filter_name = source.name
else:
filter_name = "%s\\%s" % (parent_filter_name, source.name)
# Add the filter to the group.
filter_group.append(
[
"Filter",
{"Include": filter_name},
["UniqueIdentifier", MSVSNew.MakeGuid(source.name)],
]
)
# Recurse and add its dependents.
_AppendFiltersForMSBuild(
filter_name,
source.contents,
rule_dependencies,
extension_to_rule_name,
platforms,
toolset,
filter_group,
source_group,
)
else:
# It's a source. Create a source entry.
_, element = _MapFileToMsBuildSourceType(
source, rule_dependencies, extension_to_rule_name, platforms, toolset
)
source_entry = [element, {"Include": source}]
# Specify the filter it is part of, if any.
if parent_filter_name:
source_entry.append(["Filter", parent_filter_name])
source_group.append(source_entry)
def _MapFileToMsBuildSourceType(
source, rule_dependencies, extension_to_rule_name, platforms, toolset
):
"""Returns the group and element type of the source file.
Arguments:
source: The source file name.
extension_to_rule_name: A dictionary mapping file extensions to rules.
    Returns:
      A pair of (the group this file should be part of, the label of the
      MSBuild element).
    """
_, ext = os.path.splitext(source)
ext = ext.lower()
if ext in extension_to_rule_name:
group = "rule"
element = extension_to_rule_name[ext]
elif ext in [".cc", ".cpp", ".c", ".cxx", ".mm"]:
group = "compile"
element = "ClCompile"
elif ext in [".h", ".hxx"]:
group = "include"
element = "ClInclude"
elif ext == ".rc":
group = "resource"
element = "ResourceCompile"
elif ext in [".s", ".asm"]:
group = "masm"
element = "MASM"
if "arm64" in platforms and toolset == "target":
element = "MARMASM"
elif ext == ".idl":
group = "midl"
element = "Midl"
elif source in rule_dependencies:
group = "rule_dependency"
element = "CustomBuild"
else:
group = "none"
element = "None"
return (group, element)
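# Illustrative sketch (not part of the original source): a few mappings
# produced by _MapFileToMsBuildSourceType when no custom rules are defined.
def _ExampleMapFileToSourceType():
    assert _MapFileToMsBuildSourceType(
        "foo.cpp", set(), {}, [], "target") == ("compile", "ClCompile")
    assert _MapFileToMsBuildSourceType(
        "foo.rc", set(), {}, [], "target") == ("resource", "ResourceCompile")
    # Assembly maps to MASM, but switches to MARMASM when targeting arm64:
    assert _MapFileToMsBuildSourceType(
        "foo.asm", set(), {}, ["arm64"], "target") == ("masm", "MARMASM")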
def _GenerateRulesForMSBuild(
output_dir,
options,
spec,
sources,
excluded_sources,
props_files_of_rules,
targets_files_of_rules,
actions_to_add,
rule_dependencies,
extension_to_rule_name,
):
# MSBuild rules are implemented using three files: an XML file, a .targets
# file and a .props file.
# For more details see:
# https://devblogs.microsoft.com/cppblog/quick-help-on-vs2010-custom-build-rule/
rules = spec.get("rules", [])
rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
msbuild_rules = []
for rule in rules_native:
# Skip a rule with no action and no inputs.
if "action" not in rule and not rule.get("rule_sources", []):
continue
msbuild_rule = MSBuildRule(rule, spec)
msbuild_rules.append(msbuild_rule)
rule_dependencies.update(msbuild_rule.additional_dependencies.split(";"))
extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
if msbuild_rules:
base = spec["target_name"] + options.suffix
props_name = base + ".props"
targets_name = base + ".targets"
xml_name = base + ".xml"
props_files_of_rules.add(props_name)
targets_files_of_rules.add(targets_name)
props_path = os.path.join(output_dir, props_name)
targets_path = os.path.join(output_dir, targets_name)
xml_path = os.path.join(output_dir, xml_name)
_GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
_GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
_GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
if rules_external:
_GenerateExternalRules(
rules_external, output_dir, spec, sources, options, actions_to_add
)
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
class MSBuildRule(object):
"""Used to store information used to generate an MSBuild rule.
Attributes:
rule_name: The rule name, sanitized to use in XML.
target_name: The name of the target.
after_targets: The name of the AfterTargets element.
before_targets: The name of the BeforeTargets element.
depends_on: The name of the DependsOn element.
compute_output: The name of the ComputeOutput element.
dirs_to_make: The name of the DirsToMake element.
inputs: The name of the _inputs element.
tlog: The name of the _tlog element.
extension: The extension this rule applies to.
description: The message displayed when this rule is invoked.
additional_dependencies: A string listing additional dependencies.
outputs: The outputs of this rule.
command: The command used to run the rule.
"""
def __init__(self, rule, spec):
self.display_name = rule["rule_name"]
        # Ensure the rule name contains only alphanumeric characters and
        # underscores.
self.rule_name = re.sub(r"\W", "_", self.display_name)
# Create the various element names, following the example set by the
# Visual Studio 2008 to 2010 conversion. I don't know if VS2010
# is sensitive to the exact names.
self.target_name = "_" + self.rule_name
self.after_targets = self.rule_name + "AfterTargets"
self.before_targets = self.rule_name + "BeforeTargets"
self.depends_on = self.rule_name + "DependsOn"
self.compute_output = "Compute%sOutput" % self.rule_name
self.dirs_to_make = self.rule_name + "DirsToMake"
self.inputs = self.rule_name + "_inputs"
self.tlog = self.rule_name + "_tlog"
self.extension = rule["extension"]
if not self.extension.startswith("."):
self.extension = "." + self.extension
self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
rule.get("message", self.rule_name)
)
old_additional_dependencies = _FixPaths(rule.get("inputs", []))
self.additional_dependencies = ";".join(
[
MSVSSettings.ConvertVCMacrosToMSBuild(i)
for i in old_additional_dependencies
]
)
old_outputs = _FixPaths(rule.get("outputs", []))
self.outputs = ";".join(
[MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in old_outputs]
)
old_command = _BuildCommandLineForRule(
spec, rule, has_input_path=True, do_setup_env=True
)
self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
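# Illustrative sketch (hypothetical rule, not part of the original source):
# the element names MSBuildRule derives for a gyp rule named "idl compile"
# with extension "idl":
#   display_name   -> "idl compile"
#   rule_name      -> "idl_compile"   (non-word characters become underscores)
#   target_name    -> "_idl_compile"
#   before_targets -> "idl_compileBeforeTargets"
#   after_targets  -> "idl_compileAfterTargets"
#   depends_on     -> "idl_compileDependsOn"
#   compute_output -> "Computeidl_compileOutput"
#   extension      -> ".idl"          (a leading dot is added if missing)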
def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
"""Generate the .props file."""
content = [
"Project",
{"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"},
]
for rule in msbuild_rules:
content.extend(
[
[
"PropertyGroup",
{
"Condition": "'$(%s)' == '' and '$(%s)' == '' and "
"'$(ConfigurationType)' != 'Makefile'"
% (rule.before_targets, rule.after_targets)
},
[rule.before_targets, "Midl"],
[rule.after_targets, "CustomBuild"],
],
[
"PropertyGroup",
[
rule.depends_on,
{"Condition": "'$(ConfigurationType)' != 'Makefile'"},
"_SelectedFiles;$(%s)" % rule.depends_on,
],
],
[
"ItemDefinitionGroup",
[
rule.rule_name,
["CommandLineTemplate", rule.command],
["Outputs", rule.outputs],
["ExecutionDescription", rule.description],
["AdditionalDependencies", rule.additional_dependencies],
],
],
]
)
easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
"""Generate the .targets file."""
content = [
"Project",
{"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"},
]
item_group = [
"ItemGroup",
[
"PropertyPageSchema",
{"Include": "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"},
],
]
for rule in msbuild_rules:
item_group.append(
[
"AvailableItemName",
{"Include": rule.rule_name},
["Targets", rule.target_name],
]
)
content.append(item_group)
for rule in msbuild_rules:
content.append(
[
"UsingTask",
{
"TaskName": rule.rule_name,
"TaskFactory": "XamlTaskFactory",
"AssemblyName": "Microsoft.Build.Tasks.v4.0",
},
["Task", "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"],
]
)
for rule in msbuild_rules:
rule_name = rule.rule_name
target_outputs = "%%(%s.Outputs)" % rule_name
target_inputs = (
"%%(%s.Identity);%%(%s.AdditionalDependencies);" "$(MSBuildProjectFile)"
) % (rule_name, rule_name)
rule_inputs = "%%(%s.Identity)" % rule_name
extension_condition = (
"'%(Extension)'=='.obj' or "
"'%(Extension)'=='.res' or "
"'%(Extension)'=='.rsc' or "
"'%(Extension)'=='.lib'"
)
remove_section = [
"ItemGroup",
{"Condition": "'@(SelectedFiles)' != ''"},
[
rule_name,
{
"Remove": "@(%s)" % rule_name,
"Condition": "'%(Identity)' != '@(SelectedFiles)'",
},
],
]
inputs_section = [
"ItemGroup",
[rule.inputs, {"Include": "%%(%s.AdditionalDependencies)" % rule_name}],
]
logging_section = [
"ItemGroup",
[
rule.tlog,
{
"Include": "%%(%s.Outputs)" % rule_name,
"Condition": (
"'%%(%s.Outputs)' != '' and "
"'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name)
),
},
["Source", "@(%s, '|')" % rule_name],
["Inputs", "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
],
]
message_section = [
"Message",
{"Importance": "High", "Text": "%%(%s.ExecutionDescription)" % rule_name},
]
write_tlog_section = [
"WriteLinesToFile",
{
"Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog),
"File": "$(IntDir)$(ProjectName).write.1.tlog",
"Lines": "^%%(%s.Source);@(%s->'%%(Fullpath)')"
% (rule.tlog, rule.tlog),
},
]
read_tlog_section = [
"WriteLinesToFile",
{
"Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog),
"File": "$(IntDir)$(ProjectName).read.1.tlog",
"Lines": "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog),
},
]
command_and_input_section = [
rule_name,
{
"Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule_name, rule_name),
"EchoOff": "true",
"StandardOutputImportance": "High",
"StandardErrorImportance": "High",
"CommandLineTemplate": "%%(%s.CommandLineTemplate)" % rule_name,
"AdditionalOptions": "%%(%s.AdditionalOptions)" % rule_name,
"Inputs": rule_inputs,
},
]
content.extend(
[
[
"Target",
{
"Name": rule.target_name,
"BeforeTargets": "$(%s)" % rule.before_targets,
"AfterTargets": "$(%s)" % rule.after_targets,
"Condition": "'@(%s)' != ''" % rule_name,
"DependsOnTargets": "$(%s);%s"
% (rule.depends_on, rule.compute_output),
"Outputs": target_outputs,
"Inputs": target_inputs,
},
remove_section,
inputs_section,
logging_section,
message_section,
write_tlog_section,
read_tlog_section,
command_and_input_section,
],
[
"PropertyGroup",
[
"ComputeLinkInputsTargets",
"$(ComputeLinkInputsTargets);",
"%s;" % rule.compute_output,
],
[
"ComputeLibInputsTargets",
"$(ComputeLibInputsTargets);",
"%s;" % rule.compute_output,
],
],
[
"Target",
{
"Name": rule.compute_output,
"Condition": "'@(%s)' != ''" % rule_name,
},
[
"ItemGroup",
[
rule.dirs_to_make,
{
"Condition": "'@(%s)' != '' and "
"'%%(%s.ExcludedFromBuild)' != 'true'"
% (rule_name, rule_name),
"Include": "%%(%s.Outputs)" % rule_name,
},
],
[
"Link",
{
"Include": "%%(%s.Identity)" % rule.dirs_to_make,
"Condition": extension_condition,
},
],
[
"Lib",
{
"Include": "%%(%s.Identity)" % rule.dirs_to_make,
"Condition": extension_condition,
},
],
[
"ImpLib",
{
"Include": "%%(%s.Identity)" % rule.dirs_to_make,
"Condition": extension_condition,
},
],
],
[
"MakeDir",
{
"Directories": (
"@(%s->'%%(RootDir)%%(Directory)')" % rule.dirs_to_make
)
},
],
],
]
)
easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
# Generate the .xml file
content = [
"ProjectSchemaDefinitions",
{
"xmlns": (
"clr-namespace:Microsoft.Build.Framework.XamlTypes;"
"assembly=Microsoft.Build.Framework"
),
"xmlns:x": "http://schemas.microsoft.com/winfx/2006/xaml",
"xmlns:sys": "clr-namespace:System;assembly=mscorlib",
"xmlns:transformCallback": "Microsoft.Cpp.Dev10.ConvertPropertyCallback",
},
]
for rule in msbuild_rules:
content.extend(
[
[
"Rule",
{
"Name": rule.rule_name,
"PageTemplate": "tool",
"DisplayName": rule.display_name,
"Order": "200",
},
[
"Rule.DataSource",
[
"DataSource",
{"Persistence": "ProjectFile", "ItemType": rule.rule_name},
],
],
[
"Rule.Categories",
[
"Category",
{"Name": "General"},
["Category.DisplayName", ["sys:String", "General"]],
],
[
"Category",
{"Name": "Command Line", "Subtype": "CommandLine"},
["Category.DisplayName", ["sys:String", "Command Line"]],
],
],
[
"StringListProperty",
{
"Name": "Inputs",
"Category": "Command Line",
"IsRequired": "true",
"Switch": " ",
},
[
"StringListProperty.DataSource",
[
"DataSource",
{
"Persistence": "ProjectFile",
"ItemType": rule.rule_name,
"SourceType": "Item",
},
],
],
],
[
"StringProperty",
{
"Name": "CommandLineTemplate",
"DisplayName": "Command Line",
"Visible": "False",
"IncludeInCommandLine": "False",
},
],
[
"DynamicEnumProperty",
{
"Name": rule.before_targets,
"Category": "General",
"EnumProvider": "Targets",
"IncludeInCommandLine": "False",
},
[
"DynamicEnumProperty.DisplayName",
["sys:String", "Execute Before"],
],
[
"DynamicEnumProperty.Description",
[
"sys:String",
"Specifies the targets for the build customization"
" to run before.",
],
],
[
"DynamicEnumProperty.ProviderSettings",
[
"NameValuePair",
{
"Name": "Exclude",
"Value": "^%s|^Compute" % rule.before_targets,
},
],
],
[
"DynamicEnumProperty.DataSource",
[
"DataSource",
{
"Persistence": "ProjectFile",
"HasConfigurationCondition": "true",
},
],
],
],
[
"DynamicEnumProperty",
{
"Name": rule.after_targets,
"Category": "General",
"EnumProvider": "Targets",
"IncludeInCommandLine": "False",
},
[
"DynamicEnumProperty.DisplayName",
["sys:String", "Execute After"],
],
[
"DynamicEnumProperty.Description",
[
"sys:String",
(
"Specifies the targets for the build customization"
" to run after."
),
],
],
[
"DynamicEnumProperty.ProviderSettings",
[
"NameValuePair",
{
"Name": "Exclude",
"Value": "^%s|^Compute" % rule.after_targets,
},
],
],
[
"DynamicEnumProperty.DataSource",
[
"DataSource",
{
"Persistence": "ProjectFile",
"ItemType": "",
"HasConfigurationCondition": "true",
},
],
],
],
[
"StringListProperty",
{
"Name": "Outputs",
"DisplayName": "Outputs",
"Visible": "False",
"IncludeInCommandLine": "False",
},
],
[
"StringProperty",
{
"Name": "ExecutionDescription",
"DisplayName": "Execution Description",
"Visible": "False",
"IncludeInCommandLine": "False",
},
],
[
"StringListProperty",
{
"Name": "AdditionalDependencies",
"DisplayName": "Additional Dependencies",
"IncludeInCommandLine": "False",
"Visible": "false",
},
],
[
"StringProperty",
{
"Subtype": "AdditionalOptions",
"Name": "AdditionalOptions",
"Category": "Command Line",
},
[
"StringProperty.DisplayName",
["sys:String", "Additional Options"],
],
[
"StringProperty.Description",
["sys:String", "Additional Options"],
],
],
],
[
"ItemType",
{"Name": rule.rule_name, "DisplayName": rule.display_name},
],
[
"FileExtension",
{"Name": "*" + rule.extension, "ContentType": rule.rule_name},
],
[
"ContentType",
{
"Name": rule.rule_name,
"DisplayName": "",
"ItemType": rule.rule_name,
},
],
]
)
easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
def _GetConfigurationAndPlatform(name, settings, spec):
configuration = name.rsplit("_", 1)[0]
platform = settings.get("msvs_configuration_platform", "Win32")
if spec["toolset"] == "host" and platform == "arm64":
platform = "x64" # Host-only tools are always built for x64
return (configuration, platform)
def _GetConfigurationCondition(name, settings, spec):
return r"'$(Configuration)|$(Platform)'=='%s|%s'" % _GetConfigurationAndPlatform(
name, settings, spec
)
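# Illustrative sketch (not part of the original source): the condition string
# generated for a configuration named "Debug_x64" that pins its platform.
def _ExampleConfigurationCondition():
    settings = {"msvs_configuration_platform": "x64"}
    spec = {"toolset": "target"}
    assert _GetConfigurationAndPlatform("Debug_x64", settings, spec) == (
        "Debug", "x64")
    assert _GetConfigurationCondition("Debug_x64", settings, spec) == (
        "'$(Configuration)|$(Platform)'=='Debug|x64'")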
def _GetMSBuildProjectConfigurations(configurations, spec):
group = ["ItemGroup", {"Label": "ProjectConfigurations"}]
for (name, settings) in sorted(configurations.items()):
configuration, platform = _GetConfigurationAndPlatform(name, settings, spec)
designation = "%s|%s" % (configuration, platform)
group.append(
[
"ProjectConfiguration",
{"Include": designation},
["Configuration", configuration],
["Platform", platform],
]
)
return [group]
def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name):
namespace = os.path.splitext(gyp_file_name)[0]
properties = [
[
"PropertyGroup",
{"Label": "Globals"},
["ProjectGuid", guid],
["Keyword", "Win32Proj"],
["RootNamespace", namespace],
["IgnoreWarnCompileDuplicatedFilename", "true"],
]
]
if (
os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
):
properties[0].append(["PreferredToolArchitecture", "x64"])
if spec.get("msvs_target_platform_version"):
target_platform_version = spec.get("msvs_target_platform_version")
properties[0].append(["WindowsTargetPlatformVersion", target_platform_version])
if spec.get("msvs_target_platform_minversion"):
target_platform_minversion = spec.get("msvs_target_platform_minversion")
properties[0].append(
["WindowsTargetPlatformMinVersion", target_platform_minversion]
)
else:
properties[0].append(
["WindowsTargetPlatformMinVersion", target_platform_version]
)
if spec.get("msvs_enable_winrt"):
properties[0].append(["DefaultLanguage", "en-US"])
properties[0].append(["AppContainerApplication", "true"])
if spec.get("msvs_application_type_revision"):
app_type_revision = spec.get("msvs_application_type_revision")
properties[0].append(["ApplicationTypeRevision", app_type_revision])
else:
properties[0].append(["ApplicationTypeRevision", "8.1"])
if spec.get("msvs_enable_winphone"):
properties[0].append(["ApplicationType", "Windows Phone"])
else:
properties[0].append(["ApplicationType", "Windows Store"])
platform_name = None
msvs_windows_sdk_version = None
for configuration in spec["configurations"].values():
platform_name = platform_name or _ConfigPlatform(configuration)
msvs_windows_sdk_version = (
msvs_windows_sdk_version
or _ConfigWindowsTargetPlatformVersion(configuration, version)
)
if platform_name and msvs_windows_sdk_version:
break
if msvs_windows_sdk_version:
properties[0].append(
["WindowsTargetPlatformVersion", str(msvs_windows_sdk_version)]
)
elif version.compatible_sdks:
raise GypError(
"%s requires any SDK of %s version, but none were found"
% (version.description, version.compatible_sdks)
)
if platform_name == "ARM":
properties[0].append(["WindowsSDKDesktopARMSupport", "true"])
return properties
def _GetMSBuildConfigurationDetails(spec, build_file):
properties = {}
for name, settings in spec["configurations"].items():
msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
condition = _GetConfigurationCondition(name, settings, spec)
character_set = msbuild_attributes.get("CharacterSet")
config_type = msbuild_attributes.get("ConfigurationType")
_AddConditionalProperty(properties, condition, "ConfigurationType", config_type)
if config_type == "Driver":
_AddConditionalProperty(properties, condition, "DriverType", "WDM")
_AddConditionalProperty(
properties, condition, "TargetVersion", _ConfigTargetVersion(settings)
)
if character_set:
if "msvs_enable_winrt" not in spec:
_AddConditionalProperty(
properties, condition, "CharacterSet", character_set
)
return _GetMSBuildPropertyGroup(spec, "Configuration", properties)
def _GetMSBuildLocalProperties(msbuild_toolset):
# Currently the only local property we support is PlatformToolset
properties = {}
if msbuild_toolset:
properties = [
[
"PropertyGroup",
{"Label": "Locals"},
["PlatformToolset", msbuild_toolset],
]
]
return properties
def _GetMSBuildPropertySheets(configurations, spec):
user_props = r"$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props"
additional_props = {}
props_specified = False
for name, settings in sorted(configurations.items()):
configuration = _GetConfigurationCondition(name, settings, spec)
if "msbuild_props" in settings:
additional_props[configuration] = _FixPaths(settings["msbuild_props"])
props_specified = True
else:
additional_props[configuration] = ""
if not props_specified:
return [
[
"ImportGroup",
{"Label": "PropertySheets"},
[
"Import",
{
"Project": user_props,
"Condition": "exists('%s')" % user_props,
"Label": "LocalAppDataPlatform",
},
],
]
]
else:
sheets = []
for condition, props in additional_props.items():
import_group = [
"ImportGroup",
{"Label": "PropertySheets", "Condition": condition},
[
"Import",
{
"Project": user_props,
"Condition": "exists('%s')" % user_props,
"Label": "LocalAppDataPlatform",
},
],
]
for props_file in props:
import_group.append(["Import", {"Project": props_file}])
sheets.append(import_group)
return sheets
def _ConvertMSVSBuildAttributes(spec, config, build_file):
config_type = _GetMSVSConfigurationType(spec, build_file)
msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
msbuild_attributes = {}
for a in msvs_attributes:
if a in ["IntermediateDirectory", "OutputDirectory"]:
directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
if not directory.endswith("\\"):
directory += "\\"
msbuild_attributes[a] = directory
elif a == "CharacterSet":
msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
elif a == "ConfigurationType":
msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
else:
print("Warning: Do not know how to convert MSVS attribute " + a)
return msbuild_attributes
def _ConvertMSVSCharacterSet(char_set):
if char_set.isdigit():
char_set = {"0": "MultiByte", "1": "Unicode", "2": "MultiByte"}[char_set]
return char_set
def _ConvertMSVSConfigurationType(config_type):
if config_type.isdigit():
config_type = {
"1": "Application",
"2": "DynamicLibrary",
"4": "StaticLibrary",
"5": "Driver",
"10": "Utility",
}[config_type]
return config_type
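# Illustrative sketch (not part of the original source): the numeric MSVS
# enum values above translated to their MSBuild names.
def _ExampleConvertMSVSEnums():
    assert _ConvertMSVSCharacterSet("1") == "Unicode"
    assert _ConvertMSVSConfigurationType("2") == "DynamicLibrary"
    # Values that are already symbolic pass through unchanged:
    assert _ConvertMSVSConfigurationType("Application") == "Application"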
def _GetMSBuildAttributes(spec, config, build_file):
if "msbuild_configuration_attributes" not in config:
msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
else:
config_type = _GetMSVSConfigurationType(spec, build_file)
config_type = _ConvertMSVSConfigurationType(config_type)
msbuild_attributes = config.get("msbuild_configuration_attributes", {})
msbuild_attributes.setdefault("ConfigurationType", config_type)
output_dir = msbuild_attributes.get(
"OutputDirectory", "$(SolutionDir)$(Configuration)"
)
msbuild_attributes["OutputDirectory"] = _FixPath(output_dir) + "\\"
if "IntermediateDirectory" not in msbuild_attributes:
intermediate = _FixPath("$(Configuration)") + "\\"
msbuild_attributes["IntermediateDirectory"] = intermediate
if "CharacterSet" in msbuild_attributes:
msbuild_attributes["CharacterSet"] = _ConvertMSVSCharacterSet(
msbuild_attributes["CharacterSet"]
)
if "TargetName" not in msbuild_attributes:
prefix = spec.get("product_prefix", "")
product_name = spec.get("product_name", "$(ProjectName)")
target_name = prefix + product_name
msbuild_attributes["TargetName"] = target_name
if "TargetExt" not in msbuild_attributes and "product_extension" in spec:
ext = spec.get("product_extension")
msbuild_attributes["TargetExt"] = "." + ext
if spec.get("msvs_external_builder"):
external_out_dir = spec.get("msvs_external_builder_out_dir", ".")
msbuild_attributes["OutputDirectory"] = _FixPath(external_out_dir) + "\\"
# Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
# (depending on the tool used) to avoid MSB8012 warning.
msbuild_tool_map = {
"executable": "Link",
"shared_library": "Link",
"loadable_module": "Link",
"windows_driver": "Link",
"static_library": "Lib",
}
msbuild_tool = msbuild_tool_map.get(spec["type"])
if msbuild_tool:
msbuild_settings = config["finalized_msbuild_settings"]
out_file = msbuild_settings[msbuild_tool].get("OutputFile")
if out_file:
msbuild_attributes["TargetPath"] = _FixPath(out_file)
target_ext = msbuild_settings[msbuild_tool].get("TargetExt")
if target_ext:
msbuild_attributes["TargetExt"] = target_ext
return msbuild_attributes
def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
# TODO(jeanluc) We could optimize out the following and do it only if
# there are actions.
# TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
new_paths = []
cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0]
if cygwin_dirs:
cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs)
new_paths.append(cyg_path)
# TODO(jeanluc) Change the convention to have both a cygwin_dir and a
# python_dir.
python_path = cyg_path.replace("cygwin\\bin", "python_26")
new_paths.append(python_path)
if new_paths:
new_paths = "$(ExecutablePath);" + ";".join(new_paths)
properties = {}
for (name, configuration) in sorted(configurations.items()):
condition = _GetConfigurationCondition(name, configuration, spec)
attributes = _GetMSBuildAttributes(spec, configuration, build_file)
msbuild_settings = configuration["finalized_msbuild_settings"]
_AddConditionalProperty(
properties, condition, "IntDir", attributes["IntermediateDirectory"]
)
_AddConditionalProperty(
properties, condition, "OutDir", attributes["OutputDirectory"]
)
_AddConditionalProperty(
properties, condition, "TargetName", attributes["TargetName"]
)
if "TargetExt" in attributes:
_AddConditionalProperty(
properties, condition, "TargetExt", attributes["TargetExt"]
)
if attributes.get("TargetPath"):
_AddConditionalProperty(
properties, condition, "TargetPath", attributes["TargetPath"]
)
if attributes.get("TargetExt"):
_AddConditionalProperty(
properties, condition, "TargetExt", attributes["TargetExt"]
)
if new_paths:
_AddConditionalProperty(properties, condition, "ExecutablePath", new_paths)
tool_settings = msbuild_settings.get("", {})
for name, value in sorted(tool_settings.items()):
formatted_value = _GetValueFormattedForMSBuild("", name, value)
_AddConditionalProperty(properties, condition, name, formatted_value)
return _GetMSBuildPropertyGroup(spec, None, properties)
def _AddConditionalProperty(properties, condition, name, value):
"""Adds a property / conditional value pair to a dictionary.
Arguments:
      properties: The dictionary to be modified. The key is the name of the
          property. The value is itself a dictionary; its key is the value and
          its value a list of conditions for which this value is true.
condition: The condition under which the named property has the value.
name: The name of the property.
value: The value of the property.
"""
if name not in properties:
properties[name] = {}
values = properties[name]
if value not in values:
values[value] = []
conditions = values[value]
conditions.append(condition)
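# Illustrative sketch (not part of the original source): the nested structure
# _AddConditionalProperty builds, mapping name -> value -> list of conditions.
def _ExampleAddConditionalProperty():
    properties = {}
    _AddConditionalProperty(properties, "'$(Configuration)'=='Debug'",
                            "OutDir", "Debug\\")
    _AddConditionalProperty(properties, "'$(Configuration)'=='Release'",
                            "OutDir", "Debug\\")
    # Both configurations share the same value, so both conditions collect
    # under that value:
    assert properties == {
        "OutDir": {
            "Debug\\": ["'$(Configuration)'=='Debug'",
                        "'$(Configuration)'=='Release'"]
        }
    }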
# Regex for MSVS variable references, i.e. $(FOO).
MSVS_VARIABLE_REFERENCE = re.compile(r"\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)")
def _GetMSBuildPropertyGroup(spec, label, properties):
"""Returns a PropertyGroup definition for the specified properties.
Arguments:
spec: The target project dict.
label: An optional label for the PropertyGroup.
      properties: The dictionary to be converted. The key is the name of the
          property. The value is itself a dictionary; its key is the value and
          its value a list of conditions for which this value is true.
"""
group = ["PropertyGroup"]
if label:
group.append({"Label": label})
num_configurations = len(spec["configurations"])
def GetEdges(node):
        # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
edges = set()
for value in sorted(properties[node].keys()):
            # Add to edges all $(...) references to variables.
            #
            # Variable references that refer to names not in properties are
            # excluded. These can exist, for instance, to refer to built-in
            # definitions like $(SolutionDir).
            #
            # Self references are ignored. Self reference is used in a few
            # places to append to the default value, e.g. PATH=$(PATH);other_path.
edges.update(
set(
[
v
for v in MSVS_VARIABLE_REFERENCE.findall(value)
if v in properties and v != node
]
)
)
return edges
properties_ordered = gyp.common.TopologicallySorted(properties.keys(), GetEdges)
# Walk properties in the reverse of a topological sort on
# user_of_variable -> used_variable as this ensures variables are
# defined before they are used.
# NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
for name in reversed(properties_ordered):
values = properties[name]
for value, conditions in sorted(values.items()):
if len(conditions) == num_configurations:
                # If the value is the same for all configurations,
                # just add one unconditional entry.
group.append([name, value])
else:
for condition in conditions:
group.append([name, {"Condition": condition}, value])
return [group]
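# Illustrative note (not part of the original source): if "TargetPath" has the
# value "$(OutDir)foo.exe", GetEdges yields the edge TargetPath -> OutDir. The
# topological sort then places "TargetPath" before "OutDir", and the reversed
# iteration above emits the OutDir property before the TargetPath property
# that references it.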
def _GetMSBuildToolSettingsSections(spec, configurations):
groups = []
for (name, configuration) in sorted(configurations.items()):
msbuild_settings = configuration["finalized_msbuild_settings"]
group = [
"ItemDefinitionGroup",
{"Condition": _GetConfigurationCondition(name, configuration, spec)},
]
for tool_name, tool_settings in sorted(msbuild_settings.items()):
# Skip the tool named '' which is a holder of global settings handled
# by _GetMSBuildConfigurationGlobalProperties.
if tool_name:
if tool_settings:
tool = [tool_name]
for name, value in sorted(tool_settings.items()):
formatted_value = _GetValueFormattedForMSBuild(
tool_name, name, value
)
tool.append([name, formatted_value])
group.append(tool)
groups.append(group)
return groups
def _FinalizeMSBuildSettings(spec, configuration):
if "msbuild_settings" in configuration:
converted = False
msbuild_settings = configuration["msbuild_settings"]
MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
else:
converted = True
msvs_settings = configuration.get("msvs_settings", {})
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(
configuration
)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(configuration)
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
target_ext = _GetOutputTargetExt(spec)
defines = _GetDefines(configuration)
if converted:
# Visual Studio 2010 has TR1
defines = [d for d in defines if d != "_HAS_TR1=0"]
# Warn of ignored settings
ignored_settings = ["msvs_tool_files"]
for ignored_setting in ignored_settings:
value = configuration.get(ignored_setting)
if value:
print(
"Warning: The automatic conversion to MSBuild does not handle "
"%s. Ignoring setting of %s" % (ignored_setting, str(value))
)
defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
disabled_warnings = _GetDisabledWarnings(configuration)
prebuild = configuration.get("msvs_prebuild")
postbuild = configuration.get("msvs_postbuild")
def_file = _GetModuleDefinition(spec)
precompiled_header = configuration.get("msvs_precompiled_header")
# Add the information to the appropriate tool
# TODO(jeanluc) We could optimize and generate these settings only if
# the corresponding files are found, e.g. don't generate ResourceCompile
# if you don't have any resources.
_ToolAppend(
msbuild_settings, "ClCompile", "AdditionalIncludeDirectories", include_dirs
)
_ToolAppend(
msbuild_settings, "Midl", "AdditionalIncludeDirectories", midl_include_dirs
)
_ToolAppend(
msbuild_settings,
"ResourceCompile",
"AdditionalIncludeDirectories",
resource_include_dirs,
)
# Add in libraries, note that even for empty libraries, we want this
# set, to prevent inheriting default libraries from the environment.
_ToolSetOrAppend(msbuild_settings, "Link", "AdditionalDependencies", libraries)
_ToolAppend(msbuild_settings, "Link", "AdditionalLibraryDirectories", library_dirs)
if out_file:
_ToolAppend(
msbuild_settings, msbuild_tool, "OutputFile", out_file, only_if_unset=True
)
if target_ext:
_ToolAppend(
msbuild_settings, msbuild_tool, "TargetExt", target_ext, only_if_unset=True
)
# Add defines.
_ToolAppend(msbuild_settings, "ClCompile", "PreprocessorDefinitions", defines)
_ToolAppend(msbuild_settings, "ResourceCompile", "PreprocessorDefinitions", defines)
# Add disabled warnings.
_ToolAppend(
msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings
)
# Turn on precompiled headers if appropriate.
if precompiled_header:
precompiled_header = os.path.split(precompiled_header)[1]
_ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use")
_ToolAppend(
msbuild_settings, "ClCompile", "PrecompiledHeaderFile", precompiled_header
)
_ToolAppend(
msbuild_settings, "ClCompile", "ForcedIncludeFiles", [precompiled_header]
)
else:
_ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "NotUsing")
# Turn off WinRT compilation
_ToolAppend(msbuild_settings, "ClCompile", "CompileAsWinRT", "false")
# Turn on import libraries if appropriate
if spec.get("msvs_requires_importlibrary"):
_ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "false")
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec["type"] == "loadable_module":
_ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "true")
# Set the module definition file if any.
if def_file:
_ToolAppend(msbuild_settings, "Link", "ModuleDefinitionFile", def_file)
configuration["finalized_msbuild_settings"] = msbuild_settings
if prebuild:
_ToolAppend(msbuild_settings, "PreBuildEvent", "Command", prebuild)
if postbuild:
_ToolAppend(msbuild_settings, "PostBuildEvent", "Command", postbuild)
def _GetValueFormattedForMSBuild(tool_name, name, value):
    if isinstance(value, list):
        # For some settings, VS2010 does not automatically extend the settings.
# TODO(jeanluc) Is this what we want?
if name in [
"AdditionalIncludeDirectories",
"AdditionalLibraryDirectories",
"AdditionalOptions",
"DelayLoadDLLs",
"DisableSpecificWarnings",
"PreprocessorDefinitions",
]:
value.append("%%(%s)" % name)
# For most tools, entries in a list should be separated with ';' but some
# settings use a space. Check for those first.
exceptions = {
"ClCompile": ["AdditionalOptions"],
"Link": ["AdditionalOptions"],
"Lib": ["AdditionalOptions"],
}
if tool_name in exceptions and name in exceptions[tool_name]:
char = " "
else:
char = ";"
formatted_value = char.join(
[MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value]
)
else:
formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
return formatted_value
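# Illustrative sketch (not part of the original source): list formatting for
# an inheriting setting versus a space-separated one. Note that both names are
# in the inheritance list above, so "%(Name)" is appended before joining.
def _ExampleValueFormatting():
    dirs = ["include", "third_party"]
    assert _GetValueFormattedForMSBuild(
        "ClCompile", "AdditionalIncludeDirectories", dirs
    ) == "include;third_party;%(AdditionalIncludeDirectories)"
    # AdditionalOptions under ClCompile is one of the space-separated cases:
    assert _GetValueFormattedForMSBuild(
        "ClCompile", "AdditionalOptions", ["/MP", "/bigobj"]
    ) == "/MP /bigobj %(AdditionalOptions)"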
def _VerifySourcesExist(sources, root_dir):
"""Verifies that all source files exist on disk.
Checks that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation but no otherwise
visible errors.
Arguments:
sources: A recursive list of Filter/file names.
root_dir: The root directory for the relative path names.
Returns:
A list of source files that cannot be found on disk.
"""
missing_sources = []
for source in sources:
if isinstance(source, MSVSProject.Filter):
missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
else:
if "$" not in source:
full_path = os.path.join(root_dir, source)
if not os.path.exists(full_path):
missing_sources.append(full_path)
return missing_sources
def _GetMSBuildSources(
spec,
sources,
exclusions,
rule_dependencies,
extension_to_rule_name,
actions_spec,
sources_handled_by_action,
list_excluded,
):
groups = [
"none",
"masm",
"midl",
"include",
"compile",
"resource",
"rule",
"rule_dependency",
]
grouped_sources = {}
for g in groups:
grouped_sources[g] = []
_AddSources2(
spec,
sources,
exclusions,
grouped_sources,
rule_dependencies,
extension_to_rule_name,
sources_handled_by_action,
list_excluded,
)
sources = []
for g in groups:
if grouped_sources[g]:
sources.append(["ItemGroup"] + grouped_sources[g])
if actions_spec:
sources.append(["ItemGroup"] + actions_spec)
return sources
def _AddSources2(
spec,
sources,
exclusions,
grouped_sources,
rule_dependencies,
extension_to_rule_name,
sources_handled_by_action,
list_excluded,
):
extensions_excluded_from_precompile = []
for source in sources:
if isinstance(source, MSVSProject.Filter):
_AddSources2(
spec,
source.contents,
exclusions,
grouped_sources,
rule_dependencies,
extension_to_rule_name,
sources_handled_by_action,
list_excluded,
)
else:
if source not in sources_handled_by_action:
detail = []
excluded_configurations = exclusions.get(source, [])
if len(excluded_configurations) == len(spec["configurations"]):
detail.append(["ExcludedFromBuild", "true"])
else:
for config_name, configuration in sorted(excluded_configurations):
                        condition = _GetConfigurationCondition(
                            config_name, configuration, spec
                        )
detail.append(
["ExcludedFromBuild", {"Condition": condition}, "true"]
)
# Add precompile if needed
for config_name, configuration in spec["configurations"].items():
precompiled_source = configuration.get(
"msvs_precompiled_source", ""
)
if precompiled_source != "":
precompiled_source = _FixPath(precompiled_source)
if not extensions_excluded_from_precompile:
# If the precompiled header is generated by a C source,
# we must not try to use it for C++ sources,
# and vice versa.
basename, extension = os.path.splitext(precompiled_source)
if extension == ".c":
extensions_excluded_from_precompile = [
".cc",
".cpp",
".cxx",
]
else:
extensions_excluded_from_precompile = [".c"]
if precompiled_source == source:
condition = _GetConfigurationCondition(
config_name, configuration, spec
)
detail.append(
["PrecompiledHeader", {"Condition": condition}, "Create"]
)
else:
# Turn off precompiled header usage for source files of a
# different type than the file that generated the
# precompiled header.
for extension in extensions_excluded_from_precompile:
if source.endswith(extension):
detail.append(["PrecompiledHeader", ""])
detail.append(["ForcedIncludeFiles", ""])
group, element = _MapFileToMsBuildSourceType(
source,
rule_dependencies,
extension_to_rule_name,
_GetUniquePlatforms(spec),
spec["toolset"],
)
if group == "compile" and not os.path.isabs(source):
# Add an <ObjectFileName> value to support duplicate source
# file basenames, except for absolute paths to avoid paths
# with more than 260 characters.
file_name = os.path.splitext(source)[0] + ".obj"
if file_name.startswith("..\\"):
file_name = re.sub(r"^(\.\.\\)+", "", file_name)
elif file_name.startswith("$("):
file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name)
detail.append(["ObjectFileName", "$(IntDir)\\" + file_name])
grouped_sources[group].append([element, {"Include": source}] + detail)
def _GetMSBuildProjectReferences(project):
references = []
if project.dependencies:
group = ["ItemGroup"]
added_dependency_set = set()
for dependency in project.dependencies:
dependency_spec = dependency.spec
should_skip_dep = False
if project.spec["toolset"] == "target":
if dependency_spec["toolset"] == "host":
if dependency_spec["type"] == "static_library":
should_skip_dep = True
if dependency.name.startswith("run_"):
should_skip_dep = False
if should_skip_dep:
continue
canonical_name = dependency.name.replace("_host", "")
added_dependency_set.add(canonical_name)
guid = dependency.guid
project_dir = os.path.split(project.path)[0]
relative_path = gyp.common.RelativePath(dependency.path, project_dir)
project_ref = [
"ProjectReference",
{"Include": relative_path},
["Project", guid],
["ReferenceOutputAssembly", "false"],
]
for config in dependency.spec.get("configurations", {}).values():
if config.get("msvs_use_library_dependency_inputs", 0):
project_ref.append(["UseLibraryDependencyInputs", "true"])
break
# If it's disabled in any config, turn it off in the reference.
if config.get("msvs_2010_disable_uldi_when_referenced", 0):
project_ref.append(["UseLibraryDependencyInputs", "false"])
break
group.append(project_ref)
references.append(group)
return references
def _GenerateMSBuildProject(project, options, version, generator_flags, spec):
spec = project.spec
configurations = spec["configurations"]
toolset = spec["toolset"]
project_dir, project_file_name = os.path.split(project.path)
gyp.common.EnsureDirExists(project.path)
# Prepare list of sources and excluded sources.
gyp_file = os.path.split(project.build_file)[1]
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file)
# Add rules.
actions_to_add = {}
props_files_of_rules = set()
targets_files_of_rules = set()
rule_dependencies = set()
extension_to_rule_name = {}
list_excluded = generator_flags.get("msvs_list_excluded_files", True)
platforms = _GetUniquePlatforms(spec)
# Don't generate rules if we are using an external builder like ninja.
if not spec.get("msvs_external_builder"):
_GenerateRulesForMSBuild(
project_dir,
options,
spec,
sources,
excluded_sources,
props_files_of_rules,
targets_files_of_rules,
actions_to_add,
rule_dependencies,
extension_to_rule_name,
)
else:
rules = spec.get("rules", [])
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy(
spec, options, project_dir, sources, excluded_sources, list_excluded, version
)
# Don't add actions if we are using an external builder like ninja.
if not spec.get("msvs_external_builder"):
_AddActions(actions_to_add, spec, project.build_file)
_AddCopies(actions_to_add, spec)
# NOTE: this stanza must appear after all actions have been decided.
    # Don't exclude sources with actions attached, or they won't run.
excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add)
exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
spec, actions_to_add
)
_GenerateMSBuildFiltersFile(
project.path + ".filters",
sources,
rule_dependencies,
extension_to_rule_name,
platforms,
toolset,
)
missing_sources = _VerifySourcesExist(sources, project_dir)
for configuration in configurations.values():
_FinalizeMSBuildSettings(spec, configuration)
# Add attributes to root element
import_default_section = [
["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.Default.props"}]
]
import_cpp_props_section = [
["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.props"}]
]
import_cpp_targets_section = [
["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.targets"}]
]
import_masm_props_section = [
["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.props"}]
]
import_masm_targets_section = [
["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.targets"}]
]
import_marmasm_props_section = [
["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.props"}]
]
import_marmasm_targets_section = [
["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.targets"}]
]
macro_section = [["PropertyGroup", {"Label": "UserMacros"}]]
content = [
"Project",
{
"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003",
"ToolsVersion": version.ProjectVersion(),
"DefaultTargets": "Build",
},
]
content += _GetMSBuildProjectConfigurations(configurations, spec)
content += _GetMSBuildGlobalProperties(
spec, version, project.guid, project_file_name
)
content += import_default_section
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
if spec.get("msvs_enable_winphone"):
content += _GetMSBuildLocalProperties("v120_wp81")
else:
content += _GetMSBuildLocalProperties(project.msbuild_toolset)
content += import_cpp_props_section
content += import_masm_props_section
if "arm64" in platforms and toolset == "target":
content += import_marmasm_props_section
content += _GetMSBuildExtensions(props_files_of_rules)
content += _GetMSBuildPropertySheets(configurations, spec)
content += macro_section
content += _GetMSBuildConfigurationGlobalProperties(
spec, configurations, project.build_file
)
content += _GetMSBuildToolSettingsSections(spec, configurations)
content += _GetMSBuildSources(
spec,
sources,
exclusions,
rule_dependencies,
extension_to_rule_name,
actions_spec,
sources_handled_by_action,
list_excluded,
)
content += _GetMSBuildProjectReferences(project)
content += import_cpp_targets_section
content += import_masm_targets_section
if "arm64" in platforms and toolset == "target":
content += import_marmasm_targets_section
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
if spec.get("msvs_external_builder"):
content += _GetMSBuildExternalBuilderTargets(spec)
# TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
# has_run_as = _WriteMSVSUserFile(project.path, version, spec)
easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
return missing_sources
def _GetMSBuildExternalBuilderTargets(spec):
"""Return a list of MSBuild targets for external builders.
The "Build" and "Clean" targets are always generated. If the spec contains
'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
be generated, to support building selected C/C++ files.
Arguments:
spec: The gyp target spec.
Returns:
List of MSBuild 'Target' specs.
"""
build_cmd = _BuildCommandLineForRuleRaw(
spec, spec["msvs_external_builder_build_cmd"], False, False, False, False
)
build_target = ["Target", {"Name": "Build"}]
build_target.append(["Exec", {"Command": build_cmd}])
clean_cmd = _BuildCommandLineForRuleRaw(
spec, spec["msvs_external_builder_clean_cmd"], False, False, False, False
)
clean_target = ["Target", {"Name": "Clean"}]
clean_target.append(["Exec", {"Command": clean_cmd}])
targets = [build_target, clean_target]
if spec.get("msvs_external_builder_clcompile_cmd"):
clcompile_cmd = _BuildCommandLineForRuleRaw(
spec,
spec["msvs_external_builder_clcompile_cmd"],
False,
False,
False,
False,
)
clcompile_target = ["Target", {"Name": "ClCompile"}]
clcompile_target.append(["Exec", {"Command": clcompile_cmd}])
targets.append(clcompile_target)
return targets
def _GetMSBuildExtensions(props_files_of_rules):
extensions = ["ImportGroup", {"Label": "ExtensionSettings"}]
for props_file in props_files_of_rules:
extensions.append(["Import", {"Project": props_file}])
return [extensions]
def _GetMSBuildExtensionTargets(targets_files_of_rules):
targets_node = ["ImportGroup", {"Label": "ExtensionTargets"}]
for targets_file in sorted(targets_files_of_rules):
targets_node.append(["Import", {"Project": targets_file}])
return [targets_node]
def _GenerateActionsForMSBuild(spec, actions_to_add):
"""Add actions accumulated into an actions_to_add, merging as needed.
Arguments:
spec: the target project dict
actions_to_add: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
Returns:
A pair of (action specification, the sources handled by this action).
"""
sources_handled_by_action = OrderedSet()
actions_spec = []
for primary_input, actions in actions_to_add.items():
if generator_supports_multiple_toolsets:
primary_input = primary_input.replace(".exe", "_host.exe")
inputs = OrderedSet()
outputs = OrderedSet()
descriptions = []
commands = []
for action in actions:
def fixup_host_exe(i):
if "$(OutDir)" in i:
i = i.replace(".exe", "_host.exe")
return i
if generator_supports_multiple_toolsets:
action["inputs"] = [fixup_host_exe(i) for i in action["inputs"]]
inputs.update(OrderedSet(action["inputs"]))
outputs.update(OrderedSet(action["outputs"]))
descriptions.append(action["description"])
cmd = action["command"]
if generator_supports_multiple_toolsets:
cmd = cmd.replace(".exe", "_host.exe")
# For most actions, add 'call' so that actions that invoke batch files
# return and continue executing. msbuild_use_call provides a way to
# disable this but I have not seen any adverse effect from doing that
# for everything.
if action.get("msbuild_use_call", True):
cmd = "call " + cmd
commands.append(cmd)
# Add the custom build action for one input file.
description = ", and also ".join(descriptions)
# We can't join the commands simply with && because the command line will
# get too long. See also _AddActions: cygwin's setup_env mustn't be called
# for every invocation or the command that sets the PATH will grow too
# long.
command = "\r\n".join(
[c + "\r\nif %errorlevel% neq 0 exit /b %errorlevel%" for c in commands]
)
_AddMSBuildAction(
spec,
primary_input,
inputs,
outputs,
command,
description,
sources_handled_by_action,
actions_spec,
)
return actions_spec, sources_handled_by_action
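# Illustrative sketch (not part of the original source): two commands joined
# the way _GenerateActionsForMSBuild does it. Each step is followed by an
# errorlevel check, so a failing action aborts the custom build step instead
# of disappearing into an over-long "&&" chain:
#   call python gen_a.py
#   if %errorlevel% neq 0 exit /b %errorlevel%
#   call python gen_b.py
#   if %errorlevel% neq 0 exit /b %errorlevel%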
def _AddMSBuildAction(
spec,
primary_input,
inputs,
outputs,
cmd,
description,
sources_handled_by_action,
actions_spec,
):
command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
primary_input = _FixPath(primary_input)
inputs_array = _FixPaths(inputs)
outputs_array = _FixPaths(outputs)
additional_inputs = ";".join([i for i in inputs_array if i != primary_input])
outputs = ";".join(outputs_array)
sources_handled_by_action.add(primary_input)
action_spec = ["CustomBuild", {"Include": primary_input}]
action_spec.extend(
# TODO(jeanluc) 'Document' for all or just if as_sources?
[
["FileType", "Document"],
["Command", command],
["Message", description],
["Outputs", outputs],
]
)
if additional_inputs:
action_spec.append(["AdditionalInputs", additional_inputs])
actions_spec.append(action_spec)
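# For reference, a single merged action ends up as a nested-list element like
# the following (all values hypothetical), which the MSBuild project writer
# later serializes to XML:
#
#   ['CustomBuild', {'Include': 'in.idl'},
#    ['FileType', 'Document'],
#    ['Command', 'call gen.bat in.idl\r\nif %errorlevel% neq 0 exit /b %errorlevel%'],
#    ['Message', 'Generating bindings'],
#    ['Outputs', 'out.h;out.cc'],
#    ['AdditionalInputs', 'helper.py']]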
|
arvenil/resume
|
node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
|
Python
|
mit
| 150,414
|
from django.contrib import admin
from accounts.models import Account
admin.site.register(Account)
|
akash-dev-github/Transactions
|
transactions/accounts/admin.py
|
Python
|
mit
| 100
|
import webapp2, logging
from database import get_feed_source_by_name, store_feed_source, \
get_feed_source_by_url, change_feed_source_url
class AddHandler(webapp2.RequestHandler):
def post(self):
from database import FeedSource
name = self.request.get('name')
url = self.request.get('url')
frequency_ms = self.request.get('frequency_ms')
should_update = self.request.get('should_update')
should_be_added = True
existing_source = get_feed_source_by_url(url)
if existing_source:
should_be_added = False
            self.response.write(
                'The URL (' + url + ') already exists (name - ' +
                existing_source.name + ').<br/>')
self.response.write('Forgot you added it already? :O')
else:
existing_source = get_feed_source_by_name(name)
if existing_source:
if should_update:
should_be_added = False
change_feed_source_url(existing_source, url)
self.response.write('Updated.')
else:
should_be_added = False
self.response.write('The name (' + name + ') already exists.<br/>')
                    self.response.write(
                        'Go back and choose a different name, or tick "Update?".<br/>')
if should_be_added and store_feed_source(name, url, int(frequency_ms)):
            self.response.write('Added.')
def get(self):
from database import FeedSource
self.response.write("""<!doctype html><title>Add Feed</title>
<form method="post">
Name - <input name="name"/><br/>
URL - <input name="url"/><br/>
Frequency (milliseconds) -
<input type="number" value="1000" name="frequency_ms"/><br/>
<label>Update?<input type="checkbox" name="should_update" value="1"/></label>
<input type="submit"/>
</form>""")
|
phistuck/FrequentFeedScraper
|
add_handler.py
|
Python
|
mit
| 1,694
|
# -*- coding: utf-8 -*-
#
## This file is part of Invenio.
## Copyright (C) 2012, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
SeqUtils database models.
"""
from invenio.ext.sqlalchemy import db
class SeqSTORE(db.Model):
"""Represents a SeqSTORE record."""
__tablename__ = 'seqSTORE'
id = db.Column(
db.Integer(15, unsigned=True),
primary_key=True, nullable=False,
autoincrement=True
)
seq_name = db.Column(db.String(15))
seq_value = db.Column(db.String(20))
__table_args__ = (db.Index('seq_name_value', seq_name, seq_value,
unique=True),
db.Model.__table_args__)
__all__ = ['SeqSTORE']
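# Usage sketch, assuming the Flask-SQLAlchemy style query interface that
# invenio.ext.sqlalchemy models expose (the sequence name is hypothetical):
#
#   seq = SeqSTORE.query.filter_by(seq_name='annrep').first()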
|
MSusik/invenio
|
invenio/modules/sequencegenerator/models.py
|
Python
|
gpl-2.0
| 1,370
|
# coding: utf-8
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Test Structured units and quantities.
"""
import pytest
import numpy as np
from numpy.testing import assert_array_equal
from astropy import units as u
from astropy.units import StructuredUnit, Unit, UnitBase, Quantity
from astropy.utils.masked import Masked
class StructuredTestBase:
@classmethod
def setup_class(self):
self.pv_dtype = np.dtype([('p', 'f8'), ('v', 'f8')])
self.pv_t_dtype = np.dtype([('pv', self.pv_dtype), ('t', 'f8')])
self.p_unit = u.km
self.v_unit = u.km / u.s
self.t_unit = u.s
self.pv = np.array([(1., 0.25), (2., 0.5), (3., 0.75)],
self.pv_dtype)
self.pv_t = np.array([((4., 2.5), 0.),
((5., 5.0), 1.),
((6., 7.5), 2.)], self.pv_t_dtype)
class StructuredTestBaseWithUnits(StructuredTestBase):
@classmethod
def setup_class(self):
super().setup_class()
self.pv_unit = StructuredUnit((self.p_unit, self.v_unit),
('p', 'v'))
self.pv_t_unit = StructuredUnit((self.pv_unit, self.t_unit),
('pv', 't'))
class TestStructuredUnitBasics(StructuredTestBase):
def test_initialization_and_keying(self):
su = StructuredUnit((self.p_unit, self.v_unit), ('p', 'v'))
assert su['p'] is self.p_unit
assert su['v'] is self.v_unit
su2 = StructuredUnit((su, self.t_unit), ('pv', 't'))
assert isinstance(su2['pv'], StructuredUnit)
assert su2['pv']['p'] is self.p_unit
assert su2['pv']['v'] is self.v_unit
assert su2['t'] is self.t_unit
assert su2['pv'] == su
su3 = StructuredUnit(('AU', 'AU/day'), ('p', 'v'))
assert isinstance(su3['p'], UnitBase)
assert isinstance(su3['v'], UnitBase)
su4 = StructuredUnit('AU, AU/day', ('p', 'v'))
assert su4['p'] == u.AU
assert su4['v'] == u.AU / u.day
su5 = StructuredUnit(('AU', 'AU/day'))
assert su5.field_names == ('f0', 'f1')
assert su5['f0'] == u.AU
assert su5['f1'] == u.AU / u.day
def test_recursive_initialization(self):
su = StructuredUnit(((self.p_unit, self.v_unit), self.t_unit),
(('p', 'v'), 't'))
assert isinstance(su['pv'], StructuredUnit)
assert su['pv']['p'] is self.p_unit
assert su['pv']['v'] is self.v_unit
assert su['t'] is self.t_unit
su2 = StructuredUnit(((self.p_unit, self.v_unit), self.t_unit),
(['p_v', ('p', 'v')], 't'))
assert isinstance(su2['p_v'], StructuredUnit)
assert su2['p_v']['p'] is self.p_unit
assert su2['p_v']['v'] is self.v_unit
assert su2['t'] is self.t_unit
su3 = StructuredUnit((('AU', 'AU/day'), 'yr'),
(['p_v', ('p', 'v')], 't'))
assert isinstance(su3['p_v'], StructuredUnit)
assert su3['p_v']['p'] == u.AU
assert su3['p_v']['v'] == u.AU / u.day
assert su3['t'] == u.yr
su4 = StructuredUnit('(AU, AU/day), yr', (('p', 'v'), 't'))
assert isinstance(su4['pv'], StructuredUnit)
assert su4['pv']['p'] == u.AU
assert su4['pv']['v'] == u.AU / u.day
assert su4['t'] == u.yr
def test_extreme_recursive_initialization(self):
su = StructuredUnit('(yr,(AU,AU/day,(km,(day,day))),m)',
('t', ('p', 'v', ('h', ('d1', 'd2'))), 'l'))
assert su.field_names == ('t', ['pvhd1d2',
('p', 'v',
['hd1d2',
('h',
['d1d2',
('d1', 'd2')])])], 'l')
@pytest.mark.parametrize('names, invalid', [
[('t', ['p', 'v']), "['p', 'v']"],
[('t', ['pv', 'p', 'v']), "['pv', 'p', 'v']"],
[('t', ['pv', ['p', 'v']]), "['pv', ['p', 'v']"],
[('t', ()), "()"],
[('t', ('p', None)), "None"],
[('t', ['pv', ('p', '')]), "''"]])
def test_initialization_names_invalid_list_errors(self, names, invalid):
with pytest.raises(ValueError) as exc:
StructuredUnit('(yr,(AU,AU/day)', names)
assert f'invalid entry {invalid}' in str(exc)
def test_looks_like_unit(self):
su = StructuredUnit((self.p_unit, self.v_unit), ('p', 'v'))
assert Unit(su) is su
def test_initialize_with_float_dtype(self):
su = StructuredUnit(('AU', 'AU/d'), self.pv_dtype)
assert isinstance(su['p'], UnitBase)
assert isinstance(su['v'], UnitBase)
assert su['p'] == u.AU
assert su['v'] == u.AU / u.day
su = StructuredUnit((('km', 'km/s'), 'yr'), self.pv_t_dtype)
assert isinstance(su['pv'], StructuredUnit)
assert isinstance(su['pv']['p'], UnitBase)
assert isinstance(su['t'], UnitBase)
assert su['pv']['v'] == u.km / u.s
su = StructuredUnit('(km, km/s), yr', self.pv_t_dtype)
assert isinstance(su['pv'], StructuredUnit)
assert isinstance(su['pv']['p'], UnitBase)
assert isinstance(su['t'], UnitBase)
assert su['pv']['v'] == u.km / u.s
def test_initialize_with_structured_unit_for_names(self):
su = StructuredUnit(('AU', 'AU/d'), names=('p', 'v'))
su2 = StructuredUnit(('km', 'km/s'), names=su)
assert su2.field_names == ('p', 'v')
assert su2['p'] == u.km
assert su2['v'] == u.km / u.s
def test_initialize_single_field(self):
su = StructuredUnit('AU', 'p')
assert isinstance(su, StructuredUnit)
assert isinstance(su['p'], UnitBase)
assert su['p'] == u.AU
su = StructuredUnit('AU')
assert isinstance(su, StructuredUnit)
assert isinstance(su['f0'], UnitBase)
assert su['f0'] == u.AU
def test_equality(self):
su = StructuredUnit(('AU', 'AU/d'), self.pv_dtype)
assert su == StructuredUnit(('AU', 'AU/d'), self.pv_dtype)
assert su != StructuredUnit(('m', 'AU/d'), self.pv_dtype)
# Names should be ignored.
assert su == StructuredUnit(('AU', 'AU/d'))
assert su == StructuredUnit(('AU', 'AU/d'), names=('q', 'w'))
assert su != StructuredUnit(('m', 'm/s'))
def test_parsing(self):
su = Unit('AU, AU/d')
assert isinstance(su, StructuredUnit)
assert isinstance(su['f0'], UnitBase)
assert isinstance(su['f1'], UnitBase)
assert su['f0'] == u.AU
assert su['f1'] == u.AU/u.day
su2 = Unit('AU, AU/d, yr')
assert isinstance(su2, StructuredUnit)
assert su2 == StructuredUnit(('AU', 'AU/d', 'yr'))
su2a = Unit('(AU, AU/d, yr)')
assert isinstance(su2a, StructuredUnit)
assert su2a == su2
su3 = Unit('(km, km/s), yr')
assert isinstance(su3, StructuredUnit)
assert su3 == StructuredUnit((('km', 'km/s'), 'yr'))
su4 = Unit('km,')
assert isinstance(su4, StructuredUnit)
assert su4 == StructuredUnit((u.km,))
su5 = Unit('(m,s),')
assert isinstance(su5, StructuredUnit)
assert su5 == StructuredUnit(((u.m, u.s),))
ldbody_unit = Unit('Msun, 0.5rad^2, (au, au/day)')
assert ldbody_unit == StructuredUnit(
(u.Msun, Unit(u.rad**2 / 2), (u.AU, u.AU / u.day)))
def test_str(self):
su = StructuredUnit(((u.km, u.km/u.s), u.yr))
assert str(su) == '((km, km / s), yr)'
assert Unit(str(su)) == su
def test_repr(self):
su = StructuredUnit(((u.km, u.km/u.s), u.yr))
assert repr(su) == 'Unit("((km, km / s), yr)")'
assert eval(repr(su)) == su
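# Standalone sketch of the behaviour exercised above (it mirrors the
# assertions, no new claims): nested units parse from strings, unnamed fields
# default to f0, f1, ..., and str() round-trips through Unit():
#
#   su = Unit('(km, km / s), yr')   # parses to a StructuredUnit
#   assert su['f1'] == u.yr         # default field names are f0, f1, ...
#   assert Unit(str(su)) == su      # str() round-trips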
class TestStructuredUnitAsMapping(StructuredTestBaseWithUnits):
def test_len(self):
assert len(self.pv_unit) == 2
assert len(self.pv_t_unit) == 2
def test_keys(self):
slv = list(self.pv_t_unit.keys())
assert slv == ['pv', 't']
def test_values(self):
values = self.pv_t_unit.values()
assert values == (self.pv_unit, self.t_unit)
def test_field_names(self):
field_names = self.pv_t_unit.field_names
assert isinstance(field_names, tuple)
assert field_names == (['pv', ('p', 'v')], 't')
@pytest.mark.parametrize('iterable', [list, set])
def test_as_iterable(self, iterable):
sl = iterable(self.pv_unit)
assert isinstance(sl, iterable)
assert sl == iterable(['p', 'v'])
def test_as_dict(self):
sd = dict(self.pv_t_unit)
assert sd == {'pv': self.pv_unit, 't': self.t_unit}
def test_contains(self):
assert 'p' in self.pv_unit
assert 'v' in self.pv_unit
assert 't' not in self.pv_unit
def test_setitem_fails(self):
with pytest.raises(TypeError, match='item assignment'):
self.pv_t_unit['t'] = u.Gyr
class TestStructuredUnitMethods(StructuredTestBaseWithUnits):
def test_physical_type_id(self):
pv_ptid = self.pv_unit._get_physical_type_id()
assert len(pv_ptid) == 2
assert pv_ptid.dtype.names == ('p', 'v')
p_ptid = self.pv_unit['p']._get_physical_type_id()
v_ptid = self.pv_unit['v']._get_physical_type_id()
# Expected should be (subclass of) void, with structured object dtype.
expected = np.array((p_ptid, v_ptid), [('p', 'O'), ('v', 'O')])[()]
assert pv_ptid == expected
# Names should be ignored in comparison.
assert pv_ptid == np.array((p_ptid, v_ptid), 'O,O')[()]
# Should be possible to address by field and by number.
assert pv_ptid['p'] == p_ptid
assert pv_ptid['v'] == v_ptid
assert pv_ptid[0] == p_ptid
assert pv_ptid[1] == v_ptid
# More complicated version.
pv_t_ptid = self.pv_t_unit._get_physical_type_id()
t_ptid = self.t_unit._get_physical_type_id()
assert pv_t_ptid == np.array((pv_ptid, t_ptid), 'O,O')[()]
assert pv_t_ptid['pv'] == pv_ptid
assert pv_t_ptid['t'] == t_ptid
assert pv_t_ptid['pv'][1] == v_ptid
def test_physical_type(self):
pv_pt = self.pv_unit.physical_type
assert pv_pt == np.array(('length', 'speed'), 'O,O')[()]
pv_t_pt = self.pv_t_unit.physical_type
assert pv_t_pt == np.array((pv_pt, 'time'), 'O,O')[()]
def test_si(self):
pv_t_si = self.pv_t_unit.si
assert pv_t_si == self.pv_t_unit
assert pv_t_si['pv']['v'].scale == 1000
def test_cgs(self):
pv_t_cgs = self.pv_t_unit.cgs
assert pv_t_cgs == self.pv_t_unit
assert pv_t_cgs['pv']['v'].scale == 100000
def test_decompose(self):
pv_t_decompose = self.pv_t_unit.decompose()
assert pv_t_decompose['pv']['v'].scale == 1000
def test_is_equivalent(self):
assert self.pv_unit.is_equivalent(('AU', 'AU/day'))
assert not self.pv_unit.is_equivalent('m')
assert not self.pv_unit.is_equivalent(('AU', 'AU'))
# Names should be ignored.
pv_alt = StructuredUnit('m,m/s', names=('q', 'w'))
assert pv_alt.field_names != self.pv_unit.field_names
assert self.pv_unit.is_equivalent(pv_alt)
# Regular units should work too.
assert not u.m.is_equivalent(self.pv_unit)
def test_conversion(self):
pv1 = self.pv_unit.to(('AU', 'AU/day'), self.pv)
assert isinstance(pv1, np.ndarray)
assert pv1.dtype == self.pv.dtype
assert np.all(pv1['p'] * u.AU == self.pv['p'] * self.p_unit)
assert np.all(pv1['v'] * u.AU / u.day == self.pv['v'] * self.v_unit)
# Names should be from value.
su2 = StructuredUnit((self.p_unit, self.v_unit),
('position', 'velocity'))
pv2 = su2.to(('Mm', 'mm/s'), self.pv)
assert pv2.dtype.names == ('p', 'v')
assert pv2.dtype == self.pv.dtype
# Check recursion.
pv_t1 = self.pv_t_unit.to((('AU', 'AU/day'), 'Myr'), self.pv_t)
assert isinstance(pv_t1, np.ndarray)
assert pv_t1.dtype == self.pv_t.dtype
assert np.all(pv_t1['pv']['p'] * u.AU ==
self.pv_t['pv']['p'] * self.p_unit)
assert np.all(pv_t1['pv']['v'] * u.AU / u.day ==
self.pv_t['pv']['v'] * self.v_unit)
assert np.all(pv_t1['t'] * u.Myr == self.pv_t['t'] * self.t_unit)
# Passing in tuples should work.
pv_t2 = self.pv_t_unit.to((('AU', 'AU/day'), 'Myr'),
((1., 0.1), 10.))
assert pv_t2['pv']['p'] == self.p_unit.to('AU', 1.)
assert pv_t2['pv']['v'] == self.v_unit.to('AU/day', 0.1)
assert pv_t2['t'] == self.t_unit.to('Myr', 10.)
pv_t3 = self.pv_t_unit.to((('AU', 'AU/day'), 'Myr'),
[((1., 0.1), 10.),
((2., 0.2), 20.)])
assert np.all(pv_t3['pv']['p'] == self.p_unit.to('AU', [1., 2.]))
assert np.all(pv_t3['pv']['v'] == self.v_unit.to('AU/day', [0.1, 0.2]))
assert np.all(pv_t3['t'] == self.t_unit.to('Myr', [10., 20.]))
class TestStructuredUnitArithmetic(StructuredTestBaseWithUnits):
def test_multiplication(self):
pv_times_au = self.pv_unit * u.au
assert isinstance(pv_times_au, StructuredUnit)
assert pv_times_au.field_names == ('p', 'v')
assert pv_times_au['p'] == self.p_unit * u.AU
assert pv_times_au['v'] == self.v_unit * u.AU
au_times_pv = u.au * self.pv_unit
assert au_times_pv == pv_times_au
pv_times_au2 = self.pv_unit * 'au'
assert pv_times_au2 == pv_times_au
au_times_pv2 = 'AU' * self.pv_unit
assert au_times_pv2 == pv_times_au
with pytest.raises(TypeError):
self.pv_unit * self.pv_unit
with pytest.raises(TypeError):
's,s' * self.pv_unit
def test_division(self):
pv_by_s = self.pv_unit / u.s
assert isinstance(pv_by_s, StructuredUnit)
assert pv_by_s.field_names == ('p', 'v')
assert pv_by_s['p'] == self.p_unit / u.s
assert pv_by_s['v'] == self.v_unit / u.s
pv_by_s2 = self.pv_unit / 's'
assert pv_by_s2 == pv_by_s
with pytest.raises(TypeError):
1. / self.pv_unit
with pytest.raises(TypeError):
u.s / self.pv_unit
class TestStructuredQuantity(StructuredTestBaseWithUnits):
def test_initialization_and_keying(self):
q_pv = Quantity(self.pv, self.pv_unit)
q_p = q_pv['p']
assert isinstance(q_p, Quantity)
assert isinstance(q_p.unit, UnitBase)
assert np.all(q_p == self.pv['p'] * self.pv_unit['p'])
q_v = q_pv['v']
assert isinstance(q_v, Quantity)
assert isinstance(q_v.unit, UnitBase)
assert np.all(q_v == self.pv['v'] * self.pv_unit['v'])
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q_t = q_pv_t['t']
assert np.all(q_t == self.pv_t['t'] * self.pv_t_unit['t'])
q_pv2 = q_pv_t['pv']
assert isinstance(q_pv2, Quantity)
assert q_pv2.unit == self.pv_unit
with pytest.raises(ValueError):
Quantity(self.pv, self.pv_t_unit)
with pytest.raises(ValueError):
Quantity(self.pv_t, self.pv_unit)
def test_initialization_with_unit_tuples(self):
q_pv_t = Quantity(self.pv_t, (('km', 'km/s'), 's'))
assert isinstance(q_pv_t.unit, StructuredUnit)
assert q_pv_t.unit == self.pv_t_unit
def test_initialization_with_string(self):
q_pv_t = Quantity(self.pv_t, '(km, km/s), s')
assert isinstance(q_pv_t.unit, StructuredUnit)
assert q_pv_t.unit == self.pv_t_unit
def test_initialization_by_multiplication_with_unit(self):
q_pv_t = self.pv_t * self.pv_t_unit
assert q_pv_t.unit is self.pv_t_unit
assert np.all(q_pv_t.value == self.pv_t)
assert not np.may_share_memory(q_pv_t, self.pv_t)
q_pv_t2 = self.pv_t_unit * self.pv_t
        assert q_pv_t2.unit is self.pv_t_unit
# Not testing equality of structured Quantity here.
assert np.all(q_pv_t2.value == q_pv_t.value)
def test_initialization_by_shifting_to_unit(self):
q_pv_t = self.pv_t << self.pv_t_unit
assert q_pv_t.unit is self.pv_t_unit
assert np.all(q_pv_t.value == self.pv_t)
assert np.may_share_memory(q_pv_t, self.pv_t)
def test_getitem(self):
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q_pv_t01 = q_pv_t[:2]
assert isinstance(q_pv_t01, Quantity)
assert q_pv_t01.unit == q_pv_t.unit
assert np.all(q_pv_t01['t'] == q_pv_t['t'][:2])
q_pv_t1 = q_pv_t[1]
assert isinstance(q_pv_t1, Quantity)
assert q_pv_t1.unit == q_pv_t.unit
assert q_pv_t1.shape == ()
assert q_pv_t1['t'] == q_pv_t['t'][1]
def test_value(self):
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
value = q_pv_t.value
assert type(value) is np.ndarray
assert np.all(value == self.pv_t)
value1 = q_pv_t[1].value
assert type(value1) is np.void
assert np.all(value1 == self.pv_t[1])
def test_conversion(self):
q_pv = Quantity(self.pv, self.pv_unit)
q1 = q_pv.to(('AU', 'AU/day'))
assert isinstance(q1, Quantity)
assert q1['p'].unit == u.AU
assert q1['v'].unit == u.AU / u.day
assert np.all(q1['p'] == q_pv['p'].to(u.AU))
assert np.all(q1['v'] == q_pv['v'].to(u.AU/u.day))
q2 = q_pv.to(self.pv_unit)
assert q2['p'].unit == self.p_unit
assert q2['v'].unit == self.v_unit
assert np.all(q2['p'].value == self.pv['p'])
assert np.all(q2['v'].value == self.pv['v'])
assert not np.may_share_memory(q2, q_pv)
pv1 = q_pv.to_value(('AU', 'AU/day'))
assert type(pv1) is np.ndarray
assert np.all(pv1['p'] == q_pv['p'].to_value(u.AU))
assert np.all(pv1['v'] == q_pv['v'].to_value(u.AU/u.day))
pv11 = q_pv[1].to_value(('AU', 'AU/day'))
assert type(pv11) is np.void
assert pv11 == pv1[1]
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q2 = q_pv_t.to((('kpc', 'kpc/Myr'), 'Myr'))
assert q2['pv']['p'].unit == u.kpc
assert q2['pv']['v'].unit == u.kpc / u.Myr
assert q2['t'].unit == u.Myr
assert np.all(q2['pv']['p'] == q_pv_t['pv']['p'].to(u.kpc))
assert np.all(q2['pv']['v'] == q_pv_t['pv']['v'].to(u.kpc/u.Myr))
assert np.all(q2['t'] == q_pv_t['t'].to(u.Myr))
def test_conversion_via_lshift(self):
q_pv = Quantity(self.pv, self.pv_unit)
q1 = q_pv << StructuredUnit(('AU', 'AU/day'))
assert isinstance(q1, Quantity)
assert q1['p'].unit == u.AU
assert q1['v'].unit == u.AU / u.day
assert np.all(q1['p'] == q_pv['p'].to(u.AU))
assert np.all(q1['v'] == q_pv['v'].to(u.AU/u.day))
q2 = q_pv << self.pv_unit
assert q2['p'].unit == self.p_unit
assert q2['v'].unit == self.v_unit
assert np.all(q2['p'].value == self.pv['p'])
assert np.all(q2['v'].value == self.pv['v'])
assert np.may_share_memory(q2, q_pv)
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q2 = q_pv_t << '(kpc,kpc/Myr),Myr'
assert q2['pv']['p'].unit == u.kpc
assert q2['pv']['v'].unit == u.kpc / u.Myr
assert q2['t'].unit == u.Myr
assert np.all(q2['pv']['p'] == q_pv_t['pv']['p'].to(u.kpc))
assert np.all(q2['pv']['v'] == q_pv_t['pv']['v'].to(u.kpc/u.Myr))
assert np.all(q2['t'] == q_pv_t['t'].to(u.Myr))
def test_inplace_conversion(self):
q_pv = Quantity(self.pv, self.pv_unit)
q1 = q_pv.copy()
q_link = q1
q1 <<= StructuredUnit(('AU', 'AU/day'))
assert q1 is q_link
assert q1['p'].unit == u.AU
assert q1['v'].unit == u.AU / u.day
assert np.all(q1['p'] == q_pv['p'].to(u.AU))
assert np.all(q1['v'] == q_pv['v'].to(u.AU/u.day))
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q2 = q_pv_t.copy()
q_link = q2
q2 <<= '(kpc,kpc/Myr),Myr'
assert q2 is q_link
assert q2['pv']['p'].unit == u.kpc
assert q2['pv']['v'].unit == u.kpc / u.Myr
assert q2['t'].unit == u.Myr
assert np.all(q2['pv']['p'] == q_pv_t['pv']['p'].to(u.kpc))
assert np.all(q2['pv']['v'] == q_pv_t['pv']['v'].to(u.kpc/u.Myr))
assert np.all(q2['t'] == q_pv_t['t'].to(u.Myr))
def test_si(self):
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q_pv_t_si = q_pv_t.si
assert_array_equal(q_pv_t_si, q_pv_t.to('(m,m/s),s'))
def test_cgs(self):
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q_pv_t_cgs = q_pv_t.cgs
assert_array_equal(q_pv_t_cgs, q_pv_t.to('(cm,cm/s),s'))
def test_equality(self):
q_pv = Quantity(self.pv, self.pv_unit)
equal = q_pv == q_pv
not_equal = q_pv != q_pv
assert np.all(equal)
assert not np.any(not_equal)
equal2 = q_pv == q_pv[1]
not_equal2 = q_pv != q_pv[1]
assert np.all(equal2 == [False, True, False])
assert np.all(not_equal2 != equal2)
q1 = q_pv.to(('AU', 'AU/day'))
# Ensure same conversion is done, by placing q1 first.
assert np.all(q1 == q_pv)
assert not np.any(q1 != q_pv)
# Check different names in dtype.
assert np.all(q1.value * u.Unit('AU, AU/day') == q_pv)
assert not np.any(q1.value * u.Unit('AU, AU/day') != q_pv)
assert (q_pv == 'b') is False
assert ('b' != q_pv) is True
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
assert np.all((q_pv_t[2] == q_pv_t) == [False, False, True])
assert np.all((q_pv_t[2] != q_pv_t) != [False, False, True])
assert (q_pv == q_pv_t) is False
assert (q_pv_t != q_pv) is True
def test_setitem(self):
q_pv = Quantity(self.pv, self.pv_unit)
q_pv[1] = (2., 2.) * self.pv_unit
assert q_pv[1].value == np.array((2., 2.), self.pv_dtype)
q_pv[1:2] = (1., 0.5) * u.Unit('AU, AU/day')
assert q_pv['p'][1] == 1. * u.AU
assert q_pv['v'][1] == 0.5 * u.AU / u.day
q_pv['v'] = 1. * u.km / u.s
assert np.all(q_pv['v'] == 1. * u.km / u.s)
with pytest.raises(u.UnitsError):
q_pv[1] = (1., 1.) * u.Unit('AU, AU')
with pytest.raises(u.UnitsError):
q_pv['v'] = 1. * u.km
q_pv_t = Quantity(self.pv_t, self.pv_t_unit)
q_pv_t[1] = ((2., 2.), 3.) * self.pv_t_unit
assert q_pv_t[1].value == np.array(((2., 2.), 3.), self.pv_t_dtype)
q_pv_t[1:2] = ((1., 0.5), 5.) * u.Unit('(AU, AU/day), yr')
assert q_pv_t['pv'][1] == (1., 0.5) * u.Unit('AU, AU/day')
assert q_pv_t['t'][1] == 5. * u.yr
q_pv_t['pv'] = (1., 0.5) * self.pv_unit
assert np.all(q_pv_t['pv'] == (1., 0.5) * self.pv_unit)
class TestStructuredQuantityFunctions(StructuredTestBaseWithUnits):
@classmethod
def setup_class(self):
super().setup_class()
self.q_pv = self.pv << self.pv_unit
self.q_pv_t = self.pv_t << self.pv_t_unit
def test_empty_like(self):
z = np.empty_like(self.q_pv)
assert z.dtype == self.pv_dtype
assert z.unit == self.pv_unit
assert z.shape == self.pv.shape
@pytest.mark.parametrize('func', [np.zeros_like, np.ones_like])
def test_zeros_ones_like(self, func):
z = func(self.q_pv)
assert z.dtype == self.pv_dtype
assert z.unit == self.pv_unit
assert z.shape == self.pv.shape
assert_array_equal(z, func(self.pv) << self.pv_unit)
class TestStructuredSpecificTypeQuantity(StructuredTestBaseWithUnits):
def setup_class(self):
super().setup_class()
class PositionVelocity(u.SpecificTypeQuantity):
_equivalent_unit = self.pv_unit
self.PositionVelocity = PositionVelocity
def test_init(self):
pv = self.PositionVelocity(self.pv, self.pv_unit)
assert isinstance(pv, self.PositionVelocity)
assert type(pv['p']) is u.Quantity
assert_array_equal(pv['p'], self.pv['p'] << self.pv_unit['p'])
pv2 = self.PositionVelocity(self.pv, 'AU,AU/day')
assert_array_equal(pv2['p'], self.pv['p'] << u.AU)
def test_error_on_non_equivalent_unit(self):
with pytest.raises(u.UnitsError):
self.PositionVelocity(self.pv, 'AU')
with pytest.raises(u.UnitsError):
self.PositionVelocity(self.pv, 'AU,yr')
class TestStructuredLogUnit:
def setup_class(self):
self.mag_time_dtype = np.dtype([('mag', 'f8'), ('t', 'f8')])
self.mag_time = np.array([(20., 10.), (25., 100.)], self.mag_time_dtype)
def test_unit_initialization(self):
mag_time_unit = StructuredUnit((u.STmag, u.s), self.mag_time_dtype)
assert mag_time_unit['mag'] == u.STmag
assert mag_time_unit['t'] == u.s
mag_time_unit2 = u.Unit('mag(ST),s')
assert mag_time_unit2 == mag_time_unit
def test_quantity_initialization(self):
su = u.Unit('mag(ST),s')
mag_time = self.mag_time << su
assert isinstance(mag_time['mag'], u.Magnitude)
assert isinstance(mag_time['t'], u.Quantity)
assert mag_time.unit == su
assert_array_equal(mag_time['mag'], self.mag_time['mag'] << u.STmag)
assert_array_equal(mag_time['t'], self.mag_time['t'] << u.s)
def test_quantity_si(self):
mag_time = self.mag_time << u.Unit('mag(ST),yr')
mag_time_si = mag_time.si
assert_array_equal(mag_time_si['mag'], mag_time['mag'].si)
assert_array_equal(mag_time_si['t'], mag_time['t'].si)
class TestStructuredMaskedQuantity(StructuredTestBaseWithUnits):
"""Somewhat minimal tests. Conversion is most stringent."""
def setup_class(self):
super().setup_class()
self.qpv = self.pv << self.pv_unit
self.pv_mask = np.array([(True, False),
(False, False),
(False, True)], [('p', bool), ('v', bool)])
self.mpv = Masked(self.qpv, mask=self.pv_mask)
def test_init(self):
assert isinstance(self.mpv, Masked)
assert isinstance(self.mpv, Quantity)
assert_array_equal(self.mpv.unmasked, self.qpv)
assert_array_equal(self.mpv.mask, self.pv_mask)
def test_slicing(self):
mp = self.mpv['p']
assert isinstance(mp, Masked)
assert isinstance(mp, Quantity)
assert_array_equal(mp.unmasked, self.qpv['p'])
assert_array_equal(mp.mask, self.pv_mask['p'])
def test_conversion(self):
mpv = self.mpv.to('AU,AU/day')
assert isinstance(mpv, Masked)
assert isinstance(mpv, Quantity)
assert_array_equal(mpv.unmasked, self.qpv.to('AU,AU/day'))
assert_array_equal(mpv.mask, self.pv_mask)
assert np.all(mpv == self.mpv)
def test_si(self):
mpv = self.mpv.si
assert isinstance(mpv, Masked)
assert isinstance(mpv, Quantity)
assert_array_equal(mpv.unmasked, self.qpv.si)
assert_array_equal(mpv.mask, self.pv_mask)
assert np.all(mpv == self.mpv)
|
lpsinger/astropy
|
astropy/units/tests/test_structured.py
|
Python
|
bsd-3-clause
| 27,642
|
# File generated from our OpenAPI spec
from __future__ import absolute_import, division, print_function
from stripe import util
from stripe.api_resources.abstract import APIResource
from stripe.api_resources.customer import Customer
from stripe.six.moves.urllib.parse import quote_plus
class CustomerBalanceTransaction(APIResource):
OBJECT_NAME = "customer_balance_transaction"
def instance_url(self):
token = util.utf8(self.id)
customer = util.utf8(self.customer)
base = Customer.class_url()
cust_extn = quote_plus(customer)
extn = quote_plus(token)
return "%s/%s/balance_transactions/%s" % (base, cust_extn, extn)
@classmethod
def retrieve(cls, id, api_key=None, **params):
raise NotImplementedError(
"Can't retrieve a Customer Balance Transaction without a Customer ID. "
"Use Customer.retrieve_customer_balance_transaction('cus_123', 'cbtxn_123')"
)
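# Retrieval therefore goes through the parent Customer resource, e.g. with
# hypothetical IDs:
#
#   txn = Customer.retrieve_customer_balance_transaction('cus_123', 'cbtxn_123')
#   txn.instance_url()  # -> '/v1/customers/cus_123/balance_transactions/cbtxn_123'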
|
stripe/stripe-python
|
stripe/api_resources/customer_balance_transaction.py
|
Python
|
mit
| 963
|
from .context import Context, QueryDict
def build_context(api, resource, request):
try:
        # Django may raise RawPostDataException sometimes,
        # e.g. when processing POST multipart/form-data;
        # in such cases we can't access the raw body anymore, sorry
        raw_body = request.body
    except Exception:
        raw_body = None
parameters = {}
if request.resolver_match:
parameters.update(request.resolver_match.kwargs)
parameters.update(QueryDict(request.GET.lists()))
ctx = Context(
api, request=request, resource=resource,
method=request.method, parameters=parameters, data=request.POST,
files=request.FILES, raw=raw_body)
return ctx
def resource_dispatcher_factory(api, resource):
from django.http import HttpResponse
def dispatch_request(request, *args, **kw):
ctx = build_context(api, resource, request)
bypass_resource_call = False
middlewares_called = []
for middleware in api.middlewares:
middlewares_called.append(middleware)
try:
method = middleware.process_request
except AttributeError:
pass
else:
if method(request, ctx) is False:
bypass_resource_call = True
break
if not bypass_resource_call:
response = resource(ctx, *args, **kw)
else:
response = HttpResponse()
middlewares_called.reverse()
for middleware in middlewares_called:
try:
method = middleware.process_response
except AttributeError:
pass
else:
if method(request, response, ctx) is False:
break
return response
return dispatch_request
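# Middleware hooks are duck-typed: process_request/process_response are both
# optional, and returning False from either one short-circuits the chain. A
# minimal sketch (TimingMiddleware is hypothetical):
#
#   import time
#
#   class TimingMiddleware(object):
#       def process_request(self, request, ctx):
#           self.started = time.time()  # return False to skip the resource
#       def process_response(self, request, response, ctx):
#           response['X-Elapsed'] = str(time.time() - self.started)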
|
marcinn/restosaur
|
restosaur/dispatch.py
|
Python
|
bsd-2-clause
| 1,852
|
def brooke2():
i01.attach()
fullspeed()
gestureforlondon3()
sleep(2)
i01.detach()
sleep(30)
brooke3()
|
MyRobotLab/pyrobotlab
|
home/hairygael/GESTURES/brooke2.py
|
Python
|
apache-2.0
| 136
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Document.court'
db.delete_column('Document', 'court_id')
def backwards(self, orm):
# Adding field 'Document.court'
db.add_column('Document', 'court',
self.gf('django.db.models.fields.related.ForeignKey')(
to=orm['search.Court'], null=True),
keep_default=False)
models = {
u'search.citation': {
'Meta': {'object_name': 'Citation', 'db_table': "'Citation'"},
'case_name': (
'django.db.models.fields.TextField', [], {'blank': 'True'}),
'docket_number': (
'django.db.models.fields.CharField', [],
{'max_length': '5000', 'null': 'True', 'blank': 'True'}),
'federal_cite_one': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'federal_cite_three': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'federal_cite_two': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lexis_cite': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'neutral_cite': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'scotus_early_cite': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [],
{'max_length': '50', 'null': 'True'}),
'specialty_cite_one': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state_cite_one': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state_cite_regional': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state_cite_three': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state_cite_two': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'}),
'westlaw_cite': (
'django.db.models.fields.CharField', [],
{'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'search.court': {
'Meta': {'ordering': "['position']", 'object_name': 'Court',
'db_table': "'Court'"},
'citation_string': ('django.db.models.fields.CharField', [],
{'max_length': '100', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'null': 'True',
'db_index': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [],
{'null': 'True', 'blank': 'True'}),
'full_name': (
'django.db.models.fields.CharField', [], {'max_length': "'200'"}),
'has_opinion_scraper': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_oral_argument_scraper': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'primary_key': 'True'}),
'in_use': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'jurisdiction': (
'django.db.models.fields.CharField', [], {'max_length': '3'}),
'notes': (
'django.db.models.fields.TextField', [], {'blank': 'True'}),
'position': (
'django.db.models.fields.FloatField', [],
{'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'short_name': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_date': ('django.db.models.fields.DateField', [],
{'null': 'True', 'blank': 'True'}),
'url': (
'django.db.models.fields.URLField', [], {'max_length': '500'})
},
u'search.docket': {
'Meta': {'object_name': 'Docket'},
'blocked': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'case_name': (
'django.db.models.fields.TextField', [], {'blank': 'True'}),
'court': ('django.db.models.fields.related.ForeignKey', [],
{'to': u"orm['search.Court']", 'null': 'True'}),
'date_blocked': (
'django.db.models.fields.DateField', [],
{'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'null': 'True',
'db_index': 'True', 'blank': 'True'}),
u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [],
{'max_length': '50', 'null': 'True'})
},
u'search.document': {
'Meta': {'object_name': 'Document', 'db_table': "'Document'"},
'blocked': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'cases_cited': (
'django.db.models.fields.related.ManyToManyField', [],
{'blank': 'True', 'related_name': "'citing_opinions'",
'null': 'True',
'symmetrical': 'False', 'to': u"orm['search.Citation']"}),
'citation': ('django.db.models.fields.related.ForeignKey', [],
{'blank': 'True',
'related_name': "'parent_documents'", 'null': 'True',
'to': u"orm['search.Citation']"}),
'citation_count': ('django.db.models.fields.IntegerField', [],
{'default': '0', 'db_index': 'True'}),
'date_blocked': (
'django.db.models.fields.DateField', [],
{'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_filed': (
'django.db.models.fields.DateField', [],
{'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'null': 'True',
'db_index': 'True', 'blank': 'True'}),
'docket': ('django.db.models.fields.related.ForeignKey', [],
{'blank': 'True', 'related_name': "'documents'",
'null': 'True', 'to': u"orm['search.Docket']"}),
'download_url': ('django.db.models.fields.URLField', [],
{'db_index': 'True', 'max_length': '500',
'null': 'True', 'blank': 'True'}),
'extracted_by_ocr': ('django.db.models.fields.BooleanField', [],
{'default': 'False', 'db_index': 'True'}),
'html': ('django.db.models.fields.TextField', [],
{'null': 'True', 'blank': 'True'}),
'html_lawbox': ('django.db.models.fields.TextField', [],
{'null': 'True', 'blank': 'True'}),
'html_with_citations': (
'django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_stub_document': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'judges': ('django.db.models.fields.TextField', [],
{'null': 'True', 'blank': 'True'}),
'local_path': (
'django.db.models.fields.files.FileField', [],
{'db_index': 'True', 'max_length': '100', 'blank': 'True'}),
'nature_of_suit': (
'django.db.models.fields.TextField', [], {'blank': 'True'}),
'plain_text': (
'django.db.models.fields.TextField', [], {'blank': 'True'}),
'precedential_status': (
'django.db.models.fields.CharField', [],
{'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'sha1': ('django.db.models.fields.CharField', [],
{'max_length': '40', 'db_index': 'True'}),
'source': ('django.db.models.fields.CharField', [],
{'max_length': '3', 'blank': 'True'}),
'time_retrieved': (
'django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'})
}
}
complete_apps = ['search']
|
shashi792/courtlistener
|
alert/search/migrations/0028_delete_court_field.py
|
Python
|
agpl-3.0
| 9,911
|
# -*- coding: utf-8 -*-
""" Core components """
from boto.exception import JSONResponseError, BotoServerError
from dynamic_dynamodb import calculators
from dynamic_dynamodb.aws import dynamodb, sns
from dynamic_dynamodb.core import circuit_breaker
from dynamic_dynamodb.statistics import table as table_stats
from dynamic_dynamodb.log_handler import LOGGER as logger
from dynamic_dynamodb.config_handler import get_table_option, get_global_option
def ensure_provisioning(
table_name, key_name,
num_consec_read_checks,
num_consec_write_checks):
""" Ensure that provisioning is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
:type num_consec_read_checks: int
:param num_consec_read_checks: How many consecutive checks have we had
:type num_consec_write_checks: int
:param num_consec_write_checks: How many consecutive checks have we had
:returns: (int, int) -- num_consec_read_checks, num_consec_write_checks
"""
if get_global_option('circuit_breaker_url'):
if circuit_breaker.is_open():
logger.warning('Circuit breaker is OPEN!')
return (0, 0)
# Handle throughput alarm checks
__ensure_provisioning_alarm(table_name, key_name)
try:
read_update_needed, updated_read_units, num_consec_read_checks = \
__ensure_provisioning_reads(
table_name,
key_name,
num_consec_read_checks)
write_update_needed, updated_write_units, num_consec_write_checks = \
__ensure_provisioning_writes(
table_name,
key_name,
num_consec_write_checks)
if read_update_needed:
num_consec_read_checks = 0
if write_update_needed:
num_consec_write_checks = 0
# Handle throughput updates
if read_update_needed or write_update_needed:
logger.info(
'{0} - Changing provisioning to {1:d} '
'read units and {2:d} write units'.format(
table_name,
int(updated_read_units),
int(updated_write_units)))
__update_throughput(
table_name,
key_name,
updated_read_units,
updated_write_units)
else:
logger.info('{0} - No need to change provisioning'.format(
table_name))
except JSONResponseError:
raise
except BotoServerError:
raise
return num_consec_read_checks, num_consec_write_checks
def __calculate_always_decrease_rw_values(
table_name, read_units, provisioned_reads,
write_units, provisioned_writes):
""" Calculate values for always-decrease-rw-together
This will only return reads and writes decreases if both reads and writes
are lower than the current provisioning
:type table_name: str
:param table_name: Name of the DynamoDB table
:type read_units: int
:param read_units: New read unit provisioning
:type provisioned_reads: int
:param provisioned_reads: Currently provisioned reads
:type write_units: int
:param write_units: New write unit provisioning
:type provisioned_writes: int
:param provisioned_writes: Currently provisioned writes
:returns: (int, int) -- (reads, writes)
"""
if read_units <= provisioned_reads and write_units <= provisioned_writes:
return (read_units, write_units)
if read_units < provisioned_reads:
logger.info(
'{0} - Reads could be decreased, but we are waiting for '
'writes to get lower than the threshold before '
'scaling down'.format(table_name))
read_units = provisioned_reads
elif write_units < provisioned_writes:
logger.info(
'{0} - Writes could be decreased, but we are waiting for '
'reads to get lower than the threshold before '
'scaling down'.format(table_name))
write_units = provisioned_writes
return (read_units, write_units)
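# Worked example of the rule above (numbers hypothetical): with 100 reads and
# 100 writes currently provisioned, a proposed (80, 120) keeps reads at 100
# because writes are not below their current level, while (80, 90) is allowed:
#
#   __calculate_always_decrease_rw_values('t', 80, 100, 120, 100)  # (100, 120)
#   __calculate_always_decrease_rw_values('t', 80, 100, 90, 100)   # (80, 90)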
def __ensure_provisioning_reads(table_name, key_name, num_consec_read_checks):
""" Ensure that provisioning is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
:type num_consec_read_checks: int
:param num_consec_read_checks: How many consecutive checks have we had
:returns: (bool, int, int)
update_needed, updated_read_units, num_consec_read_checks
"""
if not get_table_option(key_name, 'enable_reads_autoscaling'):
logger.info(
'{0} - Autoscaling of reads has been disabled'.format(table_name))
return False, dynamodb.get_provisioned_table_read_units(table_name), 0
update_needed = False
try:
lookback_window_start = get_table_option(
key_name, 'lookback_window_start')
current_read_units = dynamodb.get_provisioned_table_read_units(
table_name)
consumed_read_units_percent = \
table_stats.get_consumed_read_units_percent(
table_name, lookback_window_start)
throttled_read_count = \
table_stats.get_throttled_read_event_count(
table_name, lookback_window_start)
reads_upper_threshold = \
get_table_option(key_name, 'reads_upper_threshold')
reads_lower_threshold = \
get_table_option(key_name, 'reads_lower_threshold')
throttled_reads_upper_threshold = \
get_table_option(key_name, 'throttled_reads_upper_threshold')
increase_reads_with = \
get_table_option(key_name, 'increase_reads_with')
increase_reads_unit = \
get_table_option(key_name, 'increase_reads_unit')
decrease_reads_with = \
get_table_option(key_name, 'decrease_reads_with')
decrease_reads_unit = \
get_table_option(key_name, 'decrease_reads_unit')
min_provisioned_reads = \
get_table_option(key_name, 'min_provisioned_reads')
max_provisioned_reads = \
get_table_option(key_name, 'max_provisioned_reads')
num_read_checks_before_scale_down = \
get_table_option(key_name, 'num_read_checks_before_scale_down')
num_read_checks_reset_percent = \
get_table_option(key_name, 'num_read_checks_reset_percent')
except JSONResponseError:
raise
except BotoServerError:
raise
# Set the updated units to the current read unit value
updated_read_units = current_read_units
# Reset consecutive reads if num_read_checks_reset_percent is reached
if num_read_checks_reset_percent:
if consumed_read_units_percent >= num_read_checks_reset_percent:
logger.info(
'{0} - Resetting the number of consecutive '
'read checks. Reason: Consumed percent {1} is '
'greater than reset percent: {2}'.format(
table_name,
consumed_read_units_percent,
num_read_checks_reset_percent))
num_consec_read_checks = 0
if (consumed_read_units_percent == 0 and not
get_table_option(
key_name, 'allow_scaling_down_reads_on_0_percent')):
logger.info(
'{0} - Scaling down reads is not done when usage is at 0%'.format(
table_name))
# Increase needed due to high CU consumption
elif consumed_read_units_percent >= reads_upper_threshold:
# Exit if up scaling has been disabled
if not get_table_option(key_name, 'enable_reads_up_scaling'):
logger.debug(
'{0} - Up scaling event detected. No action taken as scaling '
'up reads has been disabled in the configuration'.format(
table_name))
else:
if increase_reads_unit == 'percent':
calculated_provisioning = calculators.increase_reads_in_percent(
current_read_units,
increase_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
else:
calculated_provisioning = calculators.increase_reads_in_units(
current_read_units,
increase_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
if current_read_units != calculated_provisioning:
logger.info(
'{0} - Resetting the number of consecutive '
'read checks. Reason: scale up event detected'.format(
table_name))
num_consec_read_checks = 0
update_needed = True
updated_read_units = calculated_provisioning
# Increase needed due to high throttling
elif throttled_read_count > throttled_reads_upper_threshold:
if throttled_reads_upper_threshold > 0:
if increase_reads_unit == 'percent':
calculated_provisioning = calculators.increase_reads_in_percent(
updated_read_units,
increase_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
else:
calculated_provisioning = calculators.increase_reads_in_units(
updated_read_units,
increase_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
if current_read_units != calculated_provisioning:
logger.info(
'{0} - Resetting the number of consecutive '
'read checks. Reason: scale up event detected'.format(
table_name))
num_consec_read_checks = 0
update_needed = True
updated_read_units = calculated_provisioning
# Decrease needed due to low CU consumption
elif consumed_read_units_percent <= reads_lower_threshold:
# Exit if down scaling has been disabled
if not get_table_option(key_name, 'enable_reads_down_scaling'):
logger.debug(
                '{0} - Down scaling event detected. No action taken as scaling '
'down reads has been disabled in the configuration'.format(
table_name))
else:
if decrease_reads_unit == 'percent':
calculated_provisioning = calculators.decrease_reads_in_percent(
updated_read_units,
decrease_reads_with,
get_table_option(key_name, 'min_provisioned_reads'),
table_name)
else:
calculated_provisioning = calculators.decrease_reads_in_units(
updated_read_units,
decrease_reads_with,
get_table_option(key_name, 'min_provisioned_reads'),
table_name)
if current_read_units != calculated_provisioning:
num_consec_read_checks = num_consec_read_checks + 1
if num_consec_read_checks >= num_read_checks_before_scale_down:
update_needed = True
updated_read_units = calculated_provisioning
# Never go over the configured max provisioning
if max_provisioned_reads:
if int(updated_read_units) > int(max_provisioned_reads):
update_needed = True
updated_read_units = int(max_provisioned_reads)
            logger.info(
                'Will not increase reads over max-provisioned-reads '
                'limit ({0} reads)'.format(updated_read_units))
# Ensure that we have met the min-provisioning
if min_provisioned_reads:
if int(min_provisioned_reads) > int(updated_read_units):
update_needed = True
updated_read_units = int(min_provisioned_reads)
logger.info(
'{0} - Increasing reads to meet min-provisioned-reads '
'limit ({1} reads)'.format(table_name, updated_read_units))
logger.info('{0} - Consecutive read checks {1}/{2}'.format(
table_name,
num_consec_read_checks,
num_read_checks_before_scale_down))
return update_needed, updated_read_units, num_consec_read_checks
def __ensure_provisioning_writes(
table_name, key_name, num_consec_write_checks):
""" Ensure that provisioning of writes is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
:type num_consec_write_checks: int
:param num_consec_write_checks: How many consecutive checks have we had
:returns: (bool, int, int)
update_needed, updated_write_units, num_consec_write_checks
"""
if not get_table_option(key_name, 'enable_writes_autoscaling'):
logger.info(
'{0} - Autoscaling of writes has been disabled'.format(table_name))
return False, dynamodb.get_provisioned_table_write_units(table_name), 0
update_needed = False
try:
lookback_window_start = get_table_option(
key_name, 'lookback_window_start')
current_write_units = dynamodb.get_provisioned_table_write_units(
table_name)
consumed_write_units_percent = \
table_stats.get_consumed_write_units_percent(
table_name, lookback_window_start)
throttled_write_count = \
table_stats.get_throttled_write_event_count(
table_name, lookback_window_start)
writes_upper_threshold = \
get_table_option(key_name, 'writes_upper_threshold')
writes_lower_threshold = \
get_table_option(key_name, 'writes_lower_threshold')
throttled_writes_upper_threshold = \
get_table_option(key_name, 'throttled_writes_upper_threshold')
increase_writes_unit = \
get_table_option(key_name, 'increase_writes_unit')
increase_writes_with = \
get_table_option(key_name, 'increase_writes_with')
decrease_writes_unit = \
get_table_option(key_name, 'decrease_writes_unit')
decrease_writes_with = \
get_table_option(key_name, 'decrease_writes_with')
min_provisioned_writes = \
get_table_option(key_name, 'min_provisioned_writes')
max_provisioned_writes = \
get_table_option(key_name, 'max_provisioned_writes')
num_write_checks_before_scale_down = \
get_table_option(key_name, 'num_write_checks_before_scale_down')
num_write_checks_reset_percent = \
get_table_option(key_name, 'num_write_checks_reset_percent')
except JSONResponseError:
raise
except BotoServerError:
raise
    # Set the updated units to the current write unit value
updated_write_units = current_write_units
    # Reset consecutive write count if num_write_checks_reset_percent is reached
if num_write_checks_reset_percent:
if consumed_write_units_percent >= num_write_checks_reset_percent:
logger.info(
'{0} - Resetting the number of consecutive '
'write checks. Reason: Consumed percent {1} is '
'greater than reset percent: {2}'.format(
table_name,
consumed_write_units_percent,
num_write_checks_reset_percent))
num_consec_write_checks = 0
# Check if we should update write provisioning
if (consumed_write_units_percent == 0 and not
get_table_option(
key_name, 'allow_scaling_down_writes_on_0_percent')):
logger.info(
'{0} - Scaling down writes is not done when usage is at 0%'.format(
table_name))
# Increase needed due to high CU consumption
elif consumed_write_units_percent >= writes_upper_threshold:
# Exit if up scaling has been disabled
if not get_table_option(key_name, 'enable_writes_up_scaling'):
logger.debug(
'{0} - Up scaling event detected. No action taken as scaling '
'up writes has been disabled in the configuration'.format(
table_name))
else:
if increase_writes_unit == 'percent':
calculated_provisioning = \
calculators.increase_writes_in_percent(
current_write_units,
increase_writes_with,
get_table_option(key_name, 'max_provisioned_writes'),
consumed_write_units_percent,
table_name)
else:
calculated_provisioning = calculators.increase_writes_in_units(
current_write_units,
increase_writes_with,
get_table_option(key_name, 'max_provisioned_writes'),
consumed_write_units_percent,
table_name)
if current_write_units != calculated_provisioning:
logger.info(
'{0} - Resetting the number of consecutive '
'write checks. Reason: scale up event detected'.format(
table_name))
num_consec_write_checks = 0
update_needed = True
updated_write_units = calculated_provisioning
# Increase needed due to high throttling
elif throttled_write_count > throttled_writes_upper_threshold:
if throttled_writes_upper_threshold > 0:
if increase_writes_unit == 'percent':
calculated_provisioning = \
calculators.increase_writes_in_percent(
current_write_units,
increase_writes_with,
get_table_option(key_name, 'max_provisioned_writes'),
consumed_write_units_percent,
table_name)
else:
calculated_provisioning = calculators.increase_writes_in_units(
current_write_units,
increase_writes_with,
get_table_option(key_name, 'max_provisioned_writes'),
consumed_write_units_percent,
table_name)
if current_write_units != calculated_provisioning:
logger.info(
'{0} - Resetting the number of consecutive '
'write checks. Reason: scale up event detected'.format(
table_name))
num_consec_write_checks = 0
update_needed = True
updated_write_units = calculated_provisioning
# Decrease needed due to low CU consumption
elif consumed_write_units_percent <= writes_lower_threshold:
        # Exit if down scaling has been disabled
if not get_table_option(key_name, 'enable_writes_down_scaling'):
logger.debug(
'{0} - Down scaling event detected. No action taken as scaling '
'down writes has been disabled in the configuration'.format(
table_name))
else:
if decrease_writes_unit == 'percent':
calculated_provisioning = \
calculators.decrease_writes_in_percent(
current_write_units,
decrease_writes_with,
get_table_option(key_name, 'min_provisioned_writes'),
table_name)
else:
calculated_provisioning = calculators.decrease_writes_in_units(
current_write_units,
decrease_writes_with,
get_table_option(key_name, 'min_provisioned_writes'),
table_name)
if current_write_units != calculated_provisioning:
num_consec_write_checks = num_consec_write_checks + 1
if (num_consec_write_checks >=
num_write_checks_before_scale_down):
update_needed = True
updated_write_units = calculated_provisioning
# Never go over the configured max provisioning
if max_provisioned_writes:
if int(updated_write_units) > int(max_provisioned_writes):
update_needed = True
updated_write_units = int(max_provisioned_writes)
logger.info(
'Will not increase writes over max-provisioned-writes '
'limit ({0} writes)'.format(updated_write_units))
# Ensure that we have met the min-provisioning
if min_provisioned_writes:
if int(min_provisioned_writes) > int(updated_write_units):
update_needed = True
updated_write_units = int(min_provisioned_writes)
logger.info(
'{0} - Increasing writes to meet min-provisioned-writes '
'limit ({1} writes)'.format(table_name, updated_write_units))
logger.info('{0} - Consecutive write checks {1}/{2}'.format(
table_name,
num_consec_write_checks,
num_write_checks_before_scale_down))
return update_needed, updated_write_units, num_consec_write_checks
def __update_throughput(table_name, key_name, read_units, write_units):
""" Update throughput on the DynamoDB table
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
:type read_units: int
:param read_units: New read unit provisioning
:type write_units: int
:param write_units: New write unit provisioning
"""
try:
current_ru = dynamodb.get_provisioned_table_read_units(table_name)
current_wu = dynamodb.get_provisioned_table_write_units(table_name)
except JSONResponseError:
raise
# Check table status
try:
table_status = dynamodb.get_table_status(table_name)
except JSONResponseError:
raise
logger.debug('{0} - Table status is {1}'.format(table_name, table_status))
if table_status != 'ACTIVE':
logger.warning(
'{0} - Not performing throughput changes when table '
'is {1}'.format(table_name, table_status))
return
# If this setting is True, we will only scale down when
# BOTH reads AND writes are low
if get_table_option(key_name, 'always_decrease_rw_together'):
read_units, write_units = __calculate_always_decrease_rw_values(
table_name,
read_units,
current_ru,
write_units,
current_wu)
if read_units == current_ru and write_units == current_wu:
logger.info('{0} - No changes to perform'.format(table_name))
return
dynamodb.update_table_provisioning(
table_name,
key_name,
int(read_units),
int(write_units))
def __ensure_provisioning_alarm(table_name, key_name):
""" Ensure that provisioning alarm threshold is not exceeded
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
"""
lookback_window_start = get_table_option(
key_name, 'lookback_window_start')
consumed_read_units_percent = table_stats.get_consumed_read_units_percent(
table_name, lookback_window_start)
consumed_write_units_percent = table_stats.get_consumed_write_units_percent(
table_name, lookback_window_start)
reads_upper_alarm_threshold = \
get_table_option(key_name, 'reads-upper-alarm-threshold')
reads_lower_alarm_threshold = \
get_table_option(key_name, 'reads-lower-alarm-threshold')
writes_upper_alarm_threshold = \
get_table_option(key_name, 'writes-upper-alarm-threshold')
writes_lower_alarm_threshold = \
get_table_option(key_name, 'writes-lower-alarm-threshold')
# Check upper alarm thresholds
upper_alert_triggered = False
upper_alert_message = []
if (reads_upper_alarm_threshold > 0 and
consumed_read_units_percent >= reads_upper_alarm_threshold):
upper_alert_triggered = True
upper_alert_message.append(
'{0} - Consumed Read Capacity {1:d}% '
'was greater than or equal to the upper '
'alarm threshold {2:d}%\n'.format(
table_name,
consumed_read_units_percent,
reads_upper_alarm_threshold))
if (writes_upper_alarm_threshold > 0 and
consumed_write_units_percent >= writes_upper_alarm_threshold):
upper_alert_triggered = True
upper_alert_message.append(
'{0} - Consumed Write Capacity {1:d}% '
'was greater than or equal to the upper alarm '
'threshold {2:d}%\n'.format(
table_name,
consumed_write_units_percent,
writes_upper_alarm_threshold))
# Check lower alarm thresholds
lower_alert_triggered = False
lower_alert_message = []
if (reads_lower_alarm_threshold > 0 and
consumed_read_units_percent < reads_lower_alarm_threshold):
lower_alert_triggered = True
lower_alert_message.append(
'{0} - Consumed Read Capacity {1:d}% '
'was below the lower alarm threshold {2:d}%\n'.format(
table_name,
consumed_read_units_percent,
reads_lower_alarm_threshold))
if (writes_lower_alarm_threshold > 0 and
consumed_write_units_percent < writes_lower_alarm_threshold):
lower_alert_triggered = True
lower_alert_message.append(
'{0} - Consumed Write Capacity {1:d}% '
'was below the lower alarm threshold {2:d}%\n'.format(
table_name,
consumed_write_units_percent,
writes_lower_alarm_threshold))
# Send alert if needed
if upper_alert_triggered:
logger.info(
'{0} - Will send high provisioning alert'.format(table_name))
sns.publish_table_notification(
key_name,
''.join(upper_alert_message),
['high-throughput-alarm'],
subject='ALARM: High Throughput for Table {0}'.format(table_name))
elif lower_alert_triggered:
logger.info(
'{0} - Will send low provisioning alert'.format(table_name))
sns.publish_table_notification(
key_name,
''.join(lower_alert_message),
['low-throughput-alarm'],
subject='ALARM: Low Throughput for Table {0}'.format(table_name))
else:
logger.debug('{0} - Throughput alarm thresholds not crossed'.format(
table_name))
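# --- Illustrative sketch (hypothetical, standalone): the alarm logic above
# reduces to a band check per metric - alert high when consumption crosses
# the upper threshold, alert low when it falls under the lower one, and a
# threshold of 0 disables that side of the band. ---
def _sketch_alarm_state(consumed_percent, lower_threshold, upper_threshold):
    """Return 'high', 'low' or 'ok' for a consumed-capacity percentage."""
    if upper_threshold > 0 and consumed_percent >= upper_threshold:
        return 'high'
    if lower_threshold > 0 and consumed_percent < lower_threshold:
        return 'low'
    return 'ok'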
| omnidavesz/dynamic-dynamodb | dynamic_dynamodb/core/table.py | Python | apache-2.0 | 27,449 |
#!/usr/bin/env python
"""These flows are designed for high performance transfers."""
import hashlib
import time
import zlib
import logging
from grr.lib import aff4
from grr.lib import flow
from grr.lib import rdfvalue
from grr.lib.aff4_objects import filestore
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import crypto as rdf_crypto
from grr.lib.rdfvalues import flows as rdf_flows
from grr.lib.rdfvalues import paths as rdf_paths
from grr.lib.rdfvalues import protodict as rdf_protodict
from grr.lib.rdfvalues import structs as rdf_structs
from grr.proto import flows_pb2
class GetFileArgs(rdf_structs.RDFProtoStruct):
protobuf = flows_pb2.GetFileArgs
class GetFile(flow.GRRFlow):
"""An efficient file transfer mechanism (deprecated, use MultiGetFile).
This flow is deprecated in favor of MultiGetFile, but kept for now for use by
MemoryCollector since the buffer hashing performed by MultiGetFile is
pointless for memory acquisition.
GetFile can also retrieve content from device files that report a size of 0 in
stat when read_length is specified.
Returns to parent flow:
A PathSpec.
"""
category = "/Filesystem/"
args_type = GetFileArgs
class SchemaCls(flow.GRRFlow.SchemaCls):
PROGRESS_GRAPH = aff4.Attribute(
"aff4:progress", rdf_flows.ProgressGraph,
"Show a button to generate a progress graph for this flow.",
default="")
# We have a maximum of this many chunk reads outstanding
# (200 chunks of 512 KiB each, about 100 MB of data in flight)
WINDOW_SIZE = 200
CHUNK_SIZE = 512 * 1024
@classmethod
def GetDefaultArgs(cls, token=None):
_ = token
result = cls.args_type()
result.pathspec.pathtype = "OS"
return result
@flow.StateHandler(next_state=["Stat"])
def Start(self):
"""Get information about the file from the client."""
self.state.Register("max_chunk_number",
max(2, self.args.read_length / self.CHUNK_SIZE))
self.state.Register("current_chunk_number", 0)
self.state.Register("file_size", 0)
self.state.Register("fd", None)
self.state.Register("stat", None)
self.CallClient("StatFile", rdf_client.ListDirRequest(
pathspec=self.args.pathspec), next_state="Stat")
@flow.StateHandler(next_state=["ReadBuffer", "CheckHashes"])
def Stat(self, responses):
"""Fix up the pathspec of the file."""
response = responses.First()
if responses.success and response:
self.state.stat = response
# TODO(user): This is a workaround for broken clients sending back
# empty pathspecs for pathtype MEMORY. Not needed for clients > 3.0.0.5.
if self.state.stat.pathspec.path:
self.args.pathspec = self.state.stat.pathspec
else:
if not self.args.ignore_stat_failure:
raise IOError("Error: %s" % responses.status)
# Just fill up a bogus stat entry.
self.state.stat = rdf_client.StatEntry(pathspec=self.args.pathspec)
# Adjust the size from st_size if read length is not specified.
if self.args.read_length == 0:
self.state.file_size = self.state.stat.st_size
else:
self.state.file_size = self.args.read_length
self.state.max_chunk_number = (self.state.file_size /
self.CHUNK_SIZE) + 1
self.CreateBlobImage()
self.FetchWindow(min(
self.WINDOW_SIZE,
self.state.max_chunk_number - self.state.current_chunk_number))
def FetchWindow(self, number_of_chunks_to_readahead):
"""Read ahead a number of buffers to fill the window."""
for _ in range(number_of_chunks_to_readahead):
# Do not read past the end of file
if self.state.current_chunk_number > self.state.max_chunk_number:
return
request = rdf_client.BufferReference(
pathspec=self.args.pathspec,
offset=self.state.current_chunk_number * self.CHUNK_SIZE,
length=self.CHUNK_SIZE)
self.CallClient("TransferBuffer", request, next_state="ReadBuffer")
self.state.current_chunk_number += 1
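# --- Illustrative sketch (hypothetical, standalone): a synchronous analogue
# of the windowing above. Start() issues a full window of chunk requests and
# ReadBuffer() tops the window back up by one per response received, so at
# most `window` reads are ever outstanding. ---
def _sketch_windowed_reads(read_chunk, num_chunks, window=4):
    """Yield chunk payloads while keeping up to `window` reads in flight."""
    import collections
    pending = collections.deque()
    next_chunk = 0
    while next_chunk < num_chunks or pending:
        # refill the window up to its maximum size
        while next_chunk < num_chunks and len(pending) < window:
            pending.append(read_chunk(next_chunk))
            next_chunk += 1
        # consume the oldest outstanding read, freeing a window slot
        yield pending.popleft()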
def CreateBlobImage(self):
"""Force creation of the new AFF4 object.
Note that this is pinned on the client id - i.e. the client can not change
aff4 objects outside its tree.
"""
urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
self.args.pathspec, self.client_id)
self.state.stat.aff4path = urn
# Create a new BlobImage for the data. Note that this object is pickled
# with this flow between states.
self.state.fd = aff4.FACTORY.Create(urn, "VFSBlobImage", token=self.token)
# The chunksize must be set to be the same as the transfer chunk size.
self.state.fd.SetChunksize(self.CHUNK_SIZE)
self.state.fd.Set(self.state.fd.Schema.STAT(self.state.stat))
@flow.StateHandler(next_state=["ReadBuffer", "CheckHashes"])
def ReadBuffer(self, responses):
"""Read the buffer and write to the file."""
# Did it work?
if responses.success:
response = responses.First()
if not response:
raise IOError("Missing hash for offset %s missing" % response.offset)
if response.offset <= self.state.max_chunk_number * self.CHUNK_SIZE:
# Write the hash to the index. Note that response.data is the hash of
# the block (32 bytes) and response.length is the length of the block.
self.state.fd.AddBlob(response.data, response.length)
self.Log("Received blob hash %s", response.data.encode("hex"))
self.Status("Received %s bytes", self.state.fd.size)
# Add one more chunk to the window.
self.FetchWindow(1)
@flow.StateHandler()
def End(self):
"""Finalize reading the file."""
fd = self.state.fd
if fd is None:
self.Notify("ViewObject", self.client_id, "File failed to be transferred")
else:
self.Notify("ViewObject", fd.urn, "File transferred successfully")
self.Log("Finished reading %s", fd.urn)
self.Log("Flow Completed in %s seconds",
time.time() - self.state.context.create_time / 1e6)
stat_response = self.state.fd.Get(self.state.fd.Schema.STAT)
fd.size = min(fd.size, self.state.file_size)
fd.Set(fd.Schema.CONTENT_LAST, rdfvalue.RDFDatetime().Now())
fd.Close(sync=True)
# Notify any parent flows the file is ready to be used now.
self.SendReply(stat_response)
super(GetFile, self).End()
class HashTracker(object):
def __init__(self, hash_response, is_known=False):
self.hash_response = hash_response
self.is_known = is_known
self.blob_urn = rdfvalue.RDFURN("aff4:/blobs").Add(
hash_response.data.encode("hex"))
class FileTracker(object):
"""A Class to track a single file download."""
def __init__(self, stat_entry, client_id, request_data, index=None):
self.fd = None
self.stat_entry = stat_entry
self.hash_obj = None
self.hash_list = []
self.pathspec = stat_entry.pathspec
self.urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
self.pathspec, client_id)
self.stat_entry.aff4path = self.urn
self.request_data = request_data
self.index = index
# The total number of bytes available in this file. This may be different
# from the size as reported by stat() for special files (e.g. proc files).
self.bytes_read = 0
# The number of bytes we are expected to fetch. This value depends on
# - the bytes available (stat_entry.st_size or bytes_read if available).
# - a limit to the file size in the flow (self.args.file_size).
self.size_to_download = 0
def __str__(self):
sha256 = self.hash_obj and self.hash_obj.sha256
if sha256:
return "<Tracker: %s (sha256: %s)>" % (self.urn, sha256)
else:
return "<Tracker: %s >" % self.urn
def CreateVFSFile(self, filetype, token=None, chunksize=None):
"""Create a VFSFile with stat_entry metadata.
We don't do this in __init__ since we first need to determine the
appropriate filetype.
Args:
filetype: string filetype
token: ACL token
chunksize: BlobImage chunksize
Side-Effect:
sets self.fd
Returns:
filehandle open for write
"""
# We create the file in the client namespace and populate with metadata.
self.fd = aff4.FACTORY.Create(self.urn, filetype, mode="w",
token=token)
self.fd.SetChunksize(chunksize)
self.fd.Set(self.fd.Schema.STAT(self.stat_entry))
self.fd.Set(self.fd.Schema.PATHSPEC(self.pathspec))
self.fd.Set(self.fd.Schema.CONTENT_LAST(rdfvalue.RDFDatetime().Now()))
return self.fd
class MultiGetFileMixin(object):
"""A flow mixin to efficiently retrieve a number of files.
The class extending this can provide a self.state with the following
attributes:
- file_size: int. Maximum number of bytes to download.
- use_external_stores: boolean. If true, look in any defined external file
stores for files before downloading them, and offer any new files to
external stores. This should be true unless the external checks are
misbehaving.
"""
CHUNK_SIZE = 512 * 1024
# Batch calls to the filestore to at least to group this many items. This
# allows us to amortize file store round trips and increases throughput.
MIN_CALL_TO_FILE_STORE = 200
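# --- Illustrative sketch (hypothetical, standalone): the batching pattern
# used here is "accumulate, then flush at a threshold", which amortizes the
# per-call overhead of talking to the file store. ---
def _sketch_batched_flush(pending, flush_fn, threshold=200):
    """Flush `pending` through flush_fn once `threshold` items have queued."""
    if len(pending) >= threshold:
        flush_fn(list(pending))
        del pending[:]  # reset the batch after a flush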
def Start(self):
"""Initialize our state."""
super(MultiGetFileMixin, self).Start()
self.state.Register("files_hashed", 0)
self.state.Register("use_external_stores", False)
self.state.Register("file_size", 0)
self.state.Register("files_to_fetch", 0)
self.state.Register("files_fetched", 0)
self.state.Register("files_skipped", 0)
# Counter to batch up hash checking in the filestore
self.state.Register("files_hashed_since_check", 0)
# A dict of file trackers which are waiting to be checked by the file
# store. Keys are vfs urns and values are FileTracker instances. Values are
# copied to pending_files for download if not present in FileStore.
self.state.Register("pending_hashes", {})
# A dict of file trackers currently being fetched. Keys are vfs urns and
# values are FileTracker instances.
self.state.Register("pending_files", {})
# A mapping of index values to the original pathspecs.
self.state.Register("indexed_pathspecs", {})
# Set of blobs we still need to fetch.
self.state.Register("blobs_we_need", set())
fd = aff4.FACTORY.Open(filestore.FileStore.PATH, "FileStore", mode="r",
token=self.token)
self.state.Register("filestore", fd)
def GenerateIndex(self, pathspec):
h = hashlib.sha256()
h.update(pathspec.SerializeToString())
return h.hexdigest()
def StartFileFetch(self, pathspec, request_data=None):
"""The entry point for this flow mixin - Schedules new file transfer."""
# Create an index so we can find this pathspec later.
index = self.GenerateIndex(pathspec)
self.state.indexed_pathspecs[index] = pathspec
request_data = request_data or {}
request_data["index"] = index
self.CallClient("StatFile", pathspec=pathspec,
next_state="StoreStat",
request_data=request_data)
request = rdf_client.FingerprintRequest(pathspec=pathspec,
max_filesize=self.state.file_size)
request.AddRequest(
fp_type=rdf_client.FingerprintTuple.Type.FPT_GENERIC,
hashers=[rdf_client.FingerprintTuple.HashType.MD5,
rdf_client.FingerprintTuple.HashType.SHA1,
rdf_client.FingerprintTuple.HashType.SHA256])
self.CallClient("HashFile", request, next_state="ReceiveFileHash",
request_data=request_data)
def ReceiveFetchedFile(self, stat_entry, file_hash, request_data=None):
"""This method will be called for each new file successfully fetched.
Args:
stat_entry: rdf_client.StatEntry object describing the file.
file_hash: rdf_crypto.Hash object with file hashes.
request_data: Arbitrary dictionary that was passed to the corresponding
StartFileFetch call.
"""
@flow.StateHandler()
def StoreStat(self, responses):
if not responses.success:
self.Log("Failed to stat file: %s", responses.status)
return
stat_entry = responses.First()
index = responses.request_data["index"]
self.state.pending_hashes[index] = FileTracker(
stat_entry, self.client_id, responses.request_data, index)
@flow.StateHandler(next_state="CheckHash")
def ReceiveFileHash(self, responses):
"""Add hash digest to tracker and check with filestore."""
# Support old clients which may not have the new client action in place yet.
# TODO(user): Deprecate once all clients have the HashFile action.
if not responses.success and responses.request.request.name == "HashFile":
logging.debug(
"HashFile action not available, falling back to FingerprintFile.")
self.CallClient("FingerprintFile", responses.request.request.payload,
next_state="ReceiveFileHash",
request_data=responses.request_data)
return
index = responses.request_data["index"]
if not responses.success:
self.Log("Failed to hash file: %s", responses.status)
self.state.pending_hashes.pop(index, None)
return
self.state.files_hashed += 1
response = responses.First()
if response.HasField("hash"):
hash_obj = response.hash
else:
# Deprecate this method of returning hashes.
hash_obj = rdf_crypto.Hash()
if len(response.results) < 1 or response.results[0]["name"] != "generic":
self.Log("Failed to hash file: %s", self.state.indexed_pathspecs[index])
self.state.pending_hashes.pop(index, None)
return
result = response.results[0]
try:
for hash_type in ["md5", "sha1", "sha256"]:
value = result.GetItem(hash_type)
setattr(hash_obj, hash_type, value)
except AttributeError:
self.Log("Failed to hash file: %s", self.state.indexed_pathspecs[index])
self.state.pending_hashes.pop(index, None)
return
tracker = self.state.pending_hashes[index]
tracker.hash_obj = hash_obj
tracker.bytes_read = response.bytes_read
self.state.files_hashed_since_check += 1
if self.state.files_hashed_since_check >= self.MIN_CALL_TO_FILE_STORE:
self._CheckHashesWithFileStore()
def _CheckHashesWithFileStore(self):
"""Check all queued up hashes for existence in file store.
Hashes which do not exist in the file store will be downloaded. This
function flushes the entire queue (self.state.pending_hashes) in order to
minimize the round trips to the file store.
If a file was found in the file store it is copied from there into the
client's VFS namespace. Otherwise, we request the client to hash every block
in the file, and add it to the file tracking queue
(self.state.pending_files).
"""
if not self.state.pending_hashes:
return
# This map represents all the hashes in the pending urns.
file_hashes = {}
# Store urns by hash to allow us to remove duplicates.
# keys are hashdigest objects, values are arrays of tracker objects.
hash_to_urn = {}
for index, tracker in self.state.pending_hashes.iteritems():
# We might not have gotten this hash yet
if tracker.hash_obj is None:
continue
digest = tracker.hash_obj.sha256
file_hashes[index] = tracker.hash_obj
hash_to_urn.setdefault(digest, []).append(tracker)
# First we get all the files which are present in the file store.
files_in_filestore = set()
for file_store_urn, hash_obj in self.state.filestore.CheckHashes(
file_hashes.values(), external=self.state.use_external_stores):
self.HeartBeat()
# Since CheckHashes only returns one digest per unique hash we need to
# find any other files pending download with the same hash.
for tracker in hash_to_urn[hash_obj.sha256]:
self.state.files_skipped += 1
file_hashes.pop(tracker.index)
files_in_filestore.add(file_store_urn)
# Remove this tracker from the pending_hashes store since we no longer
# need to process it.
self.state.pending_hashes.pop(tracker.index)
# Now that the check is done, reset our counter
self.state.files_hashed_since_check = 0
# Now copy all existing files to the client aff4 space.
for existing_blob in aff4.FACTORY.MultiOpen(files_in_filestore,
mode="rw", token=self.token):
hashset = existing_blob.Get(existing_blob.Schema.HASH)
if hashset is None:
self.Log("Filestore File %s has no hash.", existing_blob.urn)
continue
for file_tracker in hash_to_urn.get(hashset.sha256, []):
# Due to potential filestore corruption, the existing_blob files can
# have 0 size, make sure our size matches the actual size in that case.
if existing_blob.size == 0:
existing_blob.size = (file_tracker.bytes_read or
file_tracker.stat_entry.st_size)
# Create a file in the client name space with the same classtype and
# populate its attributes.
file_tracker.CreateVFSFile(existing_blob.__class__.__name__,
token=self.token,
chunksize=self.CHUNK_SIZE)
file_tracker.fd.FromBlobImage(existing_blob)
file_tracker.fd.Set(hashset)
# Add this file to the index at the canonical location
existing_blob.AddIndex(file_tracker.urn)
# It is not critical that this file be written immediately.
file_tracker.fd.Close(sync=False)
# Let the caller know we have this file already.
self.ReceiveFetchedFile(file_tracker.stat_entry, file_tracker.hash_obj,
request_data=file_tracker.request_data)
# Now we iterate over all the files which are not in the store and arrange
# for them to be copied.
for index in file_hashes:
# Move the tracker from the pending hashes store to the pending files
# store - it will now be downloaded.
file_tracker = self.state.pending_hashes.pop(index)
self.state.pending_files[index] = file_tracker
# Create the VFS file for this file tracker.
file_tracker.CreateVFSFile("VFSBlobImage", token=self.token,
chunksize=self.CHUNK_SIZE)
# If we already know how big the file is we use that, otherwise fall back
# to the size reported by stat.
if file_tracker.bytes_read > 0:
file_tracker.size_to_download = file_tracker.bytes_read
else:
file_tracker.size_to_download = file_tracker.stat_entry.st_size
# We do not have the file here yet - we need to retrieve it.
expected_number_of_hashes = (
file_tracker.size_to_download / self.CHUNK_SIZE + 1)
# We just hash ALL the chunks in the file now. NOTE: This maximizes client
# VFS cache hit rate and is far more efficient than launching multiple
# GetFile flows.
self.state.files_to_fetch += 1
for i in range(expected_number_of_hashes):
if i == expected_number_of_hashes - 1:
# The last chunk is short.
length = file_tracker.size_to_download % self.CHUNK_SIZE
else:
length = self.CHUNK_SIZE
self.CallClient("HashBuffer", pathspec=file_tracker.pathspec,
offset=i * self.CHUNK_SIZE,
length=length, next_state="CheckHash",
request_data=dict(index=index))
if self.state.files_hashed % 100 == 0:
self.Log("Hashed %d files, skipped %s already stored.",
self.state.files_hashed, self.state.files_skipped)
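# --- Illustrative sketch (hypothetical, standalone): the filestore check
# above is content-addressed storage in miniature - files are keyed by their
# sha256 digest, and a hash hit lets us link to existing content instead of
# transferring it again. ---
def _sketch_fetch_deduplicated(store, digest, fetch_fn):
    """Return file content, fetching only when the digest is unknown."""
    if digest not in store:
        store[digest] = fetch_fn()  # only transfer content we have never seen
    return store[digest]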
@flow.StateHandler(next_state="WriteBuffer")
def CheckHash(self, responses):
"""Adds the block hash to the file tracker responsible for this vfs URN."""
index = responses.request_data["index"]
if index not in self.state.pending_files:
# This is a blob hash for a file we already failed to read and logged
# below; check here to avoid logging duplicates.
return
file_tracker = self.state.pending_files[index]
hash_response = responses.First()
if not responses.success or not hash_response:
self.Log("Failed to read %s: %s" % (file_tracker.urn, responses.status))
del self.state.pending_files[index]
return
hash_tracker = HashTracker(hash_response)
file_tracker.hash_list.append(hash_tracker)
self.state.blobs_we_need.add(hash_tracker.blob_urn)
if len(self.state.blobs_we_need) > self.MIN_CALL_TO_FILE_STORE:
self.FetchFileContent()
def FetchFileContent(self):
"""Fetch as much as the file's content as possible.
This drains the pending_files store by checking which blobs we already have
in the store and issuing calls to the client to receive outstanding blobs.
"""
if not self.state.pending_files:
return
# Check if we have all the blobs in the blob AFF4 namespace.
stats = aff4.FACTORY.Stat(self.state.blobs_we_need, token=self.token)
blobs_we_have = set([x["urn"] for x in stats])
self.state.blobs_we_need = set()
# Now iterate over all the blobs and add them directly to the blob image.
for index, file_tracker in self.state.pending_files.iteritems():
for hash_tracker in file_tracker.hash_list:
# Make sure we read the correct pathspec on the client.
hash_tracker.hash_response.pathspec = file_tracker.pathspec
if hash_tracker.blob_urn in blobs_we_have:
# If we have the data we may call our state directly.
self.CallState([hash_tracker.hash_response],
next_state="WriteBuffer",
request_data=dict(index=index))
else:
# We don't have this blob - ask the client to transmit it.
self.CallClient("TransferBuffer", hash_tracker.hash_response,
next_state="WriteBuffer",
request_data=dict(index=index))
# Clear the file tracker's hash list.
file_tracker.hash_list = []
@flow.StateHandler(next_state="IterateFind")
def WriteBuffer(self, responses):
"""Write the hash received to the blob image."""
# Note that hashes must arrive at this state in the correct order since they
# are sent in the correct order (either via CallState or CallClient).
index = responses.request_data["index"]
if index not in self.state.pending_files:
return
# Failed to read the file - ignore it.
if not responses.success:
return self.RemoveInFlightFile(index)
response = responses.First()
file_tracker = self.state.pending_files.get(index)
if file_tracker:
file_tracker.fd.AddBlob(response.data, response.length)
if (response.length < file_tracker.fd.chunksize or
response.offset + response.length >= file_tracker.size_to_download):
# File done, remove from the store and close it.
self.RemoveInFlightFile(index)
# Close and write the file to the data store.
file_tracker.fd.Close(sync=True)
# Publish the new file event to cause the file to be added to the
# filestore. This is not time critical so do it when we have spare
# capacity.
self.Publish("FileStore.AddFileToStore", file_tracker.fd.urn,
priority=rdf_flows.GrrMessage.Priority.LOW_PRIORITY)
self.state.files_fetched += 1
if not self.state.files_fetched % 100:
self.Log("Fetched %d of %d files.", self.state.files_fetched,
self.state.files_to_fetch)
def RemoveInFlightFile(self, index):
file_tracker = self.state.pending_files.pop(index)
if file_tracker:
self.ReceiveFetchedFile(file_tracker.stat_entry, file_tracker.hash_obj,
request_data=file_tracker.request_data)
@flow.StateHandler(next_state=["CheckHash", "WriteBuffer"])
def End(self):
# There are some files still in flight.
if self.state.pending_hashes or self.state.pending_files:
self._CheckHashesWithFileStore()
self.FetchFileContent()
if not self.runner.OutstandingRequests():
super(MultiGetFileMixin, self).End()
class MultiGetFileArgs(rdf_structs.RDFProtoStruct):
protobuf = flows_pb2.MultiGetFileArgs
class MultiGetFile(MultiGetFileMixin, flow.GRRFlow):
"""A flow to effectively retrieve a number of files."""
args_type = MultiGetFileArgs
@flow.StateHandler(next_state=["ReceiveFileHash", "StoreStat"])
def Start(self):
"""Start state of the flow."""
super(MultiGetFile, self).Start()
self.state.use_external_stores = self.args.use_external_stores
self.state.file_size = self.args.file_size
unique_paths = set()
for pathspec in self.args.pathspecs:
vfs_urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
pathspec, self.client_id)
if vfs_urn not in unique_paths:
# Only Stat/Hash each path once, input pathspecs can have dups.
unique_paths.add(vfs_urn)
self.StartFileFetch(pathspec)
def ReceiveFetchedFile(self, stat_entry, unused_hash_obj,
request_data=None):
"""This method will be called for each new file successfully fetched."""
_ = request_data
self.SendReply(stat_entry)
class FileStoreCreateFile(flow.EventListener):
"""Receive an event about a new file and add it to the file store.
The file store is a central place where files are managed in the data
store. Files are deduplicated and stored centrally.
This event listener will be fired when a new file is downloaded through
e.g. the GetFile flow. We then recalculate the file's hashes and store it in
the data store under a canonical URN.
"""
EVENTS = ["FileStore.AddFileToStore"]
well_known_session_id = rdfvalue.SessionID(
flow_name="FileStoreCreateFile")
CHUNK_SIZE = 512 * 1024
def UpdateIndex(self, target_urn, src_urn):
"""Update the index from the source to the target."""
idx = aff4.FACTORY.Create(src_urn, "AFF4Index", mode="w", token=self.token)
idx.Add(target_urn, "", target_urn)
@flow.EventHandler()
def ProcessMessage(self, message=None, event=None):
"""Process the new file and add to the file store."""
_ = event
vfs_urn = message.payload
vfs_fd = aff4.FACTORY.Open(vfs_urn, mode="rw", token=self.token)
filestore_fd = aff4.FACTORY.Create(filestore.FileStore.PATH, "FileStore",
mode="w", token=self.token)
filestore_fd.AddFile(vfs_fd)
vfs_fd.Flush(sync=False)
class GetMBRArgs(rdf_structs.RDFProtoStruct):
protobuf = flows_pb2.GetMBRArgs
class GetMBR(flow.GRRFlow):
"""A flow to retrieve the MBR.
Returns to parent flow:
The retrieved MBR.
"""
category = "/Filesystem/"
args_type = GetMBRArgs
behaviours = flow.GRRFlow.behaviours + "BASIC"
@flow.StateHandler(next_state=["StoreMBR"])
def Start(self):
"""Schedules the ReadBuffer client action."""
pathspec = rdf_paths.PathSpec(
path="\\\\.\\PhysicalDrive0\\",
pathtype=rdf_paths.PathSpec.PathType.OS,
path_options=rdf_paths.PathSpec.Options.CASE_LITERAL)
request = rdf_client.BufferReference(pathspec=pathspec, offset=0,
length=self.args.length)
self.CallClient("ReadBuffer", request, next_state="StoreMBR")
@flow.StateHandler()
def StoreMBR(self, responses):
"""This method stores the MBR."""
if not responses.success:
msg = "Could not retrieve MBR: %s" % responses.status
self.Log(msg)
raise flow.FlowError(msg)
response = responses.First()
mbr = aff4.FACTORY.Create(self.client_id.Add("mbr"), "VFSMemoryFile",
mode="rw", token=self.token)
mbr.write(response.data)
mbr.Close()
self.Log("Successfully stored the MBR (%d bytes)." % len(response.data))
self.SendReply(rdfvalue.RDFBytes(response.data))
class TransferStore(flow.WellKnownFlow):
"""Store a buffer into a determined location."""
well_known_session_id = rdfvalue.SessionID(flow_name="TransferStore")
def ProcessMessage(self, message):
"""Write the blob into the AFF4 blob storage area."""
# Check that the message is authenticated
if (message.auth_state !=
rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED):
logging.error("TransferStore request from %s is not authenticated.",
message.source)
return
read_buffer = rdf_protodict.DataBlob(message.payload)
# Only store non empty buffers
if read_buffer.data:
data = read_buffer.data
if (read_buffer.compression ==
rdf_protodict.DataBlob.CompressionType.ZCOMPRESSION):
cdata = data
data = zlib.decompress(cdata)
elif (read_buffer.compression ==
rdf_protodict.DataBlob.CompressionType.UNCOMPRESSED):
cdata = zlib.compress(data)
else:
raise RuntimeError("Unsupported compression")
# The hash is done on the uncompressed data
digest = hashlib.sha256(data).digest()
urn = rdfvalue.RDFURN("aff4:/blobs").Add(digest.encode("hex"))
fd = aff4.FACTORY.Create(urn, "AFF4MemoryStream", mode="w",
token=self.token)
fd.OverwriteAndClose(cdata, len(data), sync=True)
logging.debug("Got blob %s (length %s)", digest.encode("hex"),
len(cdata))
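# --- Illustrative sketch (hypothetical helper; hashlib and zlib are already
# imported at the top of this module). TransferStore keys each blob by the
# sha256 of the *uncompressed* data, so identical content always maps to the
# same URN: ---
def _sketch_blob_urn(data, compressed=False):
    """Return the content-addressed blob URN string for a buffer."""
    raw = zlib.decompress(data) if compressed else data
    return "aff4:/blobs/" + hashlib.sha256(raw).hexdigest()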
class SendFile(flow.GRRFlow):
"""This flow sends a file to remote listener.
To use this flow, choose a key and an IV in hex format (if run from the GUI,
there will be a pregenerated pair key and iv for you to use) and run a
listener on the server you want to use like this:
nc -l <port> | openssl aes-128-cbc -d -K <key> -iv <iv> > <filename>
Returns to parent flow:
A rdf_client.StatEntry of the sent file.
"""
category = "/Filesystem/"
args_type = rdf_client.SendFileRequest
@flow.StateHandler(next_state="Done")
def Start(self):
"""This issues the sendfile request."""
self.CallClient("SendFile", self.args, next_state="Done")
@flow.StateHandler()
def Done(self, responses):
if not responses.success:
self.Log(responses.status.error_message)
raise flow.FlowError(responses.status.error_message)
| statik/grr | lib/flows/general/transfer.py | Python | apache-2.0 | 30,601 |
import os
import tensorflow as tf
import numpy as np
import tarfile
import icdar
tf.app.flags.DEFINE_string('tarfile',
'data.tar.gz',
'tarfile to uncompress')
tf.app.flags.DEFINE_string('tarpath', '', 'tarfile inner path')
FLAGS = tf.app.flags.FLAGS
TMP_OUTPUT_DIR = "/tmp/"
initialized = False
def prepare_data_once(data_dir):
def prepare_data():
# untar data to tmp
src_tar = os.path.join(data_dir, FLAGS.tarfile)
sub_path = FLAGS.tarpath
output_path = TMP_OUTPUT_DIR
print(src_tar)
global initialized
if not initialized:
initialized = True
with tarfile.open(src_tar, "r:gz") as tar:
tar.extractall(output_path)
# the with-block already closes the tarfile; no explicit close() needed
print('finish untar')
data_path = os.path.join(TMP_OUTPUT_DIR, sub_path)
return data_path
return prepare_data
class EastDataSet(object):
def __init__(self, data_dir, batch_size, subset='train', use_distortion=True):
prepare_action = prepare_data_once(data_dir)
data_path = prepare_action()
FLAGS.training_data_path = data_path
generator = icdar.get_batch(num_workers=FLAGS.num_readers,
input_size=FLAGS.input_size,
batch_size=batch_size)
self.generator = generator
self.subset = subset
def gen(self):
while True:
data = next(self.generator)
input_images = np.asarray(data[0])
input_score_maps = np.asarray(data[2])
input_geo_maps = np.asarray(data[3])
input_training_masks = np.asarray(data[4])
yield input_images, input_score_maps, input_geo_maps, input_training_masks
def make_batch(self, batch_size):
dataset = tf.data.Dataset.from_generator(
self.gen, (tf.float32, tf.float32, tf.float32, tf.float32),
(tf.TensorShape([batch_size, 512, 512, 3]),
tf.TensorShape([batch_size, 128, 128, 1]),
tf.TensorShape([batch_size, 128, 128, 5]),
tf.TensorShape([batch_size, 128, 128, 1])))
iterator = dataset.make_one_shot_iterator()
image_batch, score_map_batch, geo_map_batch, training_mask_batch = iterator.get_next()
return image_batch, score_map_batch, geo_map_batch, training_mask_batch
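# --- Illustrative usage sketch (hypothetical; assumes FLAGS.num_readers and
# FLAGS.input_size are defined elsewhere, as the constructor requires). ---
def _sketch_one_batch(data_dir, batch_size=8):
    """Pull a single batch of numpy arrays through the tf.data pipeline."""
    dataset = EastDataSet(data_dir, batch_size)
    tensors = dataset.make_batch(batch_size)
    with tf.Session() as session:
        return session.run(tensors)  # (images, score_maps, geo_maps, masks)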
| ucloud/uai-sdk | examples/tensorflow/train/east/code/icdar_dataset.py | Python | apache-2.0 | 3,472 |
#
# Module implementing queues
#
# multiprocessing/queues.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue']
import sys
import os
import threading
import collections
import time
import atexit
import weakref
from Queue import Empty, Full
import _multiprocessing
from multiprocessing import Pipe
from multiprocessing.synchronize import Lock, BoundedSemaphore, Semaphore, Condition
from multiprocessing.util import debug, info, Finalize, register_after_fork
from multiprocessing.forking import assert_spawning
#
# Queue type using a pipe, buffer and thread
#
class Queue(object):
def __init__(self, maxsize=0):
if maxsize <= 0:
maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
self._maxsize = maxsize
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
self._opid = os.getpid()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = Lock()
self._sem = BoundedSemaphore(maxsize)
self._after_fork()
if sys.platform != 'win32':
register_after_fork(self, Queue._after_fork)
def __getstate__(self):
assert_spawning(self)
return (self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid)
def __setstate__(self, state):
(self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid) = state
self._after_fork()
def _after_fork(self):
debug('Queue._after_fork()')
self._notempty = threading.Condition(threading.Lock())
self._buffer = collections.deque()
self._thread = None
self._jointhread = None
self._joincancelled = False
self._closed = False
self._close = None
self._send = self._writer.send
self._recv = self._reader.recv
self._poll = self._reader.poll
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
self._notempty.acquire()
try:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._notempty.notify()
finally:
self._notempty.release()
def get(self, block=True, timeout=None):
if block and timeout is None:
self._rlock.acquire()
try:
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
else:
if block:
deadline = time.time() + timeout
if not self._rlock.acquire(block, timeout):
raise Empty
try:
if not self._poll(block and (deadline-time.time()) or 0.0):
raise Empty
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
def qsize(self):
# Raises NotImplementedError on Mac OSX because of broken sem_getvalue()
return self._maxsize - self._sem._semlock._get_value()
def empty(self):
return not self._poll()
def full(self):
return self._sem._semlock._is_zero()
def get_nowait(self):
return self.get(False)
def put_nowait(self, obj):
return self.put(obj, False)
def close(self):
self._closed = True
self._reader.close()
if self._close:
self._close()
def join_thread(self):
debug('Queue.join_thread()')
assert self._closed
if self._jointhread:
self._jointhread()
def cancel_join_thread(self):
debug('Queue.cancel_join_thread()')
self._joincancelled = True
try:
self._jointhread.cancel()
except AttributeError:
pass
def _start_thread(self):
debug('Queue._start_thread()')
# Start thread which transfers data from buffer to pipe
self._buffer.clear()
self._thread = threading.Thread(
target=Queue._feed,
args=(self._buffer, self._notempty, self._send,
self._wlock, self._writer.close),
name='QueueFeederThread'
)
self._thread.daemon = True
debug('doing self._thread.start()')
self._thread.start()
debug('... done self._thread.start()')
# On process exit we will wait for data to be flushed to pipe.
#
# However, if this process created the queue then all
# processes which use the queue will be descendants of this
# process. Therefore waiting for the queue to be flushed
# is pointless once all the child processes have been joined.
created_by_this_process = (self._opid == os.getpid())
if not self._joincancelled and not created_by_this_process:
self._jointhread = Finalize(
self._thread, Queue._finalize_join,
[weakref.ref(self._thread)],
exitpriority=-5
)
# Send sentinel to the thread queue object when garbage collected
self._close = Finalize(
self, Queue._finalize_close,
[self._buffer, self._notempty],
exitpriority=10
)
@staticmethod
def _finalize_join(twr):
debug('joining queue thread')
thread = twr()
if thread is not None:
thread.join()
debug('... queue thread joined')
else:
debug('... queue thread already dead')
@staticmethod
def _finalize_close(buffer, notempty):
debug('telling queue thread to quit')
notempty.acquire()
try:
buffer.append(_sentinel)
notempty.notify()
finally:
notempty.release()
@staticmethod
def _feed(buffer, notempty, send, writelock, close):
debug('starting thread to feed data to pipe')
from .util import is_exiting
nacquire = notempty.acquire
nrelease = notempty.release
nwait = notempty.wait
bpopleft = buffer.popleft
sentinel = _sentinel
if sys.platform != 'win32':
wacquire = writelock.acquire
wrelease = writelock.release
else:
wacquire = None
try:
while 1:
nacquire()
try:
if not buffer:
nwait()
finally:
nrelease()
try:
while 1:
obj = bpopleft()
if obj is sentinel:
debug('feeder thread got sentinel -- exiting')
close()
return
if wacquire is None:
send(obj)
else:
wacquire()
try:
send(obj)
finally:
wrelease()
except IndexError:
pass
except Exception, e:
# Since this runs in a daemon thread the resources it uses
# may become unusable while the process is cleaning up.
# We ignore errors which happen after the process has
# started to clean up.
try:
if is_exiting():
info('error in queue thread: %s', e)
else:
import traceback
traceback.print_exc()
except Exception:
pass
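# --- Illustrative usage sketch (hypothetical; assumes a fork-based start
# method so the Queue object can be inherited by the child process). ---
def _sketch_queue_usage():
    """Minimal producer/consumer round trip through the Queue above."""
    from multiprocessing import Process
    q = Queue()
    p = Process(target=q.put, args=('done',))
    p.start()
    result = q.get()  # blocks until the child's put is flushed to the pipe
    p.join()
    return result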
_sentinel = object()
#
# A queue type which also supports join() and task_done() methods
#
# Note that if you do not call task_done() for each finished task then
# eventually the counter's semaphore may overflow causing Bad Things
# to happen.
#
class JoinableQueue(Queue):
def __init__(self, maxsize=0):
Queue.__init__(self, maxsize)
self._unfinished_tasks = Semaphore(0)
self._cond = Condition()
def __getstate__(self):
return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks)
def __setstate__(self, state):
Queue.__setstate__(self, state[:-2])
self._cond, self._unfinished_tasks = state[-2:]
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
self._notempty.acquire()
self._cond.acquire()
try:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._unfinished_tasks.release()
self._notempty.notify()
finally:
self._cond.release()
self._notempty.release()
def task_done(self):
self._cond.acquire()
try:
if not self._unfinished_tasks.acquire(False):
raise ValueError('task_done() called too many times')
if self._unfinished_tasks._semlock._is_zero():
self._cond.notify_all()
finally:
self._cond.release()
def join(self):
self._cond.acquire()
try:
if not self._unfinished_tasks._semlock._is_zero():
self._cond.wait()
finally:
self._cond.release()
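# --- Illustrative sketch (hypothetical; `Empty` is imported at module top).
# The task_done()/join() contract: every completed get() must be matched by
# exactly one task_done(); join() returns once the unfinished count is zero. ---
def _sketch_drain(joinable_queue):
    """Drain a JoinableQueue, acknowledging each item exactly once."""
    while True:
        try:
            item = joinable_queue.get_nowait()
        except Empty:
            break
        # ... process `item` here ...
        joinable_queue.task_done()  # one task_done per completed get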
#
# Simplified Queue type -- really just a locked pipe
#
class SimpleQueue(object):
def __init__(self):
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = Lock()
self._make_methods()
def empty(self):
return not self._reader.poll()
def __getstate__(self):
assert_spawning(self)
return (self._reader, self._writer, self._rlock, self._wlock)
def __setstate__(self, state):
(self._reader, self._writer, self._rlock, self._wlock) = state
self._make_methods()
def _make_methods(self):
recv = self._reader.recv
racquire, rrelease = self._rlock.acquire, self._rlock.release
def get():
racquire()
try:
return recv()
finally:
rrelease()
self.get = get
if self._wlock is None:
# writes to a message oriented win32 pipe are atomic
self.put = self._writer.send
else:
send = self._writer.send
wacquire, wrelease = self._wlock.acquire, self._wlock.release
def put(obj):
wacquire()
try:
return send(obj)
finally:
wrelease()
self.put = put
| Symmetry-Innovations-Pty-Ltd/Python-2.7-for-QNX6.5.0-x86 | usr/pkg/lib/python2.7/multiprocessing/queues.py | Python | mit | 12,547 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import json
import uuid
import tornado.web
from .exceptions import \
FieldMissingException, \
InvalidMethodException, \
FormattingException, \
NotFoundException, \
ValueInvalidException, \
APIException
class BaseApi(tornado.web.RequestHandler):
REQUIRED_GET_FIELDS = {}
REQUIRED_POST_FIELDS = {}
REQUIRED_PUT_FIELDS = {}
REQUIRED_PATCH_FIELDS = {}
REQUIRED_DELETE_FIELDS = {}
REQUIRED_OPTIONS_FIELDS = {}
__FIELD_MAP = {
'GET': REQUIRED_GET_FIELDS,
'POST': REQUIRED_POST_FIELDS,
'PUT': REQUIRED_PUT_FIELDS,
'PATCH': REQUIRED_PATCH_FIELDS,
'DELETE': REQUIRED_DELETE_FIELDS,
'OPTIONS': REQUIRED_OPTIONS_FIELDS,
}
ENCODE = 'utf-8'
def __init__(self, *args, **kwargs):
super(BaseApi, self).__init__(*args, **kwargs)
self._error = None
self._data = None
# Request ID that can be used for logging, auditing and debug purposes
self.request_id = str(uuid.uuid4())
def auth(self):
pass
def invalid_method(self):
raise InvalidMethodException(self.request.method)
def write_error(self, *args, **kwargs):
e = kwargs["exc_info"][1]
if isinstance(e, APIException):
serialized = e.serialize()
self.set_error(serialized['type'], serialized['message'], serialized['blame'])
code = e.CODE
else:
self.set_error(e.__class__.__name__, str(e), 'server')
code = 500
self.crap_out(code=code)
def set_error(self, error_type, message, blame):
self._error = {
'type': error_type,
'message': message,
'blame': blame,
'request': self.request_id
}
def write_response(self, data):
self._data = data
self.finalize()
def crap_out(self, code=400):
self.set_status(code)
self.finalize()
self.finish()
def validate(self):
fields = getattr(self, 'REQUIRED_{}_FIELDS'.format(self.request.method))
for field in fields:
if field not in self.request.arguments:
raise FieldMissingException(field)
try:
fields[field].validate(self.request.arguments[field])
except ValueError as e:
raise ValueInvalidException(blame=field, message=str(e))
def finalize(self):
response = {}
if self._error is not None:
response['error'] = self._error
elif self._data is not None:
response['data'] = self._data
self.set_header('Content-Type', 'application/json')
self.write(json.dumps(response))
def prepare(self):
# Before anything else, authenticate.
# We don't want to waste CPU cycles on unauthenticated requests.
self.auth()
self.request.arguments = {}
for argument in self.request.query_arguments:
self.request.arguments[argument] = self.request.query_arguments[argument][0]
if self.request.method == 'GET':
self.validate()
return
body = self.request.body.decode(self.__class__.ENCODE)
if body is None or body == '':
self.validate()
return
if body in self.request.arguments:
del self.request.arguments[body]
try:
json_data = json.loads(body)
self.request.arguments.update(json_data)
except ValueError:
raise FormattingException('request')
self.validate()
def post(self, *args, **kwargs):
self.invalid_method()
def get(self):
self.invalid_method()
def options(self, *args, **kwargs):
self.invalid_method()
def patch(self, *args, **kwargs):
self.invalid_method()
def put(self, *args, **kwargs):
self.invalid_method()
def delete(self, *args, **kwargs):
self.invalid_method()
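# --- Illustrative sketch (hypothetical handler, not part of this module):
# a concrete endpoint subclasses BaseApi, declares the fields validate()
# should enforce for each verb, and overrides only the verbs it supports. ---
class _SketchEchoApi(BaseApi):
    REQUIRED_POST_FIELDS = {}  # e.g. {'name': some_validator} in real use
    def post(self, *args, **kwargs):
        # Arguments were merged from the query string and JSON body in prepare()
        self.write_response({'echo': self.request.arguments})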
class NotFoundApi(BaseApi):
def not_found(self):
raise NotFoundException('request')
def post(self, *args, **kwargs):
self.not_found()
def get(self, *args, **kwargs):
self.not_found()
def options(self, *args, **kwargs):
self.not_found()
def patch(self, *args, **kwargs):
self.not_found()
def put(self, *args, **kwargs):
self.not_found()
def delete(self, *args, **kwargs):
self.not_found()
| py-xia/xia | xia/api.py | Python | mit | 4,551 |
# -*- coding: utf-8 -*-
"""
Tests of responsetypes
"""
from datetime import datetime
import json
import os
import pyparsing
import random
import unittest
import textwrap
import requests
import mock
from . import new_loncapa_problem, test_capa_system
import calc
from capa.responsetypes import LoncapaProblemError, \
StudentInputError, ResponseError
from capa.correctmap import CorrectMap
from capa.util import convert_files_to_filenames
from capa.xqueue_interface import dateformat
from pytz import UTC
class ResponseTest(unittest.TestCase):
"""Base class for tests of capa responses."""
xml_factory_class = None
# If something is wrong, show it to us.
maxDiff = None
def setUp(self):
if self.xml_factory_class:
self.xml_factory = self.xml_factory_class()
def build_problem(self, capa_system=None, **kwargs):
xml = self.xml_factory.build_xml(**kwargs)
return new_loncapa_problem(xml, capa_system=capa_system)
def assert_grade(self, problem, submission, expected_correctness, msg=None):
input_dict = {'1_2_1': submission}
correct_map = problem.grade_answers(input_dict)
if msg is None:
self.assertEquals(correct_map.get_correctness('1_2_1'), expected_correctness)
else:
self.assertEquals(correct_map.get_correctness('1_2_1'), expected_correctness, msg)
def assert_answer_format(self, problem):
answers = problem.get_question_answers()
self.assertTrue(answers['1_2_1'] is not None)
def assert_multiple_grade(self, problem, correct_answers, incorrect_answers):
for input_str in correct_answers:
result = problem.grade_answers({'1_2_1': input_str}).get_correctness('1_2_1')
self.assertEqual(result, 'correct')
for input_str in incorrect_answers:
result = problem.grade_answers({'1_2_1': input_str}).get_correctness('1_2_1')
self.assertEqual(result, 'incorrect')
def _get_random_number_code(self):
"""Returns code to be used to generate a random result."""
return "str(random.randint(0, 1e9))"
def _get_random_number_result(self, seed_value):
"""Returns a result that should be generated using the random_number_code."""
rand = random.Random(seed_value)
return str(rand.randint(0, 1e9))
class MultiChoiceResponseTest(ResponseTest):
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
xml_factory_class = MultipleChoiceResponseXMLFactory
def test_multiple_choice_grade(self):
problem = self.build_problem(choices=[False, True, False])
# Ensure that we get the expected grades
self.assert_grade(problem, 'choice_0', 'incorrect')
self.assert_grade(problem, 'choice_1', 'correct')
self.assert_grade(problem, 'choice_2', 'incorrect')
def test_named_multiple_choice_grade(self):
problem = self.build_problem(choices=[False, True, False],
choice_names=["foil_1", "foil_2", "foil_3"])
# Ensure that we get the expected grades
self.assert_grade(problem, 'choice_foil_1', 'incorrect')
self.assert_grade(problem, 'choice_foil_2', 'correct')
self.assert_grade(problem, 'choice_foil_3', 'incorrect')
class TrueFalseResponseTest(ResponseTest):
from capa.tests.response_xml_factory import TrueFalseResponseXMLFactory
xml_factory_class = TrueFalseResponseXMLFactory
def test_true_false_grade(self):
problem = self.build_problem(choices=[False, True, True])
# Check the results
# Mark correct if and only if ALL (and only) correct choices selected
self.assert_grade(problem, 'choice_0', 'incorrect')
self.assert_grade(problem, 'choice_1', 'incorrect')
self.assert_grade(problem, 'choice_2', 'incorrect')
self.assert_grade(problem, ['choice_0', 'choice_1', 'choice_2'], 'incorrect')
self.assert_grade(problem, ['choice_0', 'choice_2'], 'incorrect')
self.assert_grade(problem, ['choice_0', 'choice_1'], 'incorrect')
self.assert_grade(problem, ['choice_1', 'choice_2'], 'correct')
# Invalid choices should be marked incorrect (we have no choice 3)
self.assert_grade(problem, 'choice_3', 'incorrect')
self.assert_grade(problem, 'not_a_choice', 'incorrect')
def test_named_true_false_grade(self):
problem = self.build_problem(choices=[False, True, True],
choice_names=['foil_1', 'foil_2', 'foil_3'])
# Check the results
# Mark correct if and only if ALL (and only) correct choices selected
self.assert_grade(problem, 'choice_foil_1', 'incorrect')
self.assert_grade(problem, 'choice_foil_2', 'incorrect')
self.assert_grade(problem, 'choice_foil_3', 'incorrect')
self.assert_grade(problem, ['choice_foil_1', 'choice_foil_2', 'choice_foil_3'], 'incorrect')
self.assert_grade(problem, ['choice_foil_1', 'choice_foil_3'], 'incorrect')
self.assert_grade(problem, ['choice_foil_1', 'choice_foil_2'], 'incorrect')
self.assert_grade(problem, ['choice_foil_2', 'choice_foil_3'], 'correct')
# Invalid choices should be marked incorrect
self.assert_grade(problem, 'choice_foil_4', 'incorrect')
self.assert_grade(problem, 'not_a_choice', 'incorrect')
class ImageResponseTest(ResponseTest):
from capa.tests.response_xml_factory import ImageResponseXMLFactory
xml_factory_class = ImageResponseXMLFactory
def test_rectangle_grade(self):
# Define a rectangle with corners (10,10) and (20,20)
problem = self.build_problem(rectangle="(10,10)-(20,20)")
# Anything inside the rectangle (and along the borders) is correct
# Everything else is incorrect
correct_inputs = ["[12,19]", "[10,10]", "[20,20]",
"[10,15]", "[20,15]", "[15,10]", "[15,20]"]
incorrect_inputs = ["[4,6]", "[25,15]", "[15,40]", "[15,4]"]
self.assert_multiple_grade(problem, correct_inputs, incorrect_inputs)
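# --- Illustrative sketch (standalone): the rectangle grader is an inclusive
# bounds check on both axes, which is why the border points above pass. ---
def _sketch_in_rect(point, lower_left, upper_right):
    """Return True when `point` lies inside or on the rectangle."""
    (x, y), (x0, y0), (x1, y1) = point, lower_left, upper_right
    return x0 <= x <= x1 and y0 <= y <= y1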
def test_multiple_rectangles_grade(self):
# Define two rectangles
rectangle_str = "(10,10)-(20,20);(100,100)-(200,200)"
# Expect that only points inside the rectangles are marked correct
problem = self.build_problem(rectangle=rectangle_str)
correct_inputs = ["[12,19]", "[120, 130]"]
incorrect_inputs = ["[4,6]", "[25,15]", "[15,40]", "[15,4]",
"[50,55]", "[300, 14]", "[120, 400]"]
self.assert_multiple_grade(problem, correct_inputs, incorrect_inputs)
def test_region_grade(self):
# Define a triangular region with corners (1,1), (5,10), and (0,10)
region_str = "[ [1,1], [5,10], [0,10] ]"
# Expect that only points inside the triangle are marked correct
problem = self.build_problem(regions=region_str)
correct_inputs = ["[2,4]", "[1,3]"]
incorrect_inputs = ["[0,0]", "[3,5]", "[5,15]", "[30, 12]"]
self.assert_multiple_grade(problem, correct_inputs, incorrect_inputs)
def test_multiple_regions_grade(self):
# Define multiple regions that the user can select
region_str = "[[[10,10], [20,10], [20, 30]], [[100,100], [120,100], [120,150]]]"
# Expect that only points inside the regions are marked correct
problem = self.build_problem(regions=region_str)
correct_inputs = ["[15,12]", "[110,112]"]
incorrect_inputs = ["[0,0]", "[600,300]"]
self.assert_multiple_grade(problem, correct_inputs, incorrect_inputs)
def test_region_and_rectangle_grade(self):
rectangle_str = "(100,100)-(200,200)"
region_str = "[[10,10], [20,10], [20, 30]]"
# Expect that only points inside the rectangle or region are marked correct
problem = self.build_problem(regions=region_str, rectangle=rectangle_str)
correct_inputs = ["[13,12]", "[110,112]"]
incorrect_inputs = ["[0,0]", "[600,300]"]
self.assert_multiple_grade(problem, correct_inputs, incorrect_inputs)
def test_show_answer(self):
rectangle_str = "(100,100)-(200,200)"
region_str = "[[10,10], [20,10], [20, 30]]"
problem = self.build_problem(regions=region_str, rectangle=rectangle_str)
self.assert_answer_format(problem)
class SymbolicResponseTest(ResponseTest):
from capa.tests.response_xml_factory import SymbolicResponseXMLFactory
xml_factory_class = SymbolicResponseXMLFactory
def test_grade_single_input_correct(self):
problem = self.build_problem(math_display=True, expect="2*x+3*y")
# Correct answers
correct_inputs = [
('2x+3y', textwrap.dedent("""
<math xmlns="http://www.w3.org/1998/Math/MathML">
<mstyle displaystyle="true">
<mn>2</mn><mo>*</mo><mi>x</mi><mo>+</mo><mn>3</mn><mo>*</mo><mi>y</mi>
</mstyle></math>"""),
'snuggletex_2x+3y.xml'),
('x+x+3y', textwrap.dedent("""
<math xmlns="http://www.w3.org/1998/Math/MathML">
<mstyle displaystyle="true">
<mi>x</mi><mo>+</mo><mi>x</mi><mo>+</mo><mn>3</mn><mo>*</mo><mi>y</mi>
</mstyle></math>"""),
'snuggletex_x+x+3y.xml'),
]
for (input_str, input_mathml, server_fixture) in correct_inputs:
print "Testing input: {0}".format(input_str)
server_resp = self._load_fixture(server_fixture)
self._assert_symbolic_grade(
problem, input_str, input_mathml,
'correct', snuggletex_resp=server_resp
)
def test_grade_single_input_incorrect(self):
problem = self.build_problem(math_display=True, expect="2*x+3*y")
# Incorrect answers
incorrect_inputs = [
('0', ''),
('4x+3y', textwrap.dedent("""
<math xmlns="http://www.w3.org/1998/Math/MathML">
<mstyle displaystyle="true">
<mn>4</mn><mo>*</mo><mi>x</mi><mo>+</mo><mn>3</mn><mo>*</mo><mi>y</mi>
</mstyle></math>""")),
]
for (input_str, input_mathml) in incorrect_inputs:
self._assert_symbolic_grade(problem, input_str, input_mathml, 'incorrect')
def test_complex_number_grade_correct(self):
problem = self.build_problem(
math_display=True,
expect="[[cos(theta),i*sin(theta)],[i*sin(theta),cos(theta)]]",
options=["matrix", "imaginary"]
)
correct_snuggletex = self._load_fixture('snuggletex_correct.html')
dynamath_input = self._load_fixture('dynamath_input.txt')
student_response = "cos(theta)*[[1,0],[0,1]] + i*sin(theta)*[[0,1],[1,0]]"
self._assert_symbolic_grade(
problem, student_response, dynamath_input,
'correct',
snuggletex_resp=correct_snuggletex
)
def test_complex_number_grade_incorrect(self):
problem = self.build_problem(math_display=True,
expect="[[cos(theta),i*sin(theta)],[i*sin(theta),cos(theta)]]",
options=["matrix", "imaginary"])
wrong_snuggletex = self._load_fixture('snuggletex_wrong.html')
dynamath_input = textwrap.dedent("""
<math xmlns="http://www.w3.org/1998/Math/MathML">
<mstyle displaystyle="true"><mn>2</mn></mstyle>
</math>
""")
self._assert_symbolic_grade(
problem, "2", dynamath_input,
'incorrect',
snuggletex_resp=wrong_snuggletex,
)
def test_multiple_inputs_exception(self):
# Should not allow multiple inputs, since we specify
# only one "expect" value
with self.assertRaises(Exception):
self.build_problem(math_display=True, expect="2*x+3*y", num_inputs=3)
def _assert_symbolic_grade(
self, problem, student_input, dynamath_input, expected_correctness,
snuggletex_resp=""
):
"""
Assert that the symbolic response has a certain grade.
`problem` is the capa problem containing the symbolic response.
`student_input` is the text the student entered.
`dynamath_input` is the JavaScript rendered MathML from the page.
`expected_correctness` is either "correct" or "incorrect"
`snuggletex_resp` is the simulated response from the Snuggletex server
"""
input_dict = {'1_2_1': str(student_input),
'1_2_1_dynamath': str(dynamath_input)}
# Simulate what the Snuggletex server would respond
with mock.patch.object(requests, 'post') as mock_post:
mock_post.return_value.text = snuggletex_resp
correct_map = problem.grade_answers(input_dict)
self.assertEqual(
correct_map.get_correctness('1_2_1'), expected_correctness
)
@staticmethod
def _load_fixture(relpath):
"""
Return a `unicode` object representing the contents
of the fixture file at `relpath` (relative to the test files dir)
"""
abspath = os.path.join(os.path.dirname(__file__), 'test_files', relpath)
with open(abspath) as fixture_file:
contents = fixture_file.read()
return contents.decode('utf8')
class OptionResponseTest(ResponseTest):
from capa.tests.response_xml_factory import OptionResponseXMLFactory
xml_factory_class = OptionResponseXMLFactory
def test_grade(self):
problem = self.build_problem(options=["first", "second", "third"],
correct_option="second")
# Assert that we get the expected grades
self.assert_grade(problem, "first", "incorrect")
self.assert_grade(problem, "second", "correct")
self.assert_grade(problem, "third", "incorrect")
# Options not in the list should be marked incorrect
self.assert_grade(problem, "invalid_option", "incorrect")
def test_quote_option(self):
# Test that option response properly escapes quotes inside options strings
problem = self.build_problem(options=["hasnot", "hasn't", "has'nt"],
correct_option="hasn't")
# Assert that correct option with a quote inside is marked correctly
self.assert_grade(problem, "hasnot", "incorrect")
self.assert_grade(problem, "hasn't", "correct")
self.assert_grade(problem, "hasn\'t", "correct")
self.assert_grade(problem, "has'nt", "incorrect")
class FormulaResponseTest(ResponseTest):
"""
Test the FormulaResponse class
"""
from capa.tests.response_xml_factory import FormulaResponseXMLFactory
xml_factory_class = FormulaResponseXMLFactory
def test_grade(self):
"""
Test basic functionality of FormulaResponse
Specifically, if it can understand equivalence of formulae
"""
# Sample variables x and y in the range [-10, 10]
sample_dict = {'x': (-10, 10), 'y': (-10, 10)}
# The expected solution is numerically equivalent to x+2y
problem = self.build_problem(sample_dict=sample_dict,
num_samples=10,
tolerance=0.01,
answer="x+2*y")
# Expect an equivalent formula to be marked correct
# 2x - x + y + y = x + 2y
input_formula = "2*x - x + y + y"
self.assert_grade(problem, input_formula, "correct")
# Expect an incorrect formula to be marked incorrect
# x + y != x + 2y
input_formula = "x + y"
self.assert_grade(problem, input_formula, "incorrect")
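# --- Illustrative sketch (standalone, hypothetical): FormulaResponse-style
# grading samples each variable, evaluates both expressions numerically, and
# accepts only if every sample agrees within the tolerance. ---
def _sketch_formulas_equal(f, g, ranges, samples=10, tol=0.01):
    """Numerically test whether callables f and g agree over sampled inputs."""
    import random as _random
    for _ in range(samples):
        env = dict((v, _random.uniform(lo, hi))
                   for v, (lo, hi) in ranges.items())
        expected = g(**env)
        if abs(f(**env) - expected) > tol * max(1.0, abs(expected)):
            return False
    return True
# e.g. _sketch_formulas_equal(lambda x, y: 2*x - x + y + y,
#                             lambda x, y: x + 2*y,
#                             {'x': (-10, 10), 'y': (-10, 10)})  # -> True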
def test_hint(self):
"""
Test the hint-giving functionality of FormulaResponse
"""
# Sample variables x and y in the range [-10, 10]
sample_dict = {'x': (-10, 10), 'y': (-10, 10)}
# Give a hint if the user leaves off the coefficient
# or leaves out x
hints = [('x + 3*y', 'y_coefficient', 'Check the coefficient of y'),
('2*y', 'missing_x', 'Try including the variable x')]
# The expected solution is numerically equivalent to x+2y
problem = self.build_problem(sample_dict=sample_dict,
num_samples=10,
tolerance=0.01,
answer="x+2*y",
hints=hints)
# Expect to receive a hint if we add an extra y
input_dict = {'1_2_1': "x + 2*y + y"}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'),
'Check the coefficient of y')
# Expect to receive a hint if we leave out x
input_dict = {'1_2_1': "2*y"}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'),
'Try including the variable x')
def test_script(self):
"""
Test if python script can be used to generate answers
"""
# Calculate the answer using a script
script = "calculated_ans = 'x+x'"
# Sample x in the range [-10,10]
sample_dict = {'x': (-10, 10)}
# The expected solution is numerically equivalent to 2*x
problem = self.build_problem(sample_dict=sample_dict,
num_samples=10,
tolerance=0.01,
answer="$calculated_ans",
script=script)
# Expect that the inputs are graded correctly
self.assert_grade(problem, '2*x', 'correct')
self.assert_grade(problem, '3*x', 'incorrect')
def test_grade_infinity(self):
"""
Test that a large input on a problem with relative tolerance isn't
erroneously marked as correct.
"""
sample_dict = {'x': (1, 2)}
# Test problem
problem = self.build_problem(sample_dict=sample_dict,
num_samples=10,
tolerance="1%",
answer="x")
# Expect such a large answer to be marked incorrect
input_formula = "x*1e999"
self.assert_grade(problem, input_formula, "incorrect")
# Expect such a large negative answer to be marked incorrect
input_formula = "-x*1e999"
self.assert_grade(problem, input_formula, "incorrect")
def test_grade_nan(self):
"""
Test that expressions that evaluate to NaN are not marked as correct.
"""
sample_dict = {'x': (1, 2)}
# Test problem
problem = self.build_problem(sample_dict=sample_dict,
num_samples=10,
tolerance="1%",
answer="x")
# Expect an incorrect answer (+ nan) to be marked incorrect
# Right now this evaluates to 'nan' for a given x (Python implementation-dependent)
input_formula = "10*x + 0*1e999"
self.assert_grade(problem, input_formula, "incorrect")
# Expect an correct answer (+ nan) to be marked incorrect
input_formula = "x + 0*1e999"
self.assert_grade(problem, input_formula, "incorrect")
def test_raises_zero_division_err(self):
"""
See if division by zero raises an error.
"""
sample_dict = {'x': (1, 2)}
problem = self.build_problem(sample_dict=sample_dict,
num_samples=10,
tolerance="1%",
answer="x") # Answer doesn't matter
input_dict = {'1_2_1': '1/0'}
self.assertRaises(StudentInputError, problem.grade_answers, input_dict)
def test_validate_answer(self):
"""
Makes sure that validate_answer works.
"""
sample_dict = {'x': (1, 2)}
problem = self.build_problem(
sample_dict=sample_dict,
num_samples=10,
tolerance="1%",
answer="x"
)
self.assertTrue(problem.responders.values()[0].validate_answer('14*x'))
self.assertFalse(problem.responders.values()[0].validate_answer('3*y+2*x'))
class StringResponseTest(ResponseTest):
from capa.tests.response_xml_factory import StringResponseXMLFactory
xml_factory_class = StringResponseXMLFactory
def test_backward_compatibility_for_multiple_answers(self):
"""
        Remove this test once support for the _or_ separator is removed.
"""
answers = ["Second", "Third", "Fourth"]
problem = self.build_problem(answer="_or_".join(answers), case_sensitive=True)
for answer in answers:
# Exact string should be correct
self.assert_grade(problem, answer, "correct")
# Other strings and the lowercase version of the string are incorrect
self.assert_grade(problem, "Other String", "incorrect")
problem = self.build_problem(answer="_or_".join(answers), case_sensitive=False)
for answer in answers:
# Exact string should be correct
self.assert_grade(problem, answer, "correct")
self.assert_grade(problem, answer.lower(), "correct")
self.assert_grade(problem, "Other String", "incorrect")
def test_regexp(self):
problem = self.build_problem(answer="Second", case_sensitive=False, regexp=True)
self.assert_grade(problem, "Second", "correct")
problem = self.build_problem(answer="sec", case_sensitive=False, regexp=True)
self.assert_grade(problem, "Second", "incorrect")
problem = self.build_problem(answer="sec.*", case_sensitive=False, regexp=True)
self.assert_grade(problem, "Second", "correct")
problem = self.build_problem(answer="sec.*", case_sensitive=True, regexp=True)
self.assert_grade(problem, "Second", "incorrect")
problem = self.build_problem(answer="Sec.*$", case_sensitive=False, regexp=True)
self.assert_grade(problem, "Second", "correct")
problem = self.build_problem(answer="^sec$", case_sensitive=False, regexp=True)
self.assert_grade(problem, "Second", "incorrect")
problem = self.build_problem(answer="^Sec(ond)?$", case_sensitive=False, regexp=True)
self.assert_grade(problem, "Second", "correct")
problem = self.build_problem(answer="^Sec(ond)?$", case_sensitive=False, regexp=True)
self.assert_grade(problem, "Sec", "correct")
problem = self.build_problem(answer="tre+", case_sensitive=False, regexp=True)
self.assert_grade(problem, "There is a tree", "incorrect")
problem = self.build_problem(answer=".*tre+", case_sensitive=False, regexp=True)
self.assert_grade(problem, "There is a tree", "correct")
answers = [
"Martin Luther King Junior",
"Doctor Martin Luther King Junior",
"Dr. Martin Luther King Jr.",
"Martin Luther King"
]
problem = self.build_problem(answer="\w*\.?.*Luther King\s*.*", case_sensitive=True, regexp=True)
for answer in answers:
self.assert_grade(problem, answer, "correct")
problem = self.build_problem(answer="^(-\|){2,5}$", case_sensitive=False, regexp=True)
self.assert_grade(problem, "-|-|-|", "correct")
self.assert_grade(problem, "-|", "incorrect")
self.assert_grade(problem, "-|-|-|-|-|-|", "incorrect")
regexps = [
"^One$",
"two",
"^thre+",
"^4|Four$",
]
problem = self.build_problem(
answer="just_sample",
case_sensitive=False,
regexp=True,
additional_answers=regexps
)
self.assert_grade(problem, "One", "correct")
self.assert_grade(problem, "two", "correct")
self.assert_grade(problem, "!!two!!", "correct")
self.assert_grade(problem, "threeeee", "correct")
self.assert_grade(problem, "three", "correct")
self.assert_grade(problem, "4", "correct")
self.assert_grade(problem, "Four", "correct")
self.assert_grade(problem, "Five", "incorrect")
self.assert_grade(problem, "|", "incorrect")
# test unicode
problem = self.build_problem(answer=u"æ", case_sensitive=False, regexp=True, additional_answers=[u'ö'])
self.assert_grade(problem, u"æ", "correct")
self.assert_grade(problem, u"ö", "correct")
self.assert_grade(problem, u"î", "incorrect")
self.assert_grade(problem, u"o", "incorrect")
def test_backslash_and_unicode_regexps(self):
"""
Test some special cases of [unicode] regexps.
        One needs to use either r'' strings or write the real `repr` of a unicode string, because of
        the following (from the Python docs, http://docs.python.org/2/library/re.html):
        'for example, to match a literal backslash, one might have to write '\\\\' as the pattern string,
        because the regular expression must be \\,
        and each backslash must be expressed as \\ inside a regular Python string literal.'
        Example of a real use case in Studio:
        a) the user inputs a regexp in the usual regexp language,
        b) the regexp is saved to XML and is read back in Python as the repr of that string.
        So a\d in the front-end editor becomes a\\\\d in the XML, and it will match a1 as a student answer.
"""
problem = self.build_problem(answer=ur"5\\æ", case_sensitive=False, regexp=True)
self.assert_grade(problem, u"5\æ", "correct")
problem = self.build_problem(answer=u"5\\\\æ", case_sensitive=False, regexp=True)
self.assert_grade(problem, u"5\æ", "correct")
def test_backslash(self):
problem = self.build_problem(answer=u"a\\\\c1", case_sensitive=False, regexp=True)
self.assert_grade(problem, u"a\c1", "correct")
def test_special_chars(self):
problem = self.build_problem(answer=ur"a \s1", case_sensitive=False, regexp=True)
self.assert_grade(problem, u"a 1", "correct")
def test_case_sensitive(self):
# Test single answer
problem = self.build_problem(answer="Second", case_sensitive=True)
# Exact string should be correct
self.assert_grade(problem, "Second", "correct")
# Other strings and the lowercase version of the string are incorrect
self.assert_grade(problem, "Other String", "incorrect")
self.assert_grade(problem, "second", "incorrect")
# Test multiple answers
answers = ["Second", "Third", "Fourth"]
problem = self.build_problem(answer="sample_answer", case_sensitive=True, additional_answers=answers)
for answer in answers:
# Exact string should be correct
self.assert_grade(problem, answer, "correct")
# Other strings and the lowercase version of the string are incorrect
self.assert_grade(problem, "Other String", "incorrect")
self.assert_grade(problem, "second", "incorrect")
def test_bogus_escape_not_raised(self):
"""
        We now add ^ and $ around the regexp, so no bogus escape error will be raised.
"""
problem = self.build_problem(answer=u"\\", case_sensitive=False, regexp=True)
self.assert_grade(problem, u"\\", "incorrect")
# right way to search for \
problem = self.build_problem(answer=u"\\\\", case_sensitive=False, regexp=True)
self.assert_grade(problem, u"\\", "correct")
def test_case_insensitive(self):
# Test single answer
problem = self.build_problem(answer="Second", case_sensitive=False)
# Both versions of the string should be allowed, regardless
# of capitalization
self.assert_grade(problem, "Second", "correct")
self.assert_grade(problem, "second", "correct")
# Other strings are not allowed
self.assert_grade(problem, "Other String", "incorrect")
# Test multiple answers
answers = ["Second", "Third", "Fourth"]
problem = self.build_problem(answer="sample_answer", case_sensitive=False, additional_answers=answers)
for answer in answers:
# Exact string should be correct
self.assert_grade(problem, answer, "correct")
self.assert_grade(problem, answer.lower(), "correct")
        # Other strings are incorrect
self.assert_grade(problem, "Other String", "incorrect")
def test_partial_matching(self):
problem = self.build_problem(answer="a2", case_sensitive=False, regexp=True, additional_answers=['.?\\d.?'])
self.assert_grade(problem, "a3", "correct")
self.assert_grade(problem, "3a", "correct")
def test_exception(self):
problem = self.build_problem(answer="a2", case_sensitive=False, regexp=True, additional_answers=['?\\d?'])
with self.assertRaises(Exception) as cm:
self.assert_grade(problem, "a3", "correct")
exception_message = cm.exception.message
self.assertIn("nothing to repeat", exception_message)
def test_hints(self):
hints = [
("wisconsin", "wisc", "The state capital of Wisconsin is Madison"),
("minnesota", "minn", "The state capital of Minnesota is St. Paul"),
]
problem = self.build_problem(
answer="Michigan",
case_sensitive=False,
hints=hints,
)
# We should get a hint for Wisconsin
input_dict = {'1_2_1': 'Wisconsin'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'),
"The state capital of Wisconsin is Madison")
# We should get a hint for Minnesota
input_dict = {'1_2_1': 'Minnesota'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'),
"The state capital of Minnesota is St. Paul")
# We should NOT get a hint for Michigan (the correct answer)
input_dict = {'1_2_1': 'Michigan'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "")
# We should NOT get a hint for any other string
input_dict = {'1_2_1': 'California'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "")
def test_hints_regexp_and_answer_regexp(self):
different_student_answers = [
"May be it is Boston",
"Boston, really?",
"Boston",
"OK, I see, this is Boston",
]
# if problem has regexp = true, it will accept hints written in regexp
hints = [
("wisconsin", "wisc", "The state capital of Wisconsin is Madison"),
("minnesota", "minn", "The state capital of Minnesota is St. Paul"),
(".*Boston.*", "bst", "First letter of correct answer is M."),
('^\\d9$', "numbers", "Should not end with 9."),
]
additional_answers = [
'^\\d[0-8]$',
]
problem = self.build_problem(
answer="Michigan",
case_sensitive=False,
hints=hints,
additional_answers=additional_answers,
regexp=True
)
# We should get a hint for Wisconsin
input_dict = {'1_2_1': 'Wisconsin'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'),
"The state capital of Wisconsin is Madison")
# We should get a hint for Minnesota
input_dict = {'1_2_1': 'Minnesota'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'),
"The state capital of Minnesota is St. Paul")
# We should NOT get a hint for Michigan (the correct answer)
input_dict = {'1_2_1': 'Michigan'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "")
# We should NOT get a hint for any other string
input_dict = {'1_2_1': 'California'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "")
# We should get the same hint for each answer
for answer in different_student_answers:
input_dict = {'1_2_1': answer}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "First letter of correct answer is M.")
input_dict = {'1_2_1': '59'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "Should not end with 9.")
input_dict = {'1_2_1': '57'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "")
def test_computed_hints(self):
problem = self.build_problem(
answer="Michigan",
hintfn="gimme_a_hint",
script=textwrap.dedent("""
def gimme_a_hint(answer_ids, student_answers, new_cmap, old_cmap):
aid = answer_ids[0]
answer = student_answers[aid]
new_cmap.set_hint_and_mode(aid, answer+"??", "always")
""")
)
input_dict = {'1_2_1': 'Hello'}
correct_map = problem.grade_answers(input_dict)
self.assertEquals(correct_map.get_hint('1_2_1'), "Hello??")
def test_hint_function_randomization(self):
# The hint function should get the seed from the problem.
problem = self.build_problem(
answer="1",
hintfn="gimme_a_random_hint",
script=textwrap.dedent("""
def gimme_a_random_hint(answer_ids, student_answers, new_cmap, old_cmap):
answer = {code}
new_cmap.set_hint_and_mode(answer_ids[0], answer, "always")
""".format(code=self._get_random_number_code()))
)
correct_map = problem.grade_answers({'1_2_1': '2'})
hint = correct_map.get_hint('1_2_1')
self.assertEqual(hint, self._get_random_number_result(problem.seed))
class CodeResponseTest(ResponseTest):
from capa.tests.response_xml_factory import CodeResponseXMLFactory
xml_factory_class = CodeResponseXMLFactory
def setUp(self):
super(CodeResponseTest, self).setUp()
grader_payload = json.dumps({"grader": "ps04/grade_square.py"})
self.problem = self.build_problem(initial_display="def square(x):",
answer_display="answer",
grader_payload=grader_payload,
num_responses=2)
@staticmethod
def make_queuestate(key, time):
"""Create queuestate dict"""
timestr = datetime.strftime(time, dateformat)
return {'key': key, 'time': timestr}
def test_is_queued(self):
"""
Simple test of whether LoncapaProblem knows when it's been queued
"""
answer_ids = sorted(self.problem.get_question_answers())
# CodeResponse requires internal CorrectMap state. Build it now in the unqueued state
cmap = CorrectMap()
for answer_id in answer_ids:
cmap.update(CorrectMap(answer_id=answer_id, queuestate=None))
self.problem.correct_map.update(cmap)
self.assertEquals(self.problem.is_queued(), False)
# Now we queue the LCP
cmap = CorrectMap()
for i, answer_id in enumerate(answer_ids):
queuestate = CodeResponseTest.make_queuestate(i, datetime.now(UTC))
cmap.update(CorrectMap(answer_id=answer_ids[i], queuestate=queuestate))
self.problem.correct_map.update(cmap)
self.assertEquals(self.problem.is_queued(), True)
def test_update_score(self):
'''
Test whether LoncapaProblem.update_score can deliver queued result to the right subproblem
'''
answer_ids = sorted(self.problem.get_question_answers())
# CodeResponse requires internal CorrectMap state. Build it now in the queued state
old_cmap = CorrectMap()
for i, answer_id in enumerate(answer_ids):
queuekey = 1000 + i
queuestate = CodeResponseTest.make_queuestate(queuekey, datetime.now(UTC))
old_cmap.update(CorrectMap(answer_id=answer_ids[i], queuestate=queuestate))
# Message format common to external graders
grader_msg = '<span>MESSAGE</span>' # Must be valid XML
correct_score_msg = json.dumps({'correct': True, 'score': 1, 'msg': grader_msg})
incorrect_score_msg = json.dumps({'correct': False, 'score': 0, 'msg': grader_msg})
xserver_msgs = {'correct': correct_score_msg,
'incorrect': incorrect_score_msg, }
# Incorrect queuekey, state should not be updated
for correctness in ['correct', 'incorrect']:
self.problem.correct_map = CorrectMap()
self.problem.correct_map.update(old_cmap) # Deep copy
self.problem.update_score(xserver_msgs[correctness], queuekey=0)
self.assertEquals(self.problem.correct_map.get_dict(), old_cmap.get_dict()) # Deep comparison
for answer_id in answer_ids:
                self.assertTrue(self.problem.correct_map.is_queued(answer_id))  # Should still be queued, since message undelivered
# Correct queuekey, state should be updated
for correctness in ['correct', 'incorrect']:
for i, answer_id in enumerate(answer_ids):
self.problem.correct_map = CorrectMap()
self.problem.correct_map.update(old_cmap)
new_cmap = CorrectMap()
new_cmap.update(old_cmap)
npoints = 1 if correctness == 'correct' else 0
new_cmap.set(answer_id=answer_id, npoints=npoints, correctness=correctness, msg=grader_msg, queuestate=None)
self.problem.update_score(xserver_msgs[correctness], queuekey=1000 + i)
self.assertEquals(self.problem.correct_map.get_dict(), new_cmap.get_dict())
for j, test_id in enumerate(answer_ids):
if j == i:
self.assertFalse(self.problem.correct_map.is_queued(test_id)) # Should be dequeued, message delivered
else:
self.assertTrue(self.problem.correct_map.is_queued(test_id)) # Should be queued, message undelivered
def test_recentmost_queuetime(self):
'''
Test whether the LoncapaProblem knows about the time of queue requests
'''
answer_ids = sorted(self.problem.get_question_answers())
# CodeResponse requires internal CorrectMap state. Build it now in the unqueued state
cmap = CorrectMap()
for answer_id in answer_ids:
cmap.update(CorrectMap(answer_id=answer_id, queuestate=None))
self.problem.correct_map.update(cmap)
self.assertEquals(self.problem.get_recentmost_queuetime(), None)
# CodeResponse requires internal CorrectMap state. Build it now in the queued state
cmap = CorrectMap()
for i, answer_id in enumerate(answer_ids):
queuekey = 1000 + i
latest_timestamp = datetime.now(UTC)
queuestate = CodeResponseTest.make_queuestate(queuekey, latest_timestamp)
cmap.update(CorrectMap(answer_id=answer_id, queuestate=queuestate))
self.problem.correct_map.update(cmap)
        # Queue state only tracks time up to the second
latest_timestamp = datetime.strptime(
datetime.strftime(latest_timestamp, dateformat), dateformat
).replace(tzinfo=UTC)
self.assertEquals(self.problem.get_recentmost_queuetime(), latest_timestamp)
def test_convert_files_to_filenames(self):
'''
Test whether file objects are converted to filenames without altering other structures
'''
problem_file = os.path.join(os.path.dirname(__file__), "test_files/filename_convert_test.txt")
with open(problem_file) as fp:
answers_with_file = {'1_2_1': 'String-based answer',
'1_3_1': ['answer1', 'answer2', 'answer3'],
'1_4_1': [fp, fp]}
answers_converted = convert_files_to_filenames(answers_with_file)
self.assertEquals(answers_converted['1_2_1'], 'String-based answer')
self.assertEquals(answers_converted['1_3_1'], ['answer1', 'answer2', 'answer3'])
self.assertEquals(answers_converted['1_4_1'], [fp.name, fp.name])
class ChoiceResponseTest(ResponseTest):
from capa.tests.response_xml_factory import ChoiceResponseXMLFactory
xml_factory_class = ChoiceResponseXMLFactory
def test_radio_group_grade(self):
problem = self.build_problem(choice_type='radio',
choices=[False, True, False])
# Check that we get the expected results
self.assert_grade(problem, 'choice_0', 'incorrect')
self.assert_grade(problem, 'choice_1', 'correct')
self.assert_grade(problem, 'choice_2', 'incorrect')
# No choice 3 exists --> mark incorrect
self.assert_grade(problem, 'choice_3', 'incorrect')
def test_checkbox_group_grade(self):
problem = self.build_problem(choice_type='checkbox',
choices=[False, True, True])
# Check that we get the expected results
# (correct if and only if BOTH correct choices chosen)
self.assert_grade(problem, ['choice_1', 'choice_2'], 'correct')
self.assert_grade(problem, 'choice_1', 'incorrect')
self.assert_grade(problem, 'choice_2', 'incorrect')
self.assert_grade(problem, ['choice_0', 'choice_1'], 'incorrect')
self.assert_grade(problem, ['choice_0', 'choice_2'], 'incorrect')
# No choice 3 exists --> mark incorrect
self.assert_grade(problem, 'choice_3', 'incorrect')
class JavascriptResponseTest(ResponseTest):
from capa.tests.response_xml_factory import JavascriptResponseXMLFactory
xml_factory_class = JavascriptResponseXMLFactory
def test_grade(self):
# Compile coffee files into javascript used by the response
coffee_file_path = os.path.dirname(__file__) + "/test_files/js/*.coffee"
os.system("node_modules/.bin/coffee -c %s" % (coffee_file_path))
capa_system = test_capa_system()
capa_system.can_execute_unsafe_code = lambda: True
problem = self.build_problem(
capa_system=capa_system,
generator_src="test_problem_generator.js",
grader_src="test_problem_grader.js",
display_class="TestProblemDisplay",
display_src="test_problem_display.js",
param_dict={'value': '4'},
)
# Test that we get graded correctly
self.assert_grade(problem, json.dumps({0: 4}), "correct")
self.assert_grade(problem, json.dumps({0: 5}), "incorrect")
def test_cant_execute_javascript(self):
# If the system says to disallow unsafe code execution, then making
# this problem will raise an exception.
capa_system = test_capa_system()
capa_system.can_execute_unsafe_code = lambda: False
with self.assertRaises(LoncapaProblemError):
self.build_problem(
capa_system=capa_system,
generator_src="test_problem_generator.js",
grader_src="test_problem_grader.js",
display_class="TestProblemDisplay",
display_src="test_problem_display.js",
param_dict={'value': '4'},
)
class NumericalResponseTest(ResponseTest):
from capa.tests.response_xml_factory import NumericalResponseXMLFactory
xml_factory_class = NumericalResponseXMLFactory
# We blend the line between integration (using evaluator) and exclusively
# unit testing the NumericalResponse (mocking out the evaluator)
    # For simple things it's not worth the effort.
def test_grade_exact(self):
problem = self.build_problem(answer=4)
correct_responses = ["4", "4.0", "4.00"]
incorrect_responses = ["", "3.9", "4.1", "0"]
self.assert_multiple_grade(problem, correct_responses, incorrect_responses)
def test_grade_decimal_tolerance(self):
problem = self.build_problem(answer=4, tolerance=0.1)
correct_responses = ["4.0", "4.00", "4.09", "3.91"]
incorrect_responses = ["", "4.11", "3.89", "0"]
self.assert_multiple_grade(problem, correct_responses, incorrect_responses)
def test_grade_percent_tolerance(self):
problem = self.build_problem(answer=4, tolerance="10%")
correct_responses = ["4.0", "4.3", "3.7", "4.30", "3.70"]
incorrect_responses = ["", "4.5", "3.5", "0"]
self.assert_multiple_grade(problem, correct_responses, incorrect_responses)
def test_floats(self):
"""
Default tolerance for all responsetypes is 1e-3%.
"""
problem_setup = [
            # [given_answer, [list of correct responses], [list of incorrect responses]]
[1, ["1"], ["1.1"],],
[2.0, ["2.0"], ["1.0"],],
[4, ["4.0", "4.00004"], ["4.00005"]],
[0.00016, ["1.6*10^-4"], [""]],
[0.000016, ["1.6*10^-5"], ["0.000165"]],
[1.9e24, ["1.9*10^24"], ["1.9001*10^24"]],
[2e-15, ["2*10^-15"], [""]],
[3141592653589793238., ["3141592653589793115."], [""]],
[0.1234567, ["0.123456", "0.1234561"], ["0.123451"]],
[1e-5, ["1e-5", "1.0e-5"], ["-1e-5", "2*1e-5"]],
]
for given_answer, correct_responses, incorrect_responses in problem_setup:
problem = self.build_problem(answer=given_answer)
self.assert_multiple_grade(problem, correct_responses, incorrect_responses)
def test_grade_with_script(self):
script_text = "computed_response = math.sqrt(4)"
problem = self.build_problem(answer="$computed_response", script=script_text)
correct_responses = ["2", "2.0"]
incorrect_responses = ["", "2.01", "1.99", "0"]
self.assert_multiple_grade(problem, correct_responses, incorrect_responses)
def test_raises_zero_division_err(self):
"""See if division by zero is handled correctly."""
problem = self.build_problem(answer="1") # Answer doesn't matter
input_dict = {'1_2_1': '1/0'}
with self.assertRaises(StudentInputError):
problem.grade_answers(input_dict)
def test_staff_inputs_expressions(self):
"""Test that staff may enter in an expression as the answer."""
problem = self.build_problem(answer="1/3", tolerance=1e-3)
correct_responses = ["1/3", "0.333333"]
incorrect_responses = []
self.assert_multiple_grade(problem, correct_responses, incorrect_responses)
def test_staff_inputs_expressions_legacy(self):
"""Test that staff may enter in a complex number as the answer."""
problem = self.build_problem(answer="1+1j", tolerance=1e-3)
self.assert_grade(problem, '1+j', 'correct')
@mock.patch('capa.responsetypes.log')
def test_staff_inputs_bad_syntax(self, mock_log):
"""Test that staff may enter in a complex number as the answer."""
staff_ans = "clearly bad syntax )[+1e"
problem = self.build_problem(answer=staff_ans, tolerance=1e-3)
msg = "There was a problem with the staff answer to this problem"
with self.assertRaisesRegexp(StudentInputError, msg):
self.assert_grade(problem, '1+j', 'correct')
mock_log.debug.assert_called_once_with(
"Content error--answer '%s' is not a valid number", staff_ans
)
@mock.patch('capa.responsetypes.log')
def test_responsetype_i18n(self, mock_log):
"""Test that LoncapaSystem has an i18n that works."""
staff_ans = "clearly bad syntax )[+1e"
problem = self.build_problem(answer=staff_ans, tolerance=1e-3)
class FakeTranslations(object):
"""A fake gettext.Translations object."""
def ugettext(self, text):
"""Return the 'translation' of `text`."""
if text == "There was a problem with the staff answer to this problem.":
text = "TRANSLATED!"
return text
problem.capa_system.i18n = FakeTranslations()
with self.assertRaisesRegexp(StudentInputError, "TRANSLATED!"):
self.assert_grade(problem, '1+j', 'correct')
def test_grade_infinity(self):
"""
Check that infinity doesn't automatically get marked correct.
This resolves a bug where a problem with relative tolerance would
pass with any arbitrarily large student answer.
"""
mapping = {
'some big input': float('inf'),
'some neg input': -float('inf'),
'weird NaN input': float('nan'),
'4': 4
}
def evaluator_side_effect(_, __, math_string):
"""Look up the given response for `math_string`."""
return mapping[math_string]
problem = self.build_problem(answer=4, tolerance='10%')
with mock.patch('capa.responsetypes.evaluator') as mock_eval:
mock_eval.side_effect = evaluator_side_effect
self.assert_grade(problem, 'some big input', 'incorrect')
self.assert_grade(problem, 'some neg input', 'incorrect')
self.assert_grade(problem, 'weird NaN input', 'incorrect')
def test_err_handling(self):
"""
See that `StudentInputError`s are raised when things go wrong.
"""
problem = self.build_problem(answer=4)
errors = [ # (exception raised, message to student)
(calc.UndefinedVariable("x"), r"You may not use variables \(x\) in numerical problems"),
(ValueError("factorial() mess-up"), "factorial function evaluated outside its domain"),
(ValueError(), "Could not interpret '.*' as a number"),
(pyparsing.ParseException("oopsie"), "Invalid math syntax"),
(ZeroDivisionError(), "Could not interpret '.*' as a number")
]
with mock.patch('capa.responsetypes.evaluator') as mock_eval:
for err, msg_regex in errors:
def evaluator_side_effect(_, __, math_string):
"""Raise an error only for the student input."""
if math_string != '4':
raise err
mock_eval.side_effect = evaluator_side_effect
with self.assertRaisesRegexp(StudentInputError, msg_regex):
problem.grade_answers({'1_2_1': 'foobar'})
def test_compare_answer(self):
"""Tests the answer compare function."""
problem = self.build_problem(answer="42")
responder = problem.responders.values()[0]
self.assertTrue(responder.compare_answer('48', '8*6'))
self.assertFalse(responder.compare_answer('48', '9*5'))
def test_validate_answer(self):
"""Tests the answer validation function."""
problem = self.build_problem(answer="42")
responder = problem.responders.values()[0]
self.assertTrue(responder.validate_answer('23.5'))
self.assertFalse(responder.validate_answer('fish'))
class CustomResponseTest(ResponseTest):
from capa.tests.response_xml_factory import CustomResponseXMLFactory
xml_factory_class = CustomResponseXMLFactory
def test_inline_code(self):
# For inline code, we directly modify global context variables
# 'answers' is a list of answers provided to us
# 'correct' is a list we fill in with True/False
# 'expect' is given to us (if provided in the XML)
inline_script = """correct[0] = 'correct' if (answers['1_2_1'] == expect) else 'incorrect'"""
problem = self.build_problem(answer=inline_script, expect="42")
# Check results
self.assert_grade(problem, '42', 'correct')
self.assert_grade(problem, '0', 'incorrect')
def test_inline_message(self):
# Inline code can update the global messages list
# to pass messages to the CorrectMap for a particular input
# The code can also set the global overall_message (str)
# to pass a message that applies to the whole response
inline_script = textwrap.dedent("""
messages[0] = "Test Message"
overall_message = "Overall message"
""")
problem = self.build_problem(answer=inline_script)
input_dict = {'1_2_1': '0'}
correctmap = problem.grade_answers(input_dict)
# Check that the message for the particular input was received
input_msg = correctmap.get_msg('1_2_1')
self.assertEqual(input_msg, "Test Message")
# Check that the overall message (for the whole response) was received
overall_msg = correctmap.get_overall_message()
self.assertEqual(overall_msg, "Overall message")
def test_inline_randomization(self):
# Make sure the seed from the problem gets fed into the script execution.
inline_script = "messages[0] = {code}".format(code=self._get_random_number_code())
problem = self.build_problem(answer=inline_script)
input_dict = {'1_2_1': '0'}
correctmap = problem.grade_answers(input_dict)
input_msg = correctmap.get_msg('1_2_1')
self.assertEqual(input_msg, self._get_random_number_result(problem.seed))
def test_function_code_single_input(self):
# For function code, we pass in these arguments:
#
# 'expect' is the expect attribute of the <customresponse>
#
# 'answer_given' is the answer the student gave (if there is just one input)
# or an ordered list of answers (if there are multiple inputs)
#
# The function should return a dict of the form
# { 'ok': BOOL, 'msg': STRING }
#
script = textwrap.dedent("""
def check_func(expect, answer_given):
return {'ok': answer_given == expect, 'msg': 'Message text'}
""")
problem = self.build_problem(script=script, cfn="check_func", expect="42")
# Correct answer
input_dict = {'1_2_1': '42'}
correct_map = problem.grade_answers(input_dict)
correctness = correct_map.get_correctness('1_2_1')
msg = correct_map.get_msg('1_2_1')
self.assertEqual(correctness, 'correct')
self.assertEqual(msg, "Message text")
# Incorrect answer
input_dict = {'1_2_1': '0'}
correct_map = problem.grade_answers(input_dict)
correctness = correct_map.get_correctness('1_2_1')
msg = correct_map.get_msg('1_2_1')
self.assertEqual(correctness, 'incorrect')
self.assertEqual(msg, "Message text")
def test_function_code_multiple_input_no_msg(self):
# Check functions also have the option of returning
# a single boolean value
# If true, mark all the inputs correct
# If false, mark all the inputs incorrect
script = textwrap.dedent("""
def check_func(expect, answer_given):
return (answer_given[0] == expect and
answer_given[1] == expect)
""")
problem = self.build_problem(script=script, cfn="check_func",
expect="42", num_inputs=2)
# Correct answer -- expect both inputs marked correct
input_dict = {'1_2_1': '42', '1_2_2': '42'}
correct_map = problem.grade_answers(input_dict)
correctness = correct_map.get_correctness('1_2_1')
self.assertEqual(correctness, 'correct')
correctness = correct_map.get_correctness('1_2_2')
self.assertEqual(correctness, 'correct')
# One answer incorrect -- expect both inputs marked incorrect
input_dict = {'1_2_1': '0', '1_2_2': '42'}
correct_map = problem.grade_answers(input_dict)
correctness = correct_map.get_correctness('1_2_1')
self.assertEqual(correctness, 'incorrect')
correctness = correct_map.get_correctness('1_2_2')
self.assertEqual(correctness, 'incorrect')
def test_function_code_multiple_inputs(self):
# If the <customresponse> has multiple inputs associated with it,
# the check function can return a dict of the form:
#
# {'overall_message': STRING,
# 'input_list': [{'ok': BOOL, 'msg': STRING}, ...] }
#
# 'overall_message' is displayed at the end of the response
#
# 'input_list' contains dictionaries representing the correctness
# and message for each input.
script = textwrap.dedent("""
def check_func(expect, answer_given):
check1 = (int(answer_given[0]) == 1)
check2 = (int(answer_given[1]) == 2)
check3 = (int(answer_given[2]) == 3)
return {'overall_message': 'Overall message',
'input_list': [
{'ok': check1, 'msg': 'Feedback 1'},
{'ok': check2, 'msg': 'Feedback 2'},
{'ok': check3, 'msg': 'Feedback 3'} ] }
""")
problem = self.build_problem(script=script,
cfn="check_func", num_inputs=3)
# Grade the inputs (one input incorrect)
input_dict = {'1_2_1': '-999', '1_2_2': '2', '1_2_3': '3'}
correct_map = problem.grade_answers(input_dict)
# Expect that we receive the overall message (for the whole response)
self.assertEqual(correct_map.get_overall_message(), "Overall message")
# Expect that the inputs were graded individually
self.assertEqual(correct_map.get_correctness('1_2_1'), 'incorrect')
self.assertEqual(correct_map.get_correctness('1_2_2'), 'correct')
self.assertEqual(correct_map.get_correctness('1_2_3'), 'correct')
# Expect that we received messages for each individual input
self.assertEqual(correct_map.get_msg('1_2_1'), 'Feedback 1')
self.assertEqual(correct_map.get_msg('1_2_2'), 'Feedback 2')
self.assertEqual(correct_map.get_msg('1_2_3'), 'Feedback 3')
def test_function_code_with_extra_args(self):
script = textwrap.dedent("""\
def check_func(expect, answer_given, options, dynamath):
assert options == "xyzzy", "Options was %r" % options
return {'ok': answer_given == expect, 'msg': 'Message text'}
""")
problem = self.build_problem(script=script, cfn="check_func", expect="42", options="xyzzy", cfn_extra_args="options dynamath")
# Correct answer
input_dict = {'1_2_1': '42'}
correct_map = problem.grade_answers(input_dict)
correctness = correct_map.get_correctness('1_2_1')
msg = correct_map.get_msg('1_2_1')
self.assertEqual(correctness, 'correct')
self.assertEqual(msg, "Message text")
# Incorrect answer
input_dict = {'1_2_1': '0'}
correct_map = problem.grade_answers(input_dict)
correctness = correct_map.get_correctness('1_2_1')
msg = correct_map.get_msg('1_2_1')
self.assertEqual(correctness, 'incorrect')
self.assertEqual(msg, "Message text")
def test_multiple_inputs_return_one_status(self):
# When given multiple inputs, the 'answer_given' argument
# to the check_func() is a list of inputs
#
# The sample script below marks the problem as correct
# if and only if it receives answer_given=[1,2,3]
# (or string values ['1','2','3'])
#
# Since we return a dict describing the status of one input,
# we expect that the same 'ok' value is applied to each
# of the inputs.
script = textwrap.dedent("""
def check_func(expect, answer_given):
check1 = (int(answer_given[0]) == 1)
check2 = (int(answer_given[1]) == 2)
check3 = (int(answer_given[2]) == 3)
return {'ok': (check1 and check2 and check3),
'msg': 'Message text'}
""")
problem = self.build_problem(script=script,
cfn="check_func", num_inputs=3)
# Grade the inputs (one input incorrect)
input_dict = {'1_2_1': '-999', '1_2_2': '2', '1_2_3': '3'}
correct_map = problem.grade_answers(input_dict)
# Everything marked incorrect
self.assertEqual(correct_map.get_correctness('1_2_1'), 'incorrect')
self.assertEqual(correct_map.get_correctness('1_2_2'), 'incorrect')
self.assertEqual(correct_map.get_correctness('1_2_3'), 'incorrect')
# Grade the inputs (everything correct)
input_dict = {'1_2_1': '1', '1_2_2': '2', '1_2_3': '3'}
correct_map = problem.grade_answers(input_dict)
        # Everything marked correct
self.assertEqual(correct_map.get_correctness('1_2_1'), 'correct')
self.assertEqual(correct_map.get_correctness('1_2_2'), 'correct')
self.assertEqual(correct_map.get_correctness('1_2_3'), 'correct')
# Message is interpreted as an "overall message"
self.assertEqual(correct_map.get_overall_message(), 'Message text')
def test_script_exception_function(self):
# Construct a script that will raise an exception
script = textwrap.dedent("""
def check_func(expect, answer_given):
raise Exception("Test")
""")
problem = self.build_problem(script=script, cfn="check_func")
# Expect that an exception gets raised when we check the answer
with self.assertRaises(ResponseError):
problem.grade_answers({'1_2_1': '42'})
def test_script_exception_inline(self):
# Construct a script that will raise an exception
script = 'raise Exception("Test")'
problem = self.build_problem(answer=script)
# Expect that an exception gets raised when we check the answer
with self.assertRaises(ResponseError):
problem.grade_answers({'1_2_1': '42'})
def test_invalid_dict_exception(self):
# Construct a script that passes back an invalid dict format
script = textwrap.dedent("""
def check_func(expect, answer_given):
return {'invalid': 'test'}
""")
problem = self.build_problem(script=script, cfn="check_func")
# Expect that an exception gets raised when we check the answer
with self.assertRaises(ResponseError):
problem.grade_answers({'1_2_1': '42'})
def test_setup_randomization(self):
# Ensure that the problem setup script gets the random seed from the problem.
script = textwrap.dedent("""
num = {code}
""".format(code=self._get_random_number_code()))
problem = self.build_problem(script=script)
self.assertEqual(problem.context['num'], self._get_random_number_result(problem.seed))
def test_check_function_randomization(self):
# The check function should get random-seeded from the problem.
script = textwrap.dedent("""
def check_func(expect, answer_given):
return {{'ok': True, 'msg': {code} }}
""".format(code=self._get_random_number_code()))
problem = self.build_problem(script=script, cfn="check_func", expect="42")
input_dict = {'1_2_1': '42'}
correct_map = problem.grade_answers(input_dict)
msg = correct_map.get_msg('1_2_1')
self.assertEqual(msg, self._get_random_number_result(problem.seed))
def test_random_isnt_none(self):
# Bug LMS-500 says random.seed(10) fails with:
# File "<string>", line 61, in <module>
# File "/usr/lib/python2.7/random.py", line 116, in seed
# super(Random, self).seed(a)
# TypeError: must be type, not None
r = random.Random()
r.seed(10)
num = r.randint(0, 1e9)
script = textwrap.dedent("""
random.seed(10)
num = random.randint(0, 1e9)
""")
problem = self.build_problem(script=script)
self.assertEqual(problem.context['num'], num)
def test_module_imports_inline(self):
'''
Check that the correct modules are available to custom
response scripts
'''
for module_name in ['random', 'numpy', 'math', 'scipy',
'calc', 'eia', 'chemcalc', 'chemtools',
'miller', 'draganddrop']:
# Create a script that checks that the name is defined
# If the name is not defined, then the script
# will raise an exception
script = textwrap.dedent('''
correct[0] = 'correct'
assert('%s' in globals())''' % module_name)
# Create the problem
problem = self.build_problem(answer=script)
# Expect that we can grade an answer without
# getting an exception
try:
problem.grade_answers({'1_2_1': '42'})
except ResponseError:
self.fail("Could not use name '{0}s' in custom response".format(module_name))
def test_module_imports_function(self):
'''
Check that the correct modules are available to custom
response scripts
'''
for module_name in ['random', 'numpy', 'math', 'scipy',
'calc', 'eia', 'chemcalc', 'chemtools',
'miller', 'draganddrop']:
# Create a script that checks that the name is defined
# If the name is not defined, then the script
# will raise an exception
script = textwrap.dedent('''
def check_func(expect, answer_given):
assert('%s' in globals())
return True''' % module_name)
# Create the problem
problem = self.build_problem(script=script, cfn="check_func")
# Expect that we can grade an answer without
# getting an exception
try:
problem.grade_answers({'1_2_1': '42'})
except ResponseError:
self.fail("Could not use name '{0}s' in custom response".format(module_name))
class SchematicResponseTest(ResponseTest):
from capa.tests.response_xml_factory import SchematicResponseXMLFactory
xml_factory_class = SchematicResponseXMLFactory
def test_grade(self):
# Most of the schematic-specific work is handled elsewhere
# (in client-side JavaScript)
# The <schematicresponse> is responsible only for executing the
# Python code in <answer> with *submission* (list)
# in the global context.
# To test that the context is set up correctly,
# we create a script that sets *correct* to true
# if and only if we find the *submission* (list)
script = "correct = ['correct' if 'test' in submission[0] else 'incorrect']"
problem = self.build_problem(answer=script)
# The actual dictionary would contain schematic information
# sent from the JavaScript simulation
submission_dict = {'test': 'the_answer'}
input_dict = {'1_2_1': json.dumps(submission_dict)}
correct_map = problem.grade_answers(input_dict)
# Expect that the problem is graded as true
# (That is, our script verifies that the context
# is what we expect)
self.assertEqual(correct_map.get_correctness('1_2_1'), 'correct')
def test_check_function_randomization(self):
# The check function should get a random seed from the problem.
script = "correct = ['correct' if (submission[0]['num'] == {code}) else 'incorrect']".format(code=self._get_random_number_code())
problem = self.build_problem(answer=script)
submission_dict = {'num': self._get_random_number_result(problem.seed)}
input_dict = {'1_2_1': json.dumps(submission_dict)}
correct_map = problem.grade_answers(input_dict)
self.assertEqual(correct_map.get_correctness('1_2_1'), 'correct')
def test_script_exception(self):
# Construct a script that will raise an exception
script = "raise Exception('test')"
problem = self.build_problem(answer=script)
# Expect that an exception gets raised when we check the answer
with self.assertRaises(ResponseError):
submission_dict = {'test': 'test'}
input_dict = {'1_2_1': json.dumps(submission_dict)}
problem.grade_answers(input_dict)
class AnnotationResponseTest(ResponseTest):
from capa.tests.response_xml_factory import AnnotationResponseXMLFactory
xml_factory_class = AnnotationResponseXMLFactory
def test_grade(self):
(correct, partially, incorrect) = ('correct', 'partially-correct', 'incorrect')
answer_id = '1_2_1'
options = (('x', correct), ('y', partially), ('z', incorrect))
make_answer = lambda option_ids: {answer_id: json.dumps({'options': option_ids})}
tests = [
{'correctness': correct, 'points': 2, 'answers': make_answer([0])},
{'correctness': partially, 'points': 1, 'answers': make_answer([1])},
{'correctness': incorrect, 'points': 0, 'answers': make_answer([2])},
{'correctness': incorrect, 'points': 0, 'answers': make_answer([0, 1, 2])},
{'correctness': incorrect, 'points': 0, 'answers': make_answer([])},
{'correctness': incorrect, 'points': 0, 'answers': make_answer('')},
{'correctness': incorrect, 'points': 0, 'answers': make_answer(None)},
{'correctness': incorrect, 'points': 0, 'answers': {answer_id: 'null'}},
]
for test in tests:
expected_correctness = test['correctness']
expected_points = test['points']
answers = test['answers']
problem = self.build_problem(options=options)
correct_map = problem.grade_answers(answers)
actual_correctness = correct_map.get_correctness(answer_id)
actual_points = correct_map.get_npoints(answer_id)
self.assertEqual(expected_correctness, actual_correctness,
msg="%s should be marked %s" % (answer_id, expected_correctness))
self.assertEqual(expected_points, actual_points,
msg="%s should have %d points" % (answer_id, expected_points))
class ChoiceTextResponseTest(ResponseTest):
"""
Class containing setup and tests for ChoiceText responsetype.
"""
    from capa.tests.response_xml_factory import ChoiceTextResponseXMLFactory
xml_factory_class = ChoiceTextResponseXMLFactory
# `TEST_INPUTS` is a dictionary mapping from
# test_name to a representation of inputs for a test problem.
TEST_INPUTS = {
"1_choice_0_input_correct": [(True, [])],
"1_choice_0_input_incorrect": [(False, [])],
"1_choice_0_input_invalid_choice": [(False, []), (True, [])],
"1_choice_1_input_correct": [(True, ["123"])],
"1_input_script_correct": [(True, ["2"])],
"1_input_script_incorrect": [(True, ["3.25"])],
"1_choice_2_inputs_correct": [(True, ["123", "456"])],
"1_choice_2_inputs_tolerance": [(True, ["123 + .5", "456 + 9"])],
"1_choice_2_inputs_1_wrong": [(True, ["0", "456"])],
"1_choice_2_inputs_both_wrong": [(True, ["0", "0"])],
"1_choice_2_inputs_inputs_blank": [(True, ["", ""])],
"1_choice_2_inputs_empty": [(False, [])],
"1_choice_2_inputs_fail_tolerance": [(True, ["123 + 1.5", "456 + 9"])],
"1_choice_1_input_within_tolerance": [(True, ["122.5"])],
"1_choice_1_input_answer_incorrect": [(True, ["345"])],
"1_choice_1_input_choice_incorrect": [(False, ["123"])],
"2_choices_0_inputs_correct": [(False, []), (True, [])],
"2_choices_0_inputs_incorrect": [(True, []), (False, [])],
"2_choices_0_inputs_blank": [(False, []), (False, [])],
"2_choices_1_input_1_correct": [(False, []), (True, ["123"])],
"2_choices_1_input_1_incorrect": [(True, []), (False, ["123"])],
"2_choices_1_input_input_wrong": [(False, []), (True, ["321"])],
"2_choices_1_input_1_blank": [(False, []), (False, [])],
"2_choices_1_input_2_correct": [(True, []), (False, ["123"])],
"2_choices_1_input_2_incorrect": [(False, []), (True, ["123"])],
"2_choices_2_inputs_correct": [(True, ["123"]), (False, [])],
"2_choices_2_inputs_wrong_choice": [(False, ["123"]), (True, [])],
"2_choices_2_inputs_wrong_input": [(True, ["321"]), (False, [])]
}
# `TEST_SCENARIOS` is a dictionary of the form
    # {test_name: (test_problem_name, correctness)}
# correctness represents whether the problem should be graded as
# correct or incorrect when the test is run.
TEST_SCENARIOS = {
"1_choice_0_input_correct": ("1_choice_0_input", "correct"),
"1_choice_0_input_incorrect": ("1_choice_0_input", "incorrect"),
"1_choice_0_input_invalid_choice": ("1_choice_0_input", "incorrect"),
"1_input_script_correct": ("1_input_script", "correct"),
"1_input_script_incorrect": ("1_input_script", "incorrect"),
"1_choice_2_inputs_correct": ("1_choice_2_inputs", "correct"),
"1_choice_2_inputs_tolerance": ("1_choice_2_inputs", "correct"),
"1_choice_2_inputs_1_wrong": ("1_choice_2_inputs", "incorrect"),
"1_choice_2_inputs_both_wrong": ("1_choice_2_inputs", "incorrect"),
"1_choice_2_inputs_inputs_blank": ("1_choice_2_inputs", "incorrect"),
"1_choice_2_inputs_empty": ("1_choice_2_inputs", "incorrect"),
"1_choice_2_inputs_fail_tolerance": ("1_choice_2_inputs", "incorrect"),
"1_choice_1_input_correct": ("1_choice_1_input", "correct"),
"1_choice_1_input_within_tolerance": ("1_choice_1_input", "correct"),
"1_choice_1_input_answer_incorrect": ("1_choice_1_input", "incorrect"),
"1_choice_1_input_choice_incorrect": ("1_choice_1_input", "incorrect"),
"2_choices_0_inputs_correct": ("2_choices_0_inputs", "correct"),
"2_choices_0_inputs_incorrect": ("2_choices_0_inputs", "incorrect"),
"2_choices_0_inputs_blank": ("2_choices_0_inputs", "incorrect"),
"2_choices_1_input_1_correct": ("2_choices_1_input_1", "correct"),
"2_choices_1_input_1_incorrect": ("2_choices_1_input_1", "incorrect"),
"2_choices_1_input_input_wrong": ("2_choices_1_input_1", "incorrect"),
"2_choices_1_input_1_blank": ("2_choices_1_input_1", "incorrect"),
"2_choices_1_input_2_correct": ("2_choices_1_input_2", "correct"),
"2_choices_1_input_2_incorrect": ("2_choices_1_input_2", "incorrect"),
"2_choices_2_inputs_correct": ("2_choices_2_inputs", "correct"),
"2_choices_2_inputs_wrong_choice": ("2_choices_2_inputs", "incorrect"),
"2_choices_2_inputs_wrong_input": ("2_choices_2_inputs", "incorrect")
}
# Dictionary that maps from problem_name to arguments for
# _make_problem, that will create the problem.
TEST_PROBLEM_ARGS = {
"1_choice_0_input": {"choices": ("true", {}), "script": ''},
"1_choice_1_input": {
"choices": ("true", {"answer": "123", "tolerance": "1"}),
"script": ''
},
"1_input_script": {
"choices": ("true", {"answer": "$computed_response", "tolerance": "1"}),
"script": "computed_response = math.sqrt(4)"
},
"1_choice_2_inputs": {
"choices": [
(
"true", (
{"answer": "123", "tolerance": "1"},
{"answer": "456", "tolerance": "10"}
)
)
],
"script": ''
},
"2_choices_0_inputs": {
"choices": [("false", {}), ("true", {})],
"script": ''
},
"2_choices_1_input_1": {
"choices": [
("false", {}), ("true", {"answer": "123", "tolerance": "0"})
],
"script": ''
},
"2_choices_1_input_2": {
"choices": [("true", {}), ("false", {"answer": "123", "tolerance": "0"})],
"script": ''
},
"2_choices_2_inputs": {
"choices": [
("true", {"answer": "123", "tolerance": "0"}),
("false", {"answer": "999", "tolerance": "0"})
],
"script": ''
}
}
def _make_problem(self, choices, in_type='radiotextgroup', script=''):
"""
Convenience method to fill in default values for script and
type if needed, then call self.build_problem
"""
return self.build_problem(
choices=choices,
type=in_type,
script=script
)
def _make_answer_dict(self, choice_list):
"""
Convenience method to make generation of answers less tedious,
pass in an iterable argument with elements of the form: [bool, [ans,]]
Will generate an answer dict for those options
"""
answer_dict = {}
for index, choice_answers_pair in enumerate(choice_list):
# Choice is whether this choice is correct
            # Answers contains a list of answers to textinputs for the choice
choice, answers = choice_answers_pair
if choice:
# Radio/Checkbox inputs in choicetext problems follow
# a naming convention that gives them names ending with "bc"
choice_id = "1_2_1_choiceinput_{index}bc".format(index=index)
choice_value = "choiceinput_{index}".format(index=index)
answer_dict[choice_id] = choice_value
# Build the names for the numtolerance_inputs and add their answers
# to `answer_dict`.
for ind, answer in enumerate(answers):
# In `answer_id` `index` represents the ordinality of the
# choice and `ind` represents the ordinality of the
# numtolerance_input inside the parent choice.
answer_id = "1_2_1_choiceinput_{index}_numtolerance_input_{ind}".format(
index=index,
ind=ind
)
answer_dict[answer_id] = answer
return answer_dict
def test_invalid_xml(self):
"""
Test that build problem raises errors for invalid options
"""
with self.assertRaises(Exception):
self.build_problem(type="invalidtextgroup")
def test_valid_xml(self):
"""
Test that `build_problem` builds valid xml
"""
self.build_problem()
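        # Reaching this assertion without an exception means the XML built successfully.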
self.assertTrue(True)
def test_unchecked_input_not_validated(self):
"""
        Test that a student can have a non-numeric answer in an unselected
choice without causing an error to be raised when the problem is
checked.
"""
two_choice_two_input = self._make_problem(
[
("true", {"answer": "123", "tolerance": "1"}),
("false", {})
],
"checkboxtextgroup"
)
self.assert_grade(
two_choice_two_input,
self._make_answer_dict([(True, ["1"]), (False, ["Platypus"])]),
"incorrect"
)
def test_interpret_error(self):
"""
        Test that student answers that cannot be interpreted as numbers
cause the response type to raise an error.
"""
two_choice_two_input = self._make_problem(
[
("true", {"answer": "123", "tolerance": "1"}),
("false", {})
],
"checkboxtextgroup"
)
with self.assertRaisesRegexp(StudentInputError, "Could not interpret"):
# Test that error is raised for input in selected correct choice.
self.assert_grade(
two_choice_two_input,
self._make_answer_dict([(True, ["Platypus"])]),
"correct"
)
with self.assertRaisesRegexp(StudentInputError, "Could not interpret"):
# Test that error is raised for input in selected incorrect choice.
self.assert_grade(
two_choice_two_input,
self._make_answer_dict([(True, ["1"]), (True, ["Platypus"])]),
"correct"
)
def test_staff_answer_error(self):
broken_problem = self._make_problem(
[("true", {"answer": "Platypus", "tolerance": "0"}),
("true", {"answer": "edX", "tolerance": "0"})
],
"checkboxtextgroup"
)
with self.assertRaisesRegexp(
StudentInputError,
"The Staff answer could not be interpreted as a number."
):
self.assert_grade(
broken_problem,
self._make_answer_dict(
[(True, ["1"]), (True, ["1"])]
),
"correct"
)
def test_radio_grades(self):
"""
Test that confirms correct operation of grading when the inputtag is
radiotextgroup.
"""
for name, inputs in self.TEST_INPUTS.iteritems():
# Turn submission into the form expected when grading this problem.
submission = self._make_answer_dict(inputs)
# Lookup the problem_name, and the whether this test problem
# and inputs should be graded as correct or incorrect.
problem_name, correctness = self.TEST_SCENARIOS[name]
# Load the args needed to build the problem for this test.
problem_args = self.TEST_PROBLEM_ARGS[problem_name]
test_choices = problem_args["choices"]
test_script = problem_args["script"]
# Build the actual problem for the test.
test_problem = self._make_problem(test_choices, 'radiotextgroup', test_script)
# Make sure the actual grade matches the expected grade.
self.assert_grade(
test_problem,
submission,
correctness,
msg="{0} should be {1}".format(
name,
correctness
)
)
def test_checkbox_grades(self):
"""
Test that confirms correct operation of grading when the inputtag is
checkboxtextgroup.
"""
# Dictionary from name of test_scenario to (problem_name, correctness)
# Correctness is used to test whether the problem was graded properly
scenarios = {
"2_choices_correct": ("checkbox_two_choices", "correct"),
"2_choices_incorrect": ("checkbox_two_choices", "incorrect"),
"2_choices_2_inputs_correct": (
"checkbox_2_choices_2_inputs",
"correct"
),
"2_choices_2_inputs_missing_choice": (
"checkbox_2_choices_2_inputs",
"incorrect"
),
"2_choices_2_inputs_wrong_input": (
"checkbox_2_choices_2_inputs",
"incorrect"
)
}
# Dictionary scenario_name: test_inputs
inputs = {
"2_choices_correct": [(True, []), (True, [])],
"2_choices_incorrect": [(True, []), (False, [])],
"2_choices_2_inputs_correct": [(True, ["123"]), (True, ["456"])],
"2_choices_2_inputs_missing_choice": [
(True, ["123"]), (False, ["456"])
],
"2_choices_2_inputs_wrong_input": [
(True, ["123"]), (True, ["654"])
]
}
# Two choice zero input problem with both choices being correct.
checkbox_two_choices = self._make_problem(
[("true", {}), ("true", {})], "checkboxtextgroup"
)
# Two choice two input problem with both choices correct.
checkbox_two_choices_two_inputs = self._make_problem(
[("true", {"answer": "123", "tolerance": "0"}),
("true", {"answer": "456", "tolerance": "0"})
],
"checkboxtextgroup"
)
# Dictionary problem_name: problem
problems = {
"checkbox_two_choices": checkbox_two_choices,
"checkbox_2_choices_2_inputs": checkbox_two_choices_two_inputs
}
        for name, test_inputs in inputs.iteritems():
            submission = self._make_answer_dict(test_inputs)
# Load the test problem's name and desired correctness
problem_name, correctness = scenarios[name]
# Load the problem
problem = problems[problem_name]
# Make sure the actual grade matches the expected grade
self.assert_grade(
problem,
submission,
correctness,
msg="{0} should be {1}".format(name, correctness)
)
|
pelikanchik/edx-platform
|
common/lib/capa/capa/tests/test_responsetypes.py
|
Python
|
agpl-3.0
| 86,450
|
import os
import numpy as np
from scipy.stats.mstats import gmean
import sklearn.model_selection
import paths
import labels
from datasets import mlb
import find_best_threshold
np.set_printoptions(threshold=np.nan)
np.set_printoptions(suppress=True)
def submit_cv_ensemble(ensemble, output_file):
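    """
    Average the per-fold F2-optimised thresholds and test predictions across the
    ensemble, binarise the averaged predictions with the averaged thresholds, and
    write a submission CSV ('image_name,tags') to paths.submissions + output_file.
    """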
thresholds = []
tests = []
for net, val, train in ensemble:
y_val = mlb.transform(train['tags'].str.split()).astype(np.float32)
threshold = find_best_threshold.optimise_f2_thresholds_fast(y_val, net['train'])
thresholds.append(threshold)
test = net['test']
tests.append(test)
threshold_avg = np.average(np.stack(thresholds, axis=0), axis=0)
test_avg = np.average(np.stack(tests, axis=0), axis=0)
test_images = list(map(lambda path: path[:-len('.jpg')], os.listdir(paths.test_jpg)))
test_avg[test_avg > threshold_avg] = 1
test_avg[test_avg <= threshold_avg] = 0
predictions = mlb.inverse_transform(test_avg)
test_results = zip(predictions, test_images)
with open(paths.submissions + output_file, 'w') as submission:
submission.write('image_name,tags\n')
for tags, target in test_results:
output = target + ',' + ' '.join(tags)
submission.write("%s\n" % output)
print('Submission ready!')
def load_cv_folds(model_name):
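    """
    Load the saved per-split predictions for `model_name` and pair each split
    with the validation/train label frames from the same 5-fold KFold (the fixed
    random_state is assumed to match the split used during training).
    Returns a list of (net, val, train) tuples.
    """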
models = []
for i in range(5):
net = np.load(paths.predictions + model_name + '-split_{}.npz'.format(i))
net = {
'train': np.average(net['train'], axis=0),
'val': np.average(net['val'], axis=0),
'test': np.average(net['test'], axis=0)
}
models.append(net)
labels_df = labels.get_labels_df()
kf = sklearn.model_selection.KFold(n_splits=5, shuffle=True, random_state=1)
split = kf.split(labels_df)
folds = []
for i, ((train_idx, val_idx), net) in enumerate(zip(split, models)):
val = labels_df.ix[val_idx]
train = labels_df.ix[train_idx]
folds.append((net, val, train))
print(i)
return folds
# load_cv_folds takes the model name
folds = load_cv_folds('nn_finetune_resnet_50')
# you can choose a name yourself here; I name my ensembles model_fold_1+2+3+4+5
submit_cv_ensemble(folds, 'nn_finetune_resnet_50_fold_1+2+3+4+5')
|
Mctigger/KagglePlanetPytorch
|
submit_predictions.py
|
Python
|
mit
| 2,315
|
from rasa_nlu.components import Component
from rasa_nlu import utils
from rasa_nlu.model import Metadata
#import nltk, os
#from nltk.sentiment.vader import SentimentIntensityAnalyzer
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from textblob import TextBlob
# pipeline name: "sentiment.sentiment_analyzer"
# should be added to component_classes in registry.py
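# A minimal sketch of how this component might be referenced in a Rasa NLU
# pipeline config (assuming a 0.x-style YAML config; the other pipeline
# entries are illustrative):
#
#   pipeline:
#   - name: "tokenizer_whitespace"
#   - name: "sentiment.sentiment_analyzer"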
class SentimentAnalyzer(Component):
"""A pre-trained sentiment component"""
name = "sentiment.sentiment_analyzer"
provides = ["entities"]
requires = []
defaults = {}
language_list = ["en", "it"]
def __init__(self, component_config=None):
super(SentimentAnalyzer, self).__init__(component_config)
def train(self, training_data, cfg, **kwargs):
pass
def convert_to_rasa(self, value, confidence):
"""Convert model output into the Rasa NLU compatible output format."""
entity = {"value": value,
"confidence": confidence,
"entity": "sentiment",
"extractor": "sentiment_extractor"}
return entity
def process(self, message, **kwargs):
"""Retrieve the text message, translate to italian, pass it to the classifier
and append the prediction results to the message class."""
ita_message = TextBlob(message.text)
try:
            if ita_message.detect_language() == "it":
ita_message = ita_message.translate(from_lang="it", to='en')
        except Exception:
print("TranslatorError", ita_message)
#ita_message.sentiment
#ita_message.sentiment.polarity
sid = SentimentIntensityAnalyzer()
        res = sid.polarity_scores(str(ita_message))  # VADER expects a plain string
key, value = max(res.items(), key=lambda x: x[1])
entity = self.convert_to_rasa(key, value)
message.set("entities", message.get("entities", []) + [entity], add_to_output=True)
def persist(self, model_dir):
pass
|
Ventrosky/python-scripts
|
nlp-scripts/sentiment.py
|
Python
|
gpl-3.0
| 1,974
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# TestMixins.py
# Copyright (C) 2010 Simon Newton
'''Mixins used by the test definitions.
This module contains classes which can be inherited from to simplify writing
test definitions.
'''
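# A minimal sketch of how these mixins are typically combined; the class,
# PID and field names below are illustrative, not taken from this module:
#
#   class GetDeviceLabel(GetMixin, ResponderTestFixture):
#     PID = 'DEVICE_LABEL'
#     EXPECTED_FIELD = 'label'
#     PROVIDES = ['device_label']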
__author__ = 'nomis52@gmail.com (Simon Newton)'
from collections import deque
from ola import PidStore
from ola.DUBDecoder import DecodeResponse
from ola.OlaClient import OlaClient, RDMNack
from ola.UID import UID
from ExpectedResults import *
from ResponderTest import ResponderTestFixture
MAX_LABEL_SIZE = 32
MAX_DMX_ADDRESS = 512
def UnsupportedSetNacks(pid):
"""Repsonders use either NR_UNSUPPORTED_COMMAND_CLASS or NR_UNKNOWN_PID."""
return [
NackSetResult(pid.value, RDMNack.NR_UNSUPPORTED_COMMAND_CLASS),
NackSetResult(pid.value, RDMNack.NR_UNKNOWN_PID),
]
# Generic Get / Set Mixins
# These don't care about the format of the message.
#------------------------------------------------------------------------------
class UnsupportedGetMixin(object):
"""Check that Get fails with NR_UNSUPPORTED_COMMAND_CLASS."""
def Test(self):
self.AddIfGetSupported(
self.NackGetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS))
self.SendRawGet(PidStore.ROOT_DEVICE, self.pid)
class GetMixin(object):
"""GET Mixin that also sets a property if PROVIDES is set.
The target class needs to set EXPECTED_FIELD and optionally PROVIDES.
"""
def Test(self):
self.AddIfGetSupported(self.AckGetResult(
field_names=[self.EXPECTED_FIELD]))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def VerifyResult(self, response, fields):
if self.PROVIDES:
value = None
if response.WasAcked():
value = fields[self.EXPECTED_FIELD]
self.SetProperty(self.PROVIDES[0], value)
class GetRequiredMixin(object):
"""GET Mixin that also sets a property if PROVIDES is set.
The target class needs to set EXPECTED_FIELD and optionally PROVIDES.
"""
def Test(self):
self.AddExpectedResults(self.AckGetResult(
field_names=[self.EXPECTED_FIELD]))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def VerifyResult(self, response, fields):
if response.WasAcked() and self.PROVIDES:
self.SetProperty(self.PROVIDES[0], fields[self.EXPECTED_FIELD])
class GetWithDataMixin(object):
"""GET a PID with random param data."""
DATA = 'foobarbaz'
def Test(self):
self.AddIfGetSupported([
self.NackGetResult(RDMNack.NR_FORMAT_ERROR),
self.AckGetResult(
warning='Get %s with data returned an ack' % self.pid.name)
])
    self.SendRawGet(PidStore.ROOT_DEVICE, self.pid, self.DATA)
class GetWithNoDataMixin(object):
"""Attempt a get with no data."""
def Test(self):
self.AddIfGetSupported(self.NackGetResult(RDMNack.NR_FORMAT_ERROR))
self.SendRawGet(PidStore.ROOT_DEVICE, self.pid)
class UnsupportedSetMixin(object):
"""Check that SET fails with NR_UNSUPPORTED_COMMAND_CLASS."""
DATA = ''
def Test(self):
self.AddExpectedResults(UnsupportedSetNacks(self.pid))
self.SendRawSet(PidStore.ROOT_DEVICE, self.pid, self.DATA)
class SetWithDataMixin(ResponderTestFixture):
"""SET a PID with random param data."""
DATA = 'foobarbaz'
def Test(self):
self.AddIfSetSupported([
self.NackSetResult(RDMNack.NR_FORMAT_ERROR),
self.AckSetResult(
warning='Set %s with data returned an ack' % self.pid.name)
])
    self.SendRawSet(PidStore.ROOT_DEVICE, self.pid, self.DATA)
class SetWithNoDataMixin(object):
"""Attempt a set with no data."""
def Test(self):
self.AddIfSetSupported(self.NackSetResult(RDMNack.NR_FORMAT_ERROR))
self.SendRawSet(PidStore.ROOT_DEVICE, self.pid, '')
# TODO(simon): add a method to check this didn't change the value
# Generic Label Mixins
# These all work in conjunction with the IsSupportedMixin
#------------------------------------------------------------------------------
class SetLabelMixin(object):
"""Set a PID and make sure the value is saved.
If PROVIDES is non empty, the first property will be used to indicate if the
set action is supported. If an ack is returned it'll be set to true,
otherwise false.
"""
TEST_LABEL = 'test label'
PROVIDES = []
SET, VERIFY, RESET = xrange(3)
def ExpectedResults(self):
return [
self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS),
self.AckSetResult(action=self.VerifySet)
]
def Test(self):
self._test_state = self.SET
self.AddIfSetSupported(self.ExpectedResults())
self.SendSet(PidStore.ROOT_DEVICE, self.pid, [self.TEST_LABEL])
def VerifySet(self):
self._test_state = self.VERIFY
self.AddExpectedResults(self.AckGetResult(field_names=['label']))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def VerifyResult(self, response, fields):
if self._test_state == self.SET:
if self.PROVIDES:
self.SetProperty(self.PROVIDES[0], response.WasAcked())
return
elif self._test_state == self.RESET:
return
new_label = fields['label']
if self.TEST_LABEL == new_label:
return
if (len(new_label) < len(self.TEST_LABEL) and
self.TEST_LABEL.startswith(new_label)):
self.AddAdvisory('Label for %s was truncated to %d characters' %
(self.pid, len(new_label)))
else:
self.SetFailed('Labels didn\'t match, expected "%s", got "%s"' %
(self.TEST_LABEL.encode('string-escape'),
new_label.encode('string-escape')))
def ResetState(self):
if not self.OldValue():
return
self._test_state = self.RESET
self.AddExpectedResults(self.AckSetResult())
self.SendSet(PidStore.ROOT_DEVICE, self.pid, [self.OldValue()])
self._wrapper.Run()
class NonUnicastSetLabelMixin(SetLabelMixin):
"""Send a SET device label to a broadcast or vendorcast uid."""
def Test(self):
if not self.Property('set_device_label_supported'):
self.SetNotRun(' Previous set label was nacked')
self.Stop()
return
self._test_state = self.SET
self.AddExpectedResults(BroadcastResult(action=self.VerifySet))
self.SendDirectedSet(self.Uid(), PidStore.ROOT_DEVICE, self.pid,
[self.TEST_LABEL])
class SetOversizedLabelMixin(object):
"""Send an over-sized SET label command."""
LONG_STRING = 'this is a string which is more than 32 characters'
def Test(self):
self.verify_result = False
self.AddIfSetSupported([
self.NackSetResult(RDMNack.NR_FORMAT_ERROR),
self.NackSetResult(RDMNack.NR_PACKET_SIZE_UNSUPPORTED),
self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS),
self.AckSetResult(action=self.VerifySet),
])
self.SendRawSet(PidStore.ROOT_DEVICE, self.pid, self.LONG_STRING)
def VerifySet(self):
"""If we got an ACK back, we send a GET to check what the result was."""
self.verify_result = True
self.AddExpectedResults(self.AckGetResult(field_names=['label']))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def VerifyResult(self, response, fields):
if not self.verify_result:
return
if 'label' not in fields:
self.SetFailed('Missing label field in response')
else:
if fields['label'] != self.LONG_STRING[0:MAX_LABEL_SIZE]:
self.AddWarning(
'Setting an oversized %s set the first %d characters' % (
self.PID, len(fields['label'])))
# Generic Set Mixins
# These all work in conjunction with the IsSupportedMixin
#------------------------------------------------------------------------------
class SetMixin(object):
"""The base class for set mixins."""
def OldValue(self):
self.SetBroken('base method of SetMixin called')
def NewValue(self):
self.SetBroken('base method of SetMixin called')
def Test(self):
self.AddIfSetSupported([
self.AckSetResult(action=self.VerifySet),
self.NackSetResult(
RDMNack.NR_UNSUPPORTED_COMMAND_CLASS,
advisory='SET for %s returned unsupported command class' % self.PID),
])
self.SendSet(PidStore.ROOT_DEVICE, self.pid, [self.NewValue()])
def VerifySet(self):
self.AddExpectedResults(
self.AckGetResult(field_values={self.EXPECTED_FIELD: self.NewValue()}))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def ResetState(self):
old_value = self.OldValue()
if old_value is None:
return
self.AddExpectedResults(self.AckSetResult())
self.SendSet(PidStore.ROOT_DEVICE, self.pid, [old_value])
self._wrapper.Run()
class SetBoolMixin(SetMixin):
"""Attempt to SET a bool field."""
VALUE = True
def NewValue(self):
"""Decide the new value to set based on the old one.
This ensures we change it.
"""
value = self.OldValue()
if value is not None:
return not value
return self.VALUE
class SetUInt8Mixin(SetMixin):
"""Attempt to SET a uint8 field."""
VALUE = True
def NewValue(self):
"""Decide the new value to set based on the old one.
This ensures we change it.
"""
value = self.OldValue()
if value is not None:
return (value + 1) % 0xff
return self.VALUE
class SetUInt16Mixin(SetMixin):
"""Attempt to SET a uint16 field."""
VALUE = True
def NewValue(self):
"""Decide the new value to set based on the old one.
This ensures we change it.
"""
value = self.OldValue()
if value is not None:
return (value + 1) % 0xffff
return self.VALUE
class SetUInt32Mixin(SetMixin):
"""Attempt to SET a uint32 field."""
VALUE = 100
def NewValue(self):
"""Decide the new value to set based on the old one.
This ensures we change it.
"""
value = self.OldValue()
if value is not None:
return (value + 1) % 0xffffffff
return self.VALUE
# Start address mixins
#------------------------------------------------------------------------------
class SetStartAddressMixin(object):
"""Set the dmx start address."""
SET, VERIFY, RESET = xrange(3)
def CalculateNewAddress(self, current_address, footprint):
if footprint == MAX_DMX_ADDRESS:
start_address = 1
else:
start_address = current_address + 1
if start_address + footprint > MAX_DMX_ADDRESS + 1:
start_address = 1
return start_address
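  # Worked example of the wrap-around above, with MAX_DMX_ADDRESS = 512: a
  # footprint of 10 at address 500 moves to 501 (501 + 10 <= 513), while at
  # address 505 it would move to 506, overflow (506 + 10 > 513) and wrap
  # back to address 1.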
def VerifySet(self):
self._test_state = self.VERIFY
self.AddExpectedResults(
self.AckGetResult(field_values={'dmx_address': self.start_address},
action=self.VerifyDeviceInfo))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def VerifyDeviceInfo(self):
device_info_pid = self.LookupPid('DEVICE_INFO')
self.AddExpectedResults(
AckGetResult(
device_info_pid.value,
field_values={'dmx_start_address': self.start_address}))
self.SendGet(PidStore.ROOT_DEVICE, device_info_pid)
def ResetState(self):
old_address = self.Property('dmx_address')
if not old_address or old_address == 0xffff:
return
self._test_state = self.RESET
self.AddExpectedResults(self.AckSetResult())
self.SendSet(PidStore.ROOT_DEVICE, self.pid,
[self.Property('dmx_address')])
self._wrapper.Run()
class SetNonUnicastStartAddressMixin(SetStartAddressMixin):
"""Send a set dmx start address to a non unicast uid."""
def Test(self):
footprint = self.Property('dmx_footprint')
current_address = self.Property('dmx_address')
if footprint == 0 or current_address == 0xffff:
self.SetNotRun(" Device doesn't use a DMX address")
self.Stop()
return
if not self.Property('set_dmx_address_supported'):
self.SetNotRun(' Previous set start address was nacked')
self.Stop()
return
self._test_state = self.SET
self.start_address = self.CalculateNewAddress(current_address, footprint)
self.AddExpectedResults(BroadcastResult(action=self.VerifySet))
self.SendDirectedSet(self.Uid(), PidStore.ROOT_DEVICE, self.pid,
[self.start_address])
# Identify Device Mixin
#------------------------------------------------------------------------------
class SetNonUnicastIdentifyMixin(object):
"""Sets the identify device state.
To avoid sending a broadcast identify on (which may strike all lamps in a
large rig), we instead turn identify on and then send a broadcast off.
"""
REQUIRES = ['identify_state']
def States(self):
return [
self.TurnOn,
self.VerifyOn,
self.TurnOff,
self.VerifyOff,
]
def NextState(self):
self._current_state += 1
try:
return self.States()[self._current_state]
except IndexError:
return None
def Test(self):
self._current_state = -1
self.NextState()()
def TurnOn(self):
self.AddExpectedResults(
self.AckSetResult(action=self.NextState()))
self.SendSet(PidStore.ROOT_DEVICE, self.pid, [True])
def VerifyOn(self):
self.AddExpectedResults(
self.AckGetResult(field_values={'identify_state': True},
action=self.NextState()))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def TurnOff(self):
self.AddExpectedResults(BroadcastResult(action=self.NextState()))
self.SendDirectedSet(self.Uid(), PidStore.ROOT_DEVICE, self.pid, [False])
def VerifyOff(self):
self.AddExpectedResults(
self.AckGetResult(field_values={'identify_state': False}))
self.SendGet(PidStore.ROOT_DEVICE, self.pid)
def ResetState(self):
# reset back to the old value
self.SendSet(PidStore.ROOT_DEVICE, self.pid,
[self.Property('identify_state')])
self._wrapper.Run()
# Sensor mixins
#------------------------------------------------------------------------------
class SetUndefinedSensorValues(object):
"""Attempt to set sensor values for all sensors that weren't defined."""
def Test(self):
sensors = self.Property('sensor_definitions')
self._missing_sensors = []
for i in xrange(0, 0xff):
if i not in sensors:
self._missing_sensors.append(i)
if self._missing_sensors:
# loop and get all values
self._DoAction()
else:
self.SetNotRun(' All sensors declared')
self.Stop()
return
def _DoAction(self):
if not self._missing_sensors:
self.Stop()
return
self.AddIfSetSupported([
self.NackSetResult(RDMNack.NR_DATA_OUT_OF_RANGE,
action=self._DoAction),
# SET SENSOR_VALUE may not be supported
self.NackSetResult(RDMNack.NR_UNSUPPORTED_COMMAND_CLASS,
action=self._DoAction),
])
self.SendSet(PidStore.ROOT_DEVICE, self.pid, [self._missing_sensors.pop(0)])
# Discovery Mixins
#------------------------------------------------------------------------------
class DiscoveryMixin(ResponderTestFixture):
"""UnMute the device, send a DUB, confirm the UID, then mute again.
This mixin requires:
LowerBound() the lower UID to use in the DUB
    UpperBound() the upper UID to use in the DUB
And Optionally:
DUBResponseCode(response_code): called when the discovery request
completes.
ExpectResponse(): returns true if we expect the device to answer the DUB
request, false otherwise.
Target:
returns the UID to address the UID command to, defaults to
ffff:ffffffff
"""
PID = 'DISC_UNIQUE_BRANCH'
REQUIRES = ['mute_supported', 'unmute_supported']
def DUBResponseCode(self, response_code):
pass
def ExpectResponse(self):
return True
def Target(self):
return UID.AllDevices()
def UnMuteDevice(self, next_method):
unmute_pid = self.LookupPid('DISC_UNMUTE')
self.AddExpectedResults([
AckDiscoveryResult(unmute_pid.value, action=next_method),
])
self.SendDiscovery(PidStore.ROOT_DEVICE, unmute_pid)
def Test(self):
self._muting = True
if not (self.Property('unmute_supported') and
self.Property('mute_supported')):
self.SetNotRun('Controller does not support mute / unmute commands')
self.Stop()
return
self.UnMuteDevice(self.SendDUB)
def SendDUB(self):
self._muting = False
results = [UnsupportedResult()]
if self.ExpectResponse():
results.append(DUBResult())
else:
results.append(TimeoutResult())
self.AddExpectedResults(results)
self.SendDirectedDiscovery(self.Target(),
PidStore.ROOT_DEVICE,
self.pid,
[self.LowerBound(), self.UpperBound()])
def VerifyResult(self, response, fields):
if self._muting:
return
self.DUBResponseCode(response.response_code)
if (response.response_code ==
OlaClient.RDM_PLUGIN_DISCOVERY_NOT_SUPPORTED):
return
if not self.ExpectResponse():
return
if len(response.raw_response) != 1:
self.SetFailed('Multiple DUB responses returned')
return
uid = DecodeResponse(bytearray(response.raw_response[0]))
    if uid is None or uid != self._uid:
      self.SetFailed('Missing or mismatched UID in DUB response')
      return
    self.LogDebug(' Located UID: %s' % uid)
def ResetState(self):
# mute the device again
mute_pid = self.LookupPid('DISC_MUTE')
self.SendDiscovery(PidStore.ROOT_DEVICE, mute_pid)
self._wrapper.Run()
|
mlba-team/open-lighting
|
tools/rdm/TestMixins.py
|
Python
|
lgpl-2.1
| 18,035
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from novaclient.tests import utils
from novaclient.tests.v1_1 import fakes
from novaclient.v1_1 import usage
class UsageTest(utils.TestCase):
def setUp(self):
super(UsageTest, self).setUp()
self.cs = self._get_fake_client()
self.usage_type = self._get_usage_type()
def _get_fake_client(self):
return fakes.FakeClient()
def _get_usage_type(self):
return usage.Usage
def test_usage_list(self, detailed=False):
now = datetime.datetime.now()
usages = self.cs.usage.list(now, now, detailed)
self.cs.assert_called('GET',
"/os-simple-tenant-usage?" +
("start=%s&" % now.isoformat()) +
("end=%s&" % now.isoformat()) +
("detailed=%s" % int(bool(detailed))))
        for u in usages:
            self.assertIsInstance(u, usage.Usage)
def test_usage_list_detailed(self):
self.test_usage_list(True)
def test_usage_get(self):
now = datetime.datetime.now()
u = self.cs.usage.get("tenantfoo", now, now)
self.cs.assert_called('GET',
"/os-simple-tenant-usage/tenantfoo?" +
("start=%s&" % now.isoformat()) +
("end=%s" % now.isoformat()))
self.assertIsInstance(u, usage.Usage)
|
akash1808/python-novaclient
|
novaclient/tests/v1_1/test_usage.py
|
Python
|
apache-2.0
| 1,951
|
import discord
from sigma.core.permission import check_admin, set_channel_nsfw
async def nsfwpermit(cmd, message, args):
channel = message.channel
if check_admin(message.author, channel):
if set_channel_nsfw(cmd.db, channel.id):
embed = discord.Embed(color=0x9933FF,
title=':eggplant: The NSFW Module has been Enabled for ' + channel.name)
else:
embed = discord.Embed(color=0xFF9900, title=':fire: The NSFW Module has been Disabled for ' + channel.name)
else:
embed = discord.Embed(type='rich', color=0xDB0000,
title='⛔ Insufficient Permissions. Server Admin Only.')
await message.channel.send(None, embed=embed)
|
AXAz0r/apex-sigma
|
sigma/plugins/nsfw/nsfwpermit.py
|
Python
|
gpl-3.0
| 750
|
# -*- coding: utf-8 -*-
#
# simpleapi documentation build configuration file, created by
# sphinx-quickstart on Wed Apr 21 21:02:02 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage',
'sphinx.ext.graphviz']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'simpleapi'
copyright = u'2010, Florian Schlachter'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import simpleapi
version = simpleapi.get_version()
# The full version, including alpha/beta/rc tags.
release = simpleapi.get_version()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'simpleapidoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'simpleapi.tex', u'simpleapi Documentation',
u'Florian Schlachter', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
flosch/simpleapi
|
docs/conf.py
|
Python
|
mit
| 6,460
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import api as oslo_db_api
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_service import service
from oslo_utils import timeutils
import requests
import six
from six.moves.urllib import parse as urlparse
from heat.common import exception
from heat.common.i18n import _
from heat.common.i18n import _LI
from heat.db import api as db_api
from heat.engine import api
from heat.objects import software_config as software_config_object
from heat.objects import software_deployment as software_deployment_object
from heat.rpc import api as rpc_api
LOG = logging.getLogger(__name__)
class SoftwareConfigService(service.Service):
def show_software_config(self, cnxt, config_id):
sc = software_config_object.SoftwareConfig.get_by_id(cnxt, config_id)
return api.format_software_config(sc)
def list_software_configs(self, cnxt, limit=None, marker=None,
tenant_safe=True):
scs = software_config_object.SoftwareConfig.get_all(
cnxt,
limit=limit,
marker=marker,
tenant_safe=tenant_safe)
result = [api.format_software_config(sc, detail=False) for sc in scs]
return result
def create_software_config(self, cnxt, group, name, config,
inputs, outputs, options):
sc = software_config_object.SoftwareConfig.create(cnxt, {
'group': group,
'name': name,
'config': {
'inputs': inputs,
'outputs': outputs,
'options': options,
'config': config
},
'tenant': cnxt.tenant_id})
return api.format_software_config(sc)
def delete_software_config(self, cnxt, config_id):
software_config_object.SoftwareConfig.delete(cnxt, config_id)
def list_software_deployments(self, cnxt, server_id):
all_sd = software_deployment_object.SoftwareDeployment.get_all(
cnxt, server_id)
result = [api.format_software_deployment(sd) for sd in all_sd]
return result
def metadata_software_deployments(self, cnxt, server_id):
if not server_id:
raise ValueError(_('server_id must be specified'))
all_sd = software_deployment_object.SoftwareDeployment.get_all(
cnxt, server_id)
# sort the configs by config name, to give the list of metadata a
# deterministic and controllable order.
all_sd_s = sorted(all_sd, key=lambda sd: sd.config.name)
result = [api.format_software_config(sd.config) for sd in all_sd_s]
return result
@oslo_db_api.wrap_db_retry(max_retries=10, retry_on_request=True)
def _push_metadata_software_deployments(self, cnxt, server_id, sd):
rs = db_api.resource_get_by_physical_resource_id(cnxt, server_id)
if not rs:
return
deployments = self.metadata_software_deployments(cnxt, server_id)
md = rs.rsrc_metadata or {}
md['deployments'] = deployments
rows_updated = db_api.resource_update(
cnxt, rs.id, {'rsrc_metadata': md}, rs.atomic_key)
if not rows_updated:
raise db_exc.RetryRequest(
exception.DeploymentConcurrentTransaction(server=server_id))
metadata_put_url = None
metadata_queue_id = None
for rd in rs.data:
if rd.key == 'metadata_put_url':
metadata_put_url = rd.value
break
elif rd.key == 'metadata_queue_id':
metadata_queue_id = rd.value
break
if metadata_put_url:
json_md = jsonutils.dumps(md)
requests.put(metadata_put_url, json_md)
elif metadata_queue_id:
zaqar_plugin = cnxt.clients.client_plugin('zaqar')
zaqar = zaqar_plugin.create_for_tenant(sd.stack_user_project_id)
queue = zaqar.queue(metadata_queue_id)
queue.post({'body': md, 'ttl': zaqar_plugin.DEFAULT_TTL})
def _refresh_swift_software_deployment(self, cnxt, sd, deploy_signal_id):
container, object_name = urlparse.urlparse(
deploy_signal_id).path.split('/')[-2:]
swift_plugin = cnxt.clients.client_plugin('swift')
swift = swift_plugin.client()
try:
headers = swift.head_object(container, object_name)
except Exception as ex:
# ignore not-found, in case swift is not consistent yet
if swift_plugin.is_not_found(ex):
LOG.info(_LI('Signal object not found: %(c)s %(o)s'), {
'c': container, 'o': object_name})
return sd
raise ex
lm = headers.get('last-modified')
last_modified = swift_plugin.parse_last_modified(lm)
prev_last_modified = sd.updated_at
if prev_last_modified:
# assume stored as utc, convert to offset-naive datetime
prev_last_modified = prev_last_modified.replace(tzinfo=None)
if prev_last_modified and (last_modified <= prev_last_modified):
return sd
try:
(headers, obj) = swift.get_object(container, object_name)
except Exception as ex:
# ignore not-found, in case swift is not consistent yet
if swift_plugin.is_not_found(ex):
LOG.info(_LI(
'Signal object not found: %(c)s %(o)s'), {
'c': container, 'o': object_name})
return sd
raise ex
if obj:
self.signal_software_deployment(
cnxt, sd.id, jsonutils.loads(obj),
last_modified.isoformat())
return software_deployment_object.SoftwareDeployment.get_by_id(
cnxt, sd.id)
def _refresh_zaqar_software_deployment(self, cnxt, sd, deploy_queue_id):
zaqar_plugin = cnxt.clients.client_plugin('zaqar')
zaqar = zaqar_plugin.create_for_tenant(sd.stack_user_project_id)
queue = zaqar.queue(deploy_queue_id)
messages = list(queue.pop())
if messages:
self.signal_software_deployment(
cnxt, sd.id, messages[0].body, None)
return software_deployment_object.SoftwareDeployment.get_by_id(
cnxt, sd.id)
def show_software_deployment(self, cnxt, deployment_id):
sd = software_deployment_object.SoftwareDeployment.get_by_id(
cnxt, deployment_id)
if sd.status == rpc_api.SOFTWARE_DEPLOYMENT_IN_PROGRESS:
c = sd.config.config
input_values = dict((i['name'], i['value']) for i in c['inputs'])
transport = input_values.get('deploy_signal_transport')
if transport == 'TEMP_URL_SIGNAL':
sd = self._refresh_swift_software_deployment(
cnxt, sd, input_values.get('deploy_signal_id'))
elif transport == 'ZAQAR_SIGNAL':
sd = self._refresh_zaqar_software_deployment(
cnxt, sd, input_values.get('deploy_queue_id'))
return api.format_software_deployment(sd)
def create_software_deployment(self, cnxt, server_id, config_id,
input_values, action, status,
status_reason, stack_user_project_id):
sd = software_deployment_object.SoftwareDeployment.create(cnxt, {
'config_id': config_id,
'server_id': server_id,
'input_values': input_values,
'tenant': cnxt.tenant_id,
'stack_user_project_id': stack_user_project_id,
'action': action,
'status': status,
'status_reason': status_reason})
self._push_metadata_software_deployments(cnxt, server_id, sd)
return api.format_software_deployment(sd)
def signal_software_deployment(self, cnxt, deployment_id, details,
updated_at):
if not deployment_id:
raise ValueError(_('deployment_id must be specified'))
sd = software_deployment_object.SoftwareDeployment.get_by_id(
cnxt, deployment_id)
status = sd.status
if not status == rpc_api.SOFTWARE_DEPLOYMENT_IN_PROGRESS:
# output values are only expected when in an IN_PROGRESS state
return
details = details or {}
output_status_code = rpc_api.SOFTWARE_DEPLOYMENT_OUTPUT_STATUS_CODE
ov = sd.output_values or {}
status = None
status_reasons = {}
status_code = details.get(output_status_code)
if status_code and str(status_code) != '0':
status = rpc_api.SOFTWARE_DEPLOYMENT_FAILED
status_reasons[output_status_code] = _(
'Deployment exited with non-zero status code: %s'
) % details.get(output_status_code)
event_reason = 'deployment failed (%s)' % status_code
else:
event_reason = 'deployment succeeded'
for output in sd.config.config['outputs'] or []:
out_key = output['name']
if out_key in details:
ov[out_key] = details[out_key]
if output.get('error_output', False):
status = rpc_api.SOFTWARE_DEPLOYMENT_FAILED
status_reasons[out_key] = details[out_key]
event_reason = 'deployment failed'
for out_key in rpc_api.SOFTWARE_DEPLOYMENT_OUTPUTS:
ov[out_key] = details.get(out_key)
if status == rpc_api.SOFTWARE_DEPLOYMENT_FAILED:
# build a status reason out of all of the values of outputs
# flagged as error_output
status_reasons = [' : '.join((k, six.text_type(status_reasons[k])))
for k in status_reasons]
status_reason = ', '.join(status_reasons)
else:
status = rpc_api.SOFTWARE_DEPLOYMENT_COMPLETE
status_reason = _('Outputs received')
self.update_software_deployment(
cnxt, deployment_id=deployment_id,
output_values=ov, status=status, status_reason=status_reason,
config_id=None, input_values=None, action=None,
updated_at=updated_at)
# Return a string describing the outcome of handling the signal data
return event_reason
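    # A hypothetical example of the ``details`` payload handled above, using
    # the conventional deploy output names (the values are illustrative):
    #
    #   {'deploy_stdout': 'Installed packages', 'deploy_stderr': '',
    #    'deploy_status_code': 0}
    #
    # A non-zero 'deploy_status_code' marks the deployment FAILED, as does
    # any output flagged error_output in the config.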
def update_software_deployment(self, cnxt, deployment_id, config_id,
input_values, output_values, action,
status, status_reason, updated_at):
update_data = {}
if config_id:
update_data['config_id'] = config_id
if input_values:
update_data['input_values'] = input_values
if output_values:
update_data['output_values'] = output_values
if action:
update_data['action'] = action
if status:
update_data['status'] = status
if status_reason:
update_data['status_reason'] = status_reason
if updated_at:
update_data['updated_at'] = timeutils.normalize_time(
timeutils.parse_isotime(updated_at))
else:
update_data['updated_at'] = timeutils.utcnow()
sd = software_deployment_object.SoftwareDeployment.update_by_id(
cnxt, deployment_id, update_data)
# only push metadata if this update resulted in the config_id
# changing, since metadata is just a list of configs
if config_id:
self._push_metadata_software_deployments(cnxt, sd.server_id, sd)
return api.format_software_deployment(sd)
def delete_software_deployment(self, cnxt, deployment_id):
software_deployment_object.SoftwareDeployment.delete(
cnxt, deployment_id)
|
miguelgrinberg/heat
|
heat/engine/service_software_config.py
|
Python
|
apache-2.0
| 12,480
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('account', '0009_auto_20151120_0859'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='avatar',
field=models.ImageField(null=True, upload_to=b'Users', blank=True),
),
]
|
SISTEMAsw/TAMP
|
gui/account/migrations/0010_auto_20151120_0904.py
|
Python
|
mit
| 436
|
import unittest
import requests_mock
from alertaclient.api import Client
class GroupTestCase(unittest.TestCase):
def setUp(self):
self.client = Client()
self.key = """
{
"group": {
"count": 0,
"href": "http://localhost:8080/group/8ed5d256-4205-4dfc-b25d-185bd019cb21",
"id": "8ed5d256-4205-4dfc-b25d-185bd019cb21",
"name": "myGroup",
"text": "test group"
},
"id": "8ed5d256-4205-4dfc-b25d-185bd019cb21",
"status": "ok"
}
"""
@requests_mock.mock()
def test_group(self, m):
m.post('http://localhost:8080/group', text=self.key)
group = self.client.create_group(name='myGroup', text='test group')
self.assertEqual(group.name, 'myGroup')
self.assertEqual(group.text, 'test group')
|
alerta/python-alerta
|
tests/unit/test_groups.py
|
Python
|
mit
| 939
|
# Copyright (c) 2015-2016, NVIDIA CORPORATION. All rights reserved.
import mock
import tempfile
from . import resize_image
from digits import test_utils
test_utils.skipIfNotFramework('none')
class TestOutputValidation():
def test_no_filename(self):
assert resize_image.validate_output_file(None), 'All new files should be valid'
@mock.patch('os.access')
def test_not_writable(self, mock_access):
mock_access.return_value = False
with tempfile.NamedTemporaryFile('r') as f:
assert not resize_image.validate_output_file(f.name), 'validation should not pass on unwritable file'
def test_normal(self):
with tempfile.NamedTemporaryFile('r') as f:
assert resize_image.validate_output_file(f.name), 'validation should pass on temporary file'
class TestInputValidation():
def test_does_not_exist(self):
assert not resize_image.validate_input_file(''), 'validation should not pass on missing file'
@mock.patch('os.access')
def test_unreadable_file(self, mock_access):
mock_access.return_value = False
with tempfile.NamedTemporaryFile('r') as f:
assert not resize_image.validate_input_file(f.name), 'validation should not pass on unreadable file'
class TestRangeValidation():
def test_number_none_and_not_allowed(self):
assert not resize_image.validate_range(
None, allow_none=False), 'number=None should not be allowed with allow_none=False'
def test_number_not_float_compatible(self):
value = 'a'
assert not resize_image.validate_range(value), 'number=%s should not be accepted' % value
def test_number_below_min(self):
assert not resize_image.validate_range(0, min_value=1), 'validation should not pass with number < min_value'
def test_number_above_max(self):
assert not resize_image.validate_range(2, max_value=1), 'validation should not pass with number > max_value'
def test_range(self):
assert resize_image.validate_range(
5, min_value=0, max_value=255), 'validation should pass with 5 in range (0, 255)'
|
gheinrich/DIGITS-GAN
|
digits/tools/test_resize_image.py
|
Python
|
bsd-3-clause
| 2,136
|
#!/usr/bin/env python
import plotly.plotly as py
from plotly.graph_objs import Data, Layout, Figure, Scatter, Marker
from vsc.pbs.pbsnodes import PbsnodesParser
from vsc.plotly_utils import create_annotations, sign_in
def compute_coordinates(x, y, options):
x_coords = []
y_coords = []
for j in xrange(1, 1 + len(y)):
for i in xrange(1, 1 + len(x)):
x_coords.append(i)
y_coords.append(j)
return x_coords, y_coords
def compute_cpu_colors(cpu, options):
nr_blues = 7
color_map = [
'rgb(37,0,250)',
'rgb(57,28,250)',
'rgb(79,52,250)',
'rgb(107,85,250)',
'rgb(138,119,250)',
'rgb(164,150,250)',
'rgb(200,200,200)', # grey
'rgb(250,177,177)',
'rgb(250,93,93)',
'rgb(250,0,0)',
]
down_color = 'rgb(0,0,0)'
colors = []
for cpu_value in cpu:
if cpu_value < -0.1:
colors.append(down_color)
else:
if cpu_value <= 1.01:
idx = int(round((nr_blues - 1)*cpu_value))
elif cpu_value <= 1.06:
idx = nr_blues
elif cpu_value <= 2.0:
idx = nr_blues + 1
else:
idx = nr_blues + 2
colors.append(color_map[idx])
return colors
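# Worked examples of the mapping above (nr_blues = 7, so indices 0-5 are
# blues, 6 is grey and 7-9 are reds): load 0.0 -> color_map[0] (deep blue),
# 0.5 -> color_map[3], 1.0 -> color_map[6] (grey), 1.05 -> color_map[7],
# 1.5 -> color_map[8], anything above 2.0 -> color_map[9]; loads below
# -0.1 mark a node as down (black).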
def compute_mem_sizes(mem, options):
sizes = []
down_size = 10
for mem_value in mem:
if mem_value < -0.1:
sizes.append(down_size)
else:
size = 15 + 20*mem_value
sizes.append(size)
return sizes
def compute_status_symbols(status, options):
symbol_map = {
'free': 'circle',
'down': 'cross',
'singlejob': 'square',
'multijob': 'diamond',
}
symbols = []
for state in status:
        if state.startswith('down') or state.startswith('offline'):
symbols.append(symbol_map['down'])
else:
symbols.append(symbol_map[state])
return symbols
def compute_texts(names, cpu, mem, status, jobs):
texts = []
for idx in xrange(len(names)):
text_str = '<b>{0}</b>'.format(names[idx])
if not (status[idx].startswith('down') or status[idx].startswith('offline')):
text_str += '<br>CPU: {0:.2f}'.format(cpu[idx])
text_str += '<br>MEM: {0:.2f}'.format(mem[idx])
if status[idx] != 'free':
text_str += '<br>JOB: {0}'.format(','.join(jobs[idx]))
else:
text_str += ' DOWN'
texts.append(text_str)
return texts
def collect_coordinates(names, node_map):
x_coords = []
y_coords = []
for name in names:
x = int(node_map['nodes'][name][0])
y = int(node_map['nodes'][name][1])
x_coords.append(x)
y_coords.append(y)
return x_coords, y_coords
def create_plot(names, cpu, mem, status, jobs, x, y, options,
node_map=None):
if node_map:
x_coords, y_coords = collect_coordinates(names, node_map)
else:
x_coords, y_coords = compute_coordinates(x, y, options)
cpu_colors = compute_cpu_colors(cpu, options)
mem_sizes = compute_mem_sizes(mem, options)
status_symbols = compute_status_symbols(status, options)
texts = compute_texts(names, cpu, mem, status, jobs)
trace = Scatter(
x=x_coords, y=y_coords, mode='markers',
marker=Marker(
color=cpu_colors,
size=mem_sizes,
symbol=status_symbols,
),
text=texts,
)
data = Data([trace])
layout = Layout(
title='{0} load'.format(options.partition),
showlegend=False,
annotations=create_annotations(),
xaxis={'autotick': False},
yaxis={'autotick': False},
width=950,
height=800,
hovermode='closest',
)
figure = Figure(data=data, layout=layout)
filename = '{0}_cpu_load'.format(options.partition)
if options.dryrun:
return 'dryrun'
else:
url = py.plot(figure, filename=filename, auto_open=False)
return url
def compute_maps(nodes, names):
cpu = []
mem = []
for node in (n for n in nodes if n.hostname in names):
cpu.append(node.cpuload if node.cpuload is not None else -1.0)
mem.append(node.memload if node.memload is not None else -1.0)
return cpu, mem
def compute_job_status(nodes, names):
jobs = []
status = []
for node in (n for n in nodes if n.hostname in names):
if node.status:
if node.job_ids:
jobs.append(node.job_ids)
if len(node.job_ids) > 1:
status.append('multijob')
else:
status.append('singlejob')
elif node.state.startswith('down') or node.state.startswith('offline'):
jobs.append([])
status.append('down')
else:
jobs.append([])
status.append('free')
else:
jobs.append(None)
status.append('down')
return jobs, status
def compute_xy_labels(options):
n_min = options.node_offset
n_max = n_min + options.nr_nodes
x_labels = ['n{0:02d}'.format(i) for i in range(n_min, n_max)]
y_labels = options.enclosures.split(',')
return x_labels, y_labels
if __name__ == '__main__':
from argparse import ArgumentParser
import json
import subprocess
import sys
arg_parser = ArgumentParser(description='Create a heatmap of CPU load')
arg_parser.add_argument('--partition', default='thinking',
help='cluster partition to visualize')
arg_parser.add_argument('--enclosures', default='r1i0,r1i1,r1i2,r2i0,r2i1,r2i2,r3i0,r3i1,r3i2,r4i0,r4i1,r5i0,r5i1,r4i2,r5i2,r8i0',
help='list of enclosures')
arg_parser.add_argument('--nr_nodes', type=int, default=16,
help='number of nodes per IRU')
arg_parser.add_argument('--node_offset', type=int, default=1,
help='node offset')
arg_parser.add_argument('--pbsnodes', default='/usr/local/bin/pbsnodes',
help='pbsnodes command to use')
arg_parser.add_argument('--conf', default='~/.plotly/plotly.conf',
help='configuration file to use')
arg_parser.add_argument('--node_map', help='node map file to use')
arg_parser.add_argument('--verbose', action='store_true',
help='verbose output')
arg_parser.add_argument('--dryrun', action='store_true',
help='do not create plot')
arg_parser.add_argument('--file', help='file with pbsnodes output')
options = arg_parser.parse_args()
sign_in(options.conf)
if options.node_map:
with open(options.node_map, 'r') as node_map_file:
node_map = json.load(node_map_file)
else:
node_map = None
parser = PbsnodesParser()
if options.file:
with open(options.file, 'r') as pbs_file:
nodes = parser.parse_file(pbs_file)
else:
try:
node_output = subprocess.check_output([options.pbsnodes])
nodes = parser.parse(node_output)
except subprocess.CalledProcessError:
sys.stderr.write('### error: could not execute pbsnodes\n')
sys.exit(1)
if options.verbose:
print '{0:d} nodes found'.format(len(nodes))
if options.node_map:
x_labels = node_map['x_labels']
y_labels = node_map['y_labels']
else:
x_labels, y_labels = compute_xy_labels(options)
if options.verbose:
print '{0:d} x-labels, {1:d} y-labels'.format(len(x_labels),
len(y_labels))
if options.node_map:
names = node_map['nodes'].keys()
else:
names = [node.hostname for node in nodes
if node.has_property(options.partition)]
if options.verbose:
print 'names:'
print '\n'.join(names)
cpu, mem = compute_maps(nodes, names)
jobs, status = compute_job_status(nodes, names)
url = create_plot(names, cpu, mem, status, jobs,
x_labels, y_labels, options, node_map)
print 'URL: {0}'.format(url)
|
gjbex/vsc-monitoring
|
scripts/plot_cluster_load_map.py
|
Python
|
lgpl-3.0
| 8,306
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
URL patterns for testing Horizon views.
"""
from django.conf.urls.defaults import patterns, url, include
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
import horizon
urlpatterns = patterns('',
url(r'^$', 'horizon.views.splash', name='splash'),
url(r'^auth/', include('openstack_auth.urls')),
url(r'', include(horizon.urls)),
url(r'^qunit/$',
TemplateView.as_view(template_name="horizon/qunit.html"),
name='qunit_tests')
)
urlpatterns += staticfiles_urlpatterns()
|
tylertian/Openstack
|
openstack F/horizon/horizon/tests/testurls.py
|
Python
|
apache-2.0
| 1,381
|
'''Configuration file for the FENS2014 poster figures.'''
from __future__ import absolute_import, print_function
scale_factor = 2.5
tick_width = 1. * scale_factor
tick_len = 6. * scale_factor
def get_config():
return _config
_config = {
'scale_factor': scale_factor,
# Sections
'mpl': {
'font.size' : 30,
'lines.linewidth' : scale_factor,
'lines.markersize' : 6. * scale_factor,
'axes.linewidth' : scale_factor,
'xtick.major.size' : tick_len,
'xtick.major.width' : tick_width,
'xtick.major.pad' : 4*scale_factor,
'xtick.minor.size' : tick_len / 2.,
'xtick.minor.width' : tick_width,
'xtick.direction' : 'out',
'ytick.major.size' : tick_len,
'ytick.major.width' : tick_width,
'ytick.major.pad' : 4*scale_factor,
'ytick.minor.size' : tick_len / 2.,
'ytick.minor.width' : tick_width,
'ytick.direction' : 'out',
},
'GridSweepsPlotter': {
'scale_factor' : .8,
'cbar': [1, 0, 0],
'cbar_kw' : {
'label': '',
'location': 'left',
'pad': .2,
},
'sigma_title': False,
'ann': [
dict(
txt='a',
rc=(5, 15),
xytext_offset=(0.5, 1.5),
color='black'
)
],
},
'GridExamplesPlotter': {
'scale_factor': .8,
},
'ConnectionFunctionPlotter': {
'fig_size': (3, 2),
'ylabel_coords': (-.1, .5),
},
'VmExamplesPlotter': {
'fig_size': (2.3, 1.25),
'scale_factor': .9,
},
'GammaScatterAllPlotter': {
'fig_size': (3.2, 3.2),
'legend_kwargs': dict(
loc=(0, 1.),
fontsize='small',
frameon=False,
scatterpoints=1,
ncol=3,
),
        'tight_layout_kwargs': {
            'rect': (.01, .01, .99, .85),
            'pad': 0,
        },
},
'MainScatterGridsBumpsPlotter': {
'fig_size': (4., 2.5),
'tight_layout_kwargs': {
'rect': (0.05, 0.05, 0.95, 0.85),
},
'legend_kwargs': dict(
loc=(0.05, 1.02),
frameon=False,
),
},
'EIRasterPlotter': {
'fig_size': (3, 1.5),
'fig_ext': 'pdf',
},
'EIRatePlotter': {
'fig_size': (3, .5),
'rateTop': .85
},
'MainBumpFormationPlotter': {
'xticks' : [True]*3,
'ann': ([], [], []),
},
'GammaSweepsPlotter': {
'AC_xticks': [True]*3,
'ann': [
dict(
txt='a',
rc=(5, 15),
xytext_offset=(1.5, 1),
color='white',
),
],
},
'GammaExamplePlotter': {
'scale_factor': .9,
'xscales': [
[0, 0, 1],
[0, 0, 0],
],
'sigma_titles': [
[1, 1, 1],
[0, 0, 0],
],
'xscale_kw': dict(
x=0.75, y=.2,
),
},
}
##############################################################################
|
MattNolanLab/ei-attractor
|
grid_cell_model/simulations/007_noise/figures/fens2014-poster/config.py
|
Python
|
gpl-3.0
| 3,260
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# Copyright (C) 2009 Francesco Piccinno
#
# Author: Francesco Piccinno <stack.box@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import sys
import random
import unittest
import tempfile
import gtk
import gtkhex
def random_file():
"""
Create a temporary file with mktemp and fill it with random data
"""
if os.name == 'nt':
tfile = open(tempfile.mktemp(), 'wb+')
else:
tfile = tempfile.NamedTemporaryFile()
tsize = random.randint(1, 3000)
buff = ""
for x in xrange(tsize):
buff += chr(random.randint(0, 255))
tfile.write(buff)
tfile.flush()
if os.name == 'nt':
tfile.close()
return (tfile, buff)
class MyHex(gtkhex.Document):
__gtype_name__ = 'MyHex'
def do_document_changed(self, cdata, push):
assert isinstance(cdata, gtkhex.ChangeData)
assert isinstance(push, bool)
if cdata.v_byte is not None:
assert isinstance(cdata.v_byte, str)
assert isinstance(cdata.end, long)
assert isinstance(cdata.insert, bool)
assert isinstance(cdata.lower_nibble, bool)
assert isinstance(cdata.rep_len, long)
assert isinstance(cdata.start, long)
assert isinstance(cdata.type, int)
if cdata.v_string is not None:
assert isinstance(cdata.v_string, str)
class TestWidget(unittest.TestCase):
def setUp(self):
self.file, self.buff = random_file()
self.doc = gtkhex.hex_document_new_from_file(self.file.name)
self.wid = gtkhex.Hex(self.doc)
def testAH(self):
data = self.buff[0:1]
ahl = self.wid.insert_autohighlight(data, 'yellow')
self.assertTrue(isinstance(ahl, gtkhex.AutoHighlight))
self.wid.delete_autohighlight(ahl)
class TestHexDocument(unittest.TestCase):
def setUp(self):
self.file, self.buff = random_file()
self.doc = gtkhex.hex_document_new_from_file(self.file.name)
f = open(self.file.name, 'rb')
txt = f.read()
f.close()
if txt != self.buff:
raise Exception('File consistency violated')
assert self.doc is not None, 'Error in gtkhex.Document'
def testCD(self):
d = MyHex()
pattern = 'miao'
d.set_data(0, len(pattern), d.file_size, pattern, True)
d.set_byte('S', 0, True, True)
def tearDown(self):
try:
os.unlink(self.file.name)
self.doc.destroy()
self.doc = None
self.file = None
self.buff = None
except:
pass
def test_file(self):
"Test file consinstency"
f = open(self.file.name, "rb")
tmp = f.read()
f.close()
self.assertTrue(tmp == self.buff)
self.assertTrue(len(tmp) == len(self.buff))
def test_get_data(self):
"Test get_data function"
txt = self.doc.get_data(0, len(self.buff))
self.assertTrue(len(txt) == len(self.buff))
self.assertTrue(txt == self.buff)
def test_random_get_data(self):
"Test get_data by getting random slices of text"
start = random.randint(0, len(self.buff) - 1)
length = random.randint(1, len(self.buff) - 1 - start)
data = self.doc.get_data(start, length)
orig = self.buff[start:start + length]
self.assertTrue(len(data) == len(orig))
self.assertTrue(orig == data)
def test_out_get_data(self):
start = random.randint(0, len(self.buff) - 1)
data = self.doc.get_data(start, 1)
byte = self.doc.get_byte(start)
orig = self.buff[start]
self.assertTrue(data == byte)
self.assertTrue(byte == orig)
def test_out_of_limit(self):
out = len(self.buff)
self.assertTrue(self.doc.get_byte(out) is None)
out += 1000
self.assertTrue(self.doc.get_byte(out) is None)
out = -1
self.assertTrue(self.doc.get_byte(out) is None)
self.assertTrue(self.doc.get_data(0, 0) == '')
self.assertRaises(ValueError, gtkhex.Document.get_data, self.doc,
0, sys.maxint)
self.assertRaises(ValueError, gtkhex.Document.get_data, self.doc,
-23, -20)
def test_get_byte(self):
"Test get_byte function"
target = random.randint(0, len(self.buff) - 1)
self.assertTrue(self.doc.get_byte(target) == self.buff[target])
def test_set_byte(self):
"Test set_byte function with replace"
char = chr(random.randint(0, 255))
offset = random.randint(0, len(self.buff) - 1)
old = self.doc.get_byte(offset)
self.doc.set_byte(char, offset, False, False)
self.assertTrue(self.doc.get_byte(offset) == char)
def test_set_byte_insert(self):
"Test set_byte function with insert"
char = chr(random.randint(0, 255))
offset = random.randint(0, len(self.buff) - 1)
old = self.doc.get_byte(offset)
self.doc.set_byte(char, offset, True, False)
self.assertTrue(self.doc.get_byte(offset) == char)
self.assertTrue(self.doc.get_byte(offset + 1) == old)
self.assertTrue(self.doc.file_size == len(self.buff) + 1)
buff = self.buff[:offset] + char + self.buff[offset:]
txt = self.doc.get_data(0, self.doc.file_size)
self.assertTrue(buff == txt)
tfile, fname = tempfile.mkstemp()
self.doc.write_to_file(os.fdopen(tfile, "w+b"))
f = open(fname, "r")
out = f.read()
f.close()
os.unlink(fname)
self.assertTrue(out == txt)
def test_export_html(self):
"Test the export_html function"
if os.name == 'nt':
return
# Really dummy
tfile, path = tempfile.mkstemp()
self.doc.export_html(os.path.dirname(path), os.path.basename(path),
0, self.doc.file_size, 20, 20, gtkhex.GROUP_BYTE)
self.assertTrue(os.system("tidy -qe %s" % path) != 2)
f = os.fdopen(tfile, "w+b")
f.close()
os.unlink(path)
def test_find_funcs(self):
"Test find_* funcs"
start = random.randint(0, len(self.buff) - 1)
length = random.randint(1, len(self.buff) - 1 - start)
txt = self.buff[start:start + length]
self.assertTrue(self.doc.find_forward(txt, 0) == start)
self.assertTrue(self.doc.find_forward(txt, -1) is None)
self.assertTrue(self.doc.find_forward(txt, self.doc.file_size) is None)
self.assertTrue(self.doc.find_forward(txt, self.doc.file_size + 1) is None)
self.assertTrue(self.doc.find_backward(txt, 0) is None)
self.assertTrue(self.doc.find_backward(txt, -1) is None)
self.assertTrue(self.doc.find_backward(txt, self.doc.file_size) == start)
self.assertTrue(self.doc.find_backward(txt, self.doc.file_size + 1) is None)
def test_delete_data(self):
"Test the delete_data function"
start = random.randint(0, len(self.buff) - 1)
length = random.randint(1, len(self.buff) - 1 - start)
self.doc.delete_data(start, length, True)
self.assertTrue(self.doc.file_size == len(self.buff) - length)
self.doc.undo()
self.assertTrue(self.doc.file_size == len(self.buff))
self.doc.redo()
self.assertTrue(self.doc.file_size == len(self.buff) - length)
txt = self.buff[:start] + self.buff[start+length:]
self.assertTrue(txt == self.doc.get_data(0, self.doc.file_size))
def test_get_list(self):
l = gtkhex.hex_document_get_list()
self.assertTrue(isinstance(l, list))
self.assertTrue(self.doc in l)
def test_null_bytes_and_set_data(self):
pattern = "A\x00AB\x00A\x00B\x00A\x00"
d = gtkhex.Document()
d.set_data(0, len(pattern), d.file_size, pattern, False)
        # TODO: confirm the intended argument order here:
        #d.set_data(0, d.file_size, len(pattern), pattern, False)
txt = d.get_data(0, d.file_size)
self.assertTrue(txt == pattern)
self.assertTrue(d.find_forward("A\x00B") == pattern.index("A\x00B"))
self.assertTrue(d.file_size == len(pattern))
d.delete_data(0, len(pattern), True)
self.assertRaises(ValueError, gtkhex.Document.get_data, d, -20, d.file_size)
self.assertTrue(d.get_data(0, d.file_size) == '')
self.assertTrue(d.file_size == 0)
if __name__ == '__main__':
unittest.main()
|
nopper/pygtkhex
|
tests/testcase.py
|
Python
|
gpl-2.0
| 9,177
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from ..message import Message
from . import register
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'ok_1',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
@register
class ok_1(Message):
''' Define the ``OK`` message (revision 1) for acknowledging successful
handling of a previous message.
    The ``content`` fragment for this message is empty.
'''
msgtype = 'OK'
revision = 1
@classmethod
def create(cls, request_id, **metadata):
''' Create an ``OK`` message
Args:
request_id (str) :
                The message ID of the message that precipitated the OK.
Any additional keyword arguments will be put into the message
``metadata`` fragment as-is.
'''
header = cls.create_header(request_id=request_id)
return cls(header, metadata, {})
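# A minimal usage sketch (illustrative only; the request id is made up and
# the attribute names are assumed from the Message base class):
#
#     msg = ok_1.create('msg-123', extra='value')
#     assert msg.msgtype == 'OK'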
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
timsnyder/bokeh
|
bokeh/protocol/messages/ok.py
|
Python
|
bsd-3-clause
| 2,504
|
# -*- coding: utf-8 -*-
from django.core.mail import EmailMultiAlternatives
from django.template import Context, Template
from django.template.loader import get_template
from helpers import ClientRouter, MailAssetsHelper, strip_accents
class UserMail:
"""
This class is responsible for firing emails for Users and Nonprofits
"""
from_email = 'Atados <site@atados.com.br>'
def __init__(self, user):
self.whole_user = user # This is the Nonprofit or Volunteer object
        self.user = user.user if type(user).__name__ != 'User' else user # This is the User object
self.global_context = {
"assets": {
"check": "https://s3.amazonaws.com/atados-us/images/check.png",
"iconFacebook": "https://s3.amazonaws.com/atados-us/images/icon-fb.png",
"iconInstagram": "https://s3.amazonaws.com/atados-us/images/icon-insta.png",
"logoAtadosSmall": "https://s3.amazonaws.com/atados-us/images/logo.small.png",
"logoAtadosSmall2": "https://s3.amazonaws.com/atados-us/images/mandala.png"
}
}
def sendEmail(self, template_name, subject, context, user_email=None):
text_content = get_template('email/{}.txt'.format(template_name)).render(context)
html_content = get_template('email/{}.html'.format(template_name)).render(context)
        msg = EmailMultiAlternatives(subject, text_content, self.from_email, [user_email if user_email else self.user.email])
        # text_content already serves as the plain-text body, so only the
        # HTML version needs to be attached as an alternative.
        msg.attach_alternative(html_content, "text/html")
return msg.send() > 0
def make_context(self, data):
context_data = self.global_context.copy()
context_data.update(data)
return Context(context_data)
def sendSignupConfirmation(self, site, token):
return self.sendEmail('emailVerification', 'Confirme seu email do Atados.', self.make_context({ 'token': token , 'site': site}))
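# Illustrative sketch of how the helpers above compose (the volunteer object
# and site URL are hypothetical; the 'email/emailVerification.*' templates
# must exist for the render to succeed):
#
#     mail = UserMail(some_volunteer)
#     sent = mail.sendSignupConfirmation('https://www.atados.com.br', 'token-123')
#     # sendEmail renders the .txt and .html templates with make_context()'s
#     # merged data and returns True when Django reports the message sent.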
class VolunteerMail(UserMail):
"""
This class contains all emails sent to volunteers
"""
def sendSignup(self):
"""
Email A/B from ruler
Sent when volunteer completes registration
"""
return self.sendEmail('volunteerSignup', 'Eba! Seu cadastro foi feito com sucesso', self.make_context({}))
    def sendFacebookSignup(self):  # currently unused
"""
Sent when volunteer completes registration from Facebook
"""
return self.sendEmail('volunteerFacebookSignup', 'Seja bem vindo ao Atados! \o/', self.make_context({}))
def sendAppliesToProject(self, project):
"""
Email for ruler C
Sent when volunteer applies to project
"""
return self.sendEmail('volunteerAppliesToProject', u'Você se inscreveu em uma vaga :)', self.make_context({'project': project}))
def askActInteractionConfirmation(self, project, volunteer):
"""
Email for ruler D
        Sent to ask the volunteer to confirm ongoing participation (routine monitoring)
"""
confirm_url = ClientRouter.mail_routine_monitoring_build_form_url(True, volunteer.user.email, project.nonprofit.name, "")
refute_url = ClientRouter.mail_routine_monitoring_build_form_url(False, volunteer.user.email, project.nonprofit.name, "")
        return self.sendEmail('askActInteractionConfirmation', u'Acompanhamento de Rotina :)',
self.make_context({
'project': project,
'confirm_url': confirm_url,
'refute_url': refute_url
})
)
def sendAskAboutProjectExperience(self, apply):
"""
"""
subject = u"Como foi sua experiência com a Atados!"
feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('volunteer', apply)
return self.sendEmail('volunteerAskAboutProjectExperience', subject, self.make_context({
'project_name': apply.project.name,
'feedback_form_url': feedback_form_url,
}), apply.volunteer.user.email)
#+ def sendAfterApply4Weeks(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteerAfterApply4Weeks', '~ ~ ~ ~ ~', context)
#+ def send3DaysBeforePontual(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteer3DaysBeforePontual', '~ ~ ~ ~ ~', context)
class NonprofitMail(UserMail):
"""
This class contains all emails sent to nonprofits
"""
def sendSignup(self):
"""
Email 1 from ruler
"""
return self.sendEmail('nonprofitSignup', 'Recebemos seu cadastro :)', self.make_context({
'review_profile_url': ClientRouter.edit_nonprofit_url(self.user.slug)
}))
def sendApproved(self):
"""
Email 2 from ruler
"""
return self.sendEmail('nonprofitApproved', 'Agora você tem um perfil no Atados', self.make_context({
'new_act_url': ClientRouter.new_act_url()
}))
def sendProjectPostingSuccessful(self, project):
"""
Email *NEW*
"""
return self.sendEmail('projectPostingSuccessful', 'Vaga criada com sucesso!', self.make_context({
'project': project,
'edit_project_url': ClientRouter.edit_project_url(project.slug)
}))
def sendProjectApproved(self, project):
"""
Email 3 from ruler
"""
return self.sendEmail('projectApproved', 'Publicamos a sua vaga de voluntariado', self.make_context({
'project': project,
'act_url': ClientRouter.view_act_url(project.slug)
}))
def sendGetsNotifiedAboutApply(self, apply, message):
"""
Email 4 from ruler
"""
try:
subject = u'Novo voluntário para o {}'.format(apply.project.name)
except UnicodeEncodeError:
subject = u'Novo voluntário para o {}'.format(strip_accents(apply.project.name))
return self.sendEmail('nonprofitGetsNotifiedAboutApply', subject, self.make_context({
'apply': apply,
'volunteer_message': message,
'answer_volunteer_url': ClientRouter.view_volunteer_url(apply.volunteer.user.slug)
}), apply.project.email)
def sendAskAboutProjectExperience(self, project):
"""
"""
subject = u"Nos conta como foi sua experiência com a Atados!"
act_url = ClientRouter.edit_project_url(project.slug)
feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('nonprofit', project)
return self.sendEmail('nonprofitAskAboutProjectExperience', subject, self.make_context({
'project_name': project.name,
'feedback_form_url': feedback_form_url,
'act_url': act_url,
}), project.email)
#+ def send1MonthInactive(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofit1MonthInactive', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendPontual(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitPontual', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendRecorrente(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitRecorrente', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
|
atados/api
|
atados_core/emails.py
|
Python
|
mit
| 7,061
|
# -*- coding: utf-8 -*-
"""
Code to manage fetching and storing the metadata of IdPs.
"""
#pylint: disable=no-member
from celery.task import task # pylint: disable=import-error,no-name-in-module
import datetime
import dateutil.parser
import logging
from lxml import etree
import requests
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData
log = logging.getLogger(__name__)
SAML_XML_NS = 'urn:oasis:names:tc:SAML:2.0:metadata' # The SAML Metadata XML namespace
class MetadataParseError(Exception):
""" An error occurred while parsing the SAML metadata from an IdP """
pass
@task(name='third_party_auth.fetch_saml_metadata')
def fetch_saml_metadata():
"""
Fetch and store/update the metadata of all IdPs
This task should be run on a daily basis.
It's OK to run this whether or not SAML is enabled.
Return value:
tuple(num_changed, num_failed, num_total)
num_changed: Number of providers that are either new or whose metadata has changed
num_failed: Number of providers that could not be updated
num_total: Total number of providers whose metadata was fetched
"""
if not SAMLConfiguration.is_enabled():
return (0, 0, 0) # Nothing to do until SAML is enabled.
num_changed, num_failed = 0, 0
# First make a list of all the metadata XML URLs:
url_map = {}
for idp_slug in SAMLProviderConfig.key_values('idp_slug', flat=True):
config = SAMLProviderConfig.current(idp_slug)
if not config.enabled:
continue
url = config.metadata_source
if url not in url_map:
url_map[url] = []
if config.entity_id not in url_map[url]:
url_map[url].append(config.entity_id)
# Now fetch the metadata:
for url, entity_ids in url_map.items():
try:
log.info("Fetching %s", url)
if not url.lower().startswith('https'):
log.warning("This SAML metadata URL is not secure! It should use HTTPS. (%s)", url)
response = requests.get(url, verify=True) # May raise HTTPError or SSLError or ConnectionError
response.raise_for_status() # May raise an HTTPError
            parser = etree.XMLParser(remove_comments=True)
            xml = etree.fromstring(response.text, parser)
# TODO: Can use OneLogin_Saml2_Utils to validate signed XML if anyone is using that
for entity_id in entity_ids:
log.info(u"Processing IdP with entityID %s", entity_id)
public_key, sso_url, expires_at = _parse_metadata_xml(xml, entity_id)
changed = _update_data(entity_id, public_key, sso_url, expires_at)
if changed:
log.info(u"→ Created new record for SAMLProviderData")
num_changed += 1
else:
log.info(u"→ Updated existing SAMLProviderData. Nothing has changed.")
except Exception as err: # pylint: disable=broad-except
log.exception(err.message)
num_failed += 1
return (num_changed, num_failed, len(url_map))
def _parse_metadata_xml(xml, entity_id):
"""
Given an XML document containing SAML 2.0 metadata, parse it and return a tuple of
(public_key, sso_url, expires_at) for the specified entityID.
Raises MetadataParseError if anything is wrong.
"""
if xml.tag == etree.QName(SAML_XML_NS, 'EntityDescriptor'):
entity_desc = xml
else:
if xml.tag != etree.QName(SAML_XML_NS, 'EntitiesDescriptor'):
raise MetadataParseError("Expected root element to be <EntitiesDescriptor>, not {}".format(xml.tag))
entity_desc = xml.find(
".//{}[@entityID='{}']".format(etree.QName(SAML_XML_NS, 'EntityDescriptor'), entity_id)
)
    # find() returns None when nothing matches; avoid the element truth-value
    # test, which reflects the child count rather than presence.
    if entity_desc is None:
        raise MetadataParseError("Can't find EntityDescriptor for entityID {}".format(entity_id))
expires_at = None
if "validUntil" in xml.attrib:
expires_at = dateutil.parser.parse(xml.attrib["validUntil"])
if "cacheDuration" in xml.attrib:
cache_expires = OneLogin_Saml2_Utils.parse_duration(xml.attrib["cacheDuration"])
if expires_at is None or cache_expires < expires_at:
expires_at = cache_expires
sso_desc = entity_desc.find(etree.QName(SAML_XML_NS, "IDPSSODescriptor"))
    if sso_desc is None:
        raise MetadataParseError("IDPSSODescriptor missing")
    if 'urn:oasis:names:tc:SAML:2.0:protocol' not in (sso_desc.get("protocolSupportEnumeration") or ''):
raise MetadataParseError("This IdP does not support SAML 2.0")
# Now we just need to get the public_key and sso_url
public_key = sso_desc.findtext("./{}//{}".format(
etree.QName(SAML_XML_NS, "KeyDescriptor"), "{http://www.w3.org/2000/09/xmldsig#}X509Certificate"
))
if not public_key:
raise MetadataParseError("Public Key missing. Expected an <X509Certificate>")
public_key = public_key.replace(" ", "")
binding_elements = sso_desc.iterfind("./{}".format(etree.QName(SAML_XML_NS, "SingleSignOnService")))
sso_bindings = {element.get('Binding'): element.get('Location') for element in binding_elements}
try:
# The only binding supported by python-saml and python-social-auth is HTTP-Redirect:
sso_url = sso_bindings['urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect']
except KeyError:
raise MetadataParseError("Unable to find SSO URL with HTTP-Redirect binding.")
return public_key, sso_url, expires_at
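# Shape of the tuple returned above, with illustrative values:
#
#     public_key = 'MIIC...'                      # X.509 certificate text, whitespace stripped
#     sso_url = 'https://idp.example.com/sso'     # HTTP-Redirect binding location
#     expires_at = datetime.datetime(2020, 1, 1)  # or None when metadata never expires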
def _update_data(entity_id, public_key, sso_url, expires_at):
"""
Update/Create the SAMLProviderData for the given entity ID.
Return value:
False if nothing has changed and existing data's "fetched at" timestamp is just updated.
True if a new record was created. (Either this is a new provider or something changed.)
"""
data_obj = SAMLProviderData.current(entity_id)
fetched_at = datetime.datetime.now()
if data_obj and (data_obj.public_key == public_key and data_obj.sso_url == sso_url):
data_obj.expires_at = expires_at
data_obj.fetched_at = fetched_at
data_obj.save()
return False
else:
SAMLProviderData.objects.create(
entity_id=entity_id,
fetched_at=fetched_at,
expires_at=expires_at,
sso_url=sso_url,
public_key=public_key,
)
return True
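# Hypothetical end-to-end sketch (requires Django settings plus populated
# SAMLConfiguration/SAMLProviderConfig tables; the celery task is also a
# plain callable):
#
#     num_changed, num_failed, num_total = fetch_saml_metadata()
#     log.info("%d/%d providers changed, %d failed",
#              num_changed, num_total, num_failed)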
|
mushtaqak/edx-platform
|
common/djangoapps/third_party_auth/tasks.py
|
Python
|
agpl-3.0
| 6,642
|
# Copyright 2013 Big Switch Networks Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: KC Wang, Big Switch Networks Inc.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
vichoward/python-neutronclient
|
neutronclient/tests/unit/fw/__init__.py
|
Python
|
apache-2.0
| 731
|
#!/usr/bin/env python3
from setuptools import setup
with open("README.md", "r") as f:
long_description = f.read()
setup(
name='passpy',
version='1.0.1',
description='ZX2C4\'s pass compatible Python library and cli',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/bfrascher/passpy',
author='Benedikt Rascher-Friesenhausen',
author_email='benediktrascherfriesenhausen+passpy@gmail.com',
license='GPLv3+',
packages=['passpy'],
install_requires=[
'python-gnupg>=0.3.8',
'GitPython>=1.0.1',
'pyperclip>=1.5',
'click>=2.0',
],
extras_require = {
'color': ['colorama'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Topic :: Utilities',
],
entry_points='''
[console_scripts]
passpy=passpy.__main__:cli
''',
)
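# With this setup.py in place the package installs and exposes the console
# script declared in entry_points (standard setuptools behaviour):
#
#     pip install .
#     passpy --help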
|
bfrascher/passpy
|
setup.py
|
Python
|
gpl-3.0
| 1,565
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List

from src.comparator.resource_database import ResourceDatabase
from src.comparator.wrappers import WithLocation
from google.protobuf import descriptor_pb2 as desc
from google.api import resource_pb2
def make_resource_database(resources):
resource_database = ResourceDatabase()
for resource in resources:
resource_database.register_resource(resource)
return resource_database
def make_file_options_resource_definition(
    resource_type: str, resource_patterns: List[str]
) -> desc.FileOptions:
file_options = desc.FileOptions()
file_options.Extensions[resource_pb2.resource_definition].append(
resource_pb2.ResourceDescriptor(
type=resource_type,
pattern=resource_patterns,
)
)
return file_options
def make_message_options_resource_definition(
    resource_type: str, resource_patterns: List[str]
) -> desc.MessageOptions:
message_options = desc.MessageOptions()
resource = message_options.Extensions[resource_pb2.resource]
resource.type = resource_type
resource.pattern.extend(resource_patterns)
return message_options
def make_field_annotation_resource_reference(resource_type: str, is_child_type: bool):
field_options = desc.FieldOptions()
if is_child_type:
field_options.Extensions[
resource_pb2.resource_reference
].child_type = resource_type
else:
field_options.Extensions[resource_pb2.resource_reference].type = resource_type
return field_options
def make_resource_descriptor(
    resource_type: str, resource_patterns: List[str]
) -> resource_pb2.ResourceDescriptor:
resource_descriptor = resource_pb2.ResourceDescriptor(
type=resource_type, pattern=list(resource_patterns)
)
return WithLocation(resource_descriptor, None, None)
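# Illustrative usage of the helpers above (the resource type and pattern are
# made up, and register_resource is assumed to accept the wrapped descriptor):
#
#     book = make_resource_descriptor(
#         "example.com/Book", ["shelves/{shelf}/books/{book}"]
#     )
#     db = make_resource_database([book])
#     ref = make_field_annotation_resource_reference("example.com/Book", False)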
|
googleapis/proto-breaking-change-detector
|
test/tools/mock_resources.py
|
Python
|
apache-2.0
| 2,378
|
#!/usr/bin/env python
'''
Script to determine if this commit has also
been merged through the stage branch
'''
#
# Usage:
# parent_check.py <branch> <commit_id>
#
#
import sys
import subprocess
def run_cli_cmd(cmd, in_stdout=None, in_stderr=None):
'''Run a command and return its output'''
    if not in_stderr:
        proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    else:
        # subprocess.check_output() neither accepts a stdout argument nor
        # returns a process object, so use Popen here as well and hand the
        # caller-supplied handles straight through.
        proc = subprocess.Popen(cmd, bufsize=-1, stdout=in_stdout, stderr=in_stderr, shell=False)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
return {"rc": proc.returncode, "error": stderr}
else:
return {"rc": proc.returncode, "result": stdout}
def main():
'''Check to ensure that the commit that is currently
being submitted is also in the stage branch.
if it is, succeed
else, fail
'''
branch = 'prod'
if sys.argv[1] != branch:
sys.exit(0)
# git co stg
results = run_cli_cmd(['/usr/bin/git', 'checkout', 'stg'])
# git pull latest
results = run_cli_cmd(['/usr/bin/git', 'pull'])
# setup on the <prod> branch in git
results = run_cli_cmd(['/usr/bin/git', 'checkout', 'prod'])
results = run_cli_cmd(['/usr/bin/git', 'pull'])
# merge the passed in commit into my current <branch>
commit_id = sys.argv[2]
results = run_cli_cmd(['/usr/bin/git', 'merge', commit_id])
# get the differences from stg and <branch>
results = run_cli_cmd(['/usr/bin/git', 'rev-list', '--left-right', 'stg...prod'])
# exit here with error code if the result coming back is an error
if results['rc'] != 0:
print results['error']
sys.exit(results['rc'])
count = 0
# Each 'result' is a commit
# Walk through each commit and see if it is in stg
for commit in results['result'].split('\n'):
# continue if it is already in stg
if not commit or commit.startswith('<'):
continue
# remove the first char '>'
commit = commit[1:]
# check if any remote branches contain $commit
results = run_cli_cmd(['/usr/bin/git', 'branch', '-q', '-r', '--contains', commit], in_stderr=None)
# if this comes back empty, nothing contains it, we can skip it as
# we have probably created the merge commit here locally
if results['rc'] == 0 and len(results['result']) == 0:
continue
# The results generally contain origin/pr/246/merge and origin/pr/246/head
# this is the pull request which would contain the commit in question.
#
# If the results do not contain origin/stg then stage does not contain
# the commit in question. Therefore we need to alert!
if 'origin/stg' not in results['result']:
print "\nFAILED: (These commits are not in stage.)\n"
print "\t%s" % commit
count += 1
# Exit with count of commits in #{branch} but not stg
sys.exit(count)
if __name__ == '__main__':
main()
|
robotmaxtron/openshift-ansible
|
git/parent.py
|
Python
|
apache-2.0
| 3,074
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Fyba(AutotoolsPackage):
"""OpenFYBA is the source code release of the FYBA library, distributed
by the National Mapping Authority of Norway (Statens kartverk) to read
and write files in the National geodata standard format SOSI."""
homepage = "https://github.com/kartverket/fyba"
url = "https://github.com/kartverket/fyba/archive/4.1.1.tar.gz"
version('4.1.1', 'ab687582efdef26593796271529a10cb')
# configure: error: cannot find install-sh or install.sh
force_autoreconf = True
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
# error: macro "min" passed 3 arguments, but takes just 2
# https://github.com/kartverket/fyba/issues/21
patch('gcc-6.patch')
# fatal error: 'sys/vfs.h' file not found
# https://github.com/kartverket/fyba/issues/12
patch('vfs-mount-darwin.patch', when='platform=darwin')
|
tmerrick1/spack
|
var/spack/repos/builtin/packages/fyba/package.py
|
Python
|
lgpl-2.1
| 2,243
|
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
import os
from PyQt4.QtGui import QTableView, \
QAbstractItemView, \
QHeaderView, QStackedWidget, \
QLabel, QSizePolicy
from PyQt4.QtCore import Qt, QString, QSize
import logging
logger = logging.getLogger(__name__)
#===============================================================================
# Common base class that can be used by the labelListView and the boxListView
#===============================================================================
class ListView(QStackedWidget):
PAGE_EMPTY = 0
PAGE_LISTVIEW = 1
def __init__(self, parent = None):
super(ListView, self).__init__(parent=parent)
self.emptyMessage = QLabel("no elements defined yet")
self.emptyMessage.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter )
self.emptyMessage.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.addWidget(self.emptyMessage)
self._table = QTableView()
self.addWidget(self._table)
self._table.clicked.connect(self.tableViewCellClicked)
self._table.doubleClicked.connect(self.tableViewCellDoubleClicked)
self._table.verticalHeader().sectionMoved.connect(self.rowMovedTest)
self._table.setShowGrid(False)
def resetEmptyMessage(self,pystring):
self.emptyMessage.setText(QString(pystring))
def tableViewCellClicked(self, modelIndex):
        '''
        Reimplement this function to react to a single click on a cell
        :param modelIndex:
        '''
# if (modelIndex.column() == self.model.ColumnID.Delete and
# not self._table.model().flags(modelIndex) == Qt.NoItemFlags):
# self._table.model().removeRow(modelIndex.row())
#
def tableViewCellDoubleClicked(self, modelIndex):
        '''
        Reimplement this function to react to a double click on a cell
        :param modelIndex:
        '''
# if modelIndex.column() == self.model.ColumnID.Color:
# self._colorDialog.setBrushColor(self._table.model()[modelIndex.row()].brushColor())
# self._colorDialog.setPmapColor (self._table.model()[modelIndex.row()].pmapColor())
# self._colorDialog.exec_()
# #print "brush color = {}".format(self._colorDialog.brushColor().name())
# #print "pmap color = {}".format(self._colorDialog.pmapColor().name())
# self._table.model().setData(modelIndex, (self._colorDialog.brushColor(),
# self._colorDialog.pmapColor ()))
def rowMovedTest(self, logicalIndex, oldVisualIndex, newVisualIndex):
logger.debug( "{} {} {}".format(logicalIndex, oldVisualIndex, newVisualIndex) )
def _setListViewLook(self):
table = self._table
#table.setDragEnabled(True)
table.setAcceptDrops(True)
table.setFocusPolicy(Qt.NoFocus)
table.setShowGrid(False)
table.horizontalHeader().hide()
table.verticalHeader().hide()
#table.horizontalHeader().setResizeMode(1, QHeaderView.Stretch)
table.horizontalHeader().setResizeMode(QHeaderView.ResizeToContents)
table.setSelectionMode(QAbstractItemView.SingleSelection)
table.setSelectionBehavior(QAbstractItemView.SelectRows)
def selectRow(self, *args, **kwargs):
self._table.selectRow(*args, **kwargs)
def _onRowsChanged(self, parent, start, end):
model = self._table.model()
if model and model.rowCount() > 0:
self.setCurrentIndex(self.PAGE_LISTVIEW)
else:
self.setCurrentIndex(self.PAGE_EMPTY)
        if self.parent() is not None:
            self.parent().updateGeometry()
def setModel(self, model):
QTableView.setModel(self._table, model)
self._table.setSelectionModel(model._selectionModel)
if model.rowCount() > 0:
self.setCurrentIndex(self.PAGE_LISTVIEW)
else:
self.setCurrentIndex(self.PAGE_EMPTY)
model.rowsInserted.connect(self._onRowsChanged)
model.rowsRemoved.connect(self._onRowsChanged)
self.model=model
self._setListViewLook()
@property
def allowDelete(self):
return not self._table.isColumnHidden(self.model.ColumnID.Delete)
@allowDelete.setter
def allowDelete(self, allow):
self._table.setColumnHidden(self.model.ColumnID.Delete, not allow)
def minimumSizeHint(self):
#http://www.qtcentre.org/threads/14764-QTableView-sizeHint%28%29-issues
t = self._table
vHeader = t.verticalHeader()
hHeader = t.horizontalHeader()
doubleFrame = 2 * t.frameWidth()
        w = hHeader.length() + vHeader.width() + doubleFrame
contentH = 0
if self._table.model():
for i in range(self._table.model().rowCount()):
contentH += self._table.rowHeight(i)
contentH = max(90, contentH)
        h = hHeader.height() + contentH + doubleFrame
        return QSize(w, h)
def sizeHint(self):
return self.minimumSizeHint()
def shrinkToMinimum(self):
"""
shrink the view around the
labels which are currently there
"""
t = self._table
hHeader = t.horizontalHeader()
doubleFrame = 2 * t.frameWidth()
contentH = 0
if self._table.model():
for i in range(self._table.model().rowCount()):
contentH += self._table.rowHeight(i)
h = contentH+2
self.setFixedHeight(h)
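# Rough usage sketch (assumes a model compatible with setModel above, i.e.
# one exposing _selectionModel and a ColumnID enum; the model name is a
# placeholder):
#
#     view = ListView()
#     view.resetEmptyMessage("no labels defined yet")
#     view.setModel(label_list_model)
#     view.allowDelete = False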
|
nielsbuwen/ilastik
|
ilastik/widgets/listView.py
|
Python
|
gpl-3.0
| 6,597
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :
# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Fail2Ban developers
__copyright__ = "Copyright (c) 2013 Steven Hiscocks"
__license__ = "GPL"
import unittest, sys, os, fileinput, re, time, datetime, inspect
if sys.version_info >= (2, 6):
import json
else:
import simplejson as json
next = lambda x: x.next()
from ..server.filter import Filter
from ..client.filterreader import FilterReader
from .utils import setUpMyTime, tearDownMyTime
TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), "files")
if os.path.exists('config/fail2ban.conf'):
CONFIG_DIR = "config"
else:
    CONFIG_DIR = '/etc/fail2ban'
class FilterSamplesRegex(unittest.TestCase):
def setUp(self):
"""Call before every test case."""
self.filter = Filter(None)
self.filter.active = True
setUpMyTime()
def tearDown(self):
"""Call after every test case."""
tearDownMyTime()
def testFiltersPresent(self):
"""Check to ensure some tests exist"""
self.assertTrue(
len([test for test in inspect.getmembers(self)
if test[0].startswith('testSampleRegexs')])
>= 10,
"Expected more FilterSampleRegexs tests")
def testSampleRegexsFactory(name):
def testFilter(self):
# Check filter exists
filterConf = FilterReader(name, "jail", {}, basedir=CONFIG_DIR)
self.assertEqual(filterConf.getFile(), name)
self.assertEqual(filterConf.getJailName(), "jail")
filterConf.read()
filterConf.getOptions({})
for opt in filterConf.convert():
if opt[2] == "addfailregex":
self.filter.addFailRegex(opt[3])
elif opt[2] == "maxlines":
self.filter.setMaxLines(opt[3])
elif opt[2] == "addignoreregex":
self.filter.addIgnoreRegex(opt[3])
elif opt[2] == "datepattern":
self.filter.setDatePattern(opt[3])
self.assertTrue(
os.path.isfile(os.path.join(TEST_FILES_DIR, "logs", name)),
"No sample log file available for '%s' filter" % name)
logFile = fileinput.FileInput(
os.path.join(TEST_FILES_DIR, "logs", name))
regexsUsed = set()
for line in logFile:
jsonREMatch = re.match("^# ?failJSON:(.+)$", line)
if jsonREMatch:
try:
faildata = json.loads(jsonREMatch.group(1))
except ValueError, e:
raise ValueError("%s: %s:%i" %
(e, logFile.filename(), logFile.filelineno()))
line = next(logFile)
elif line.startswith("#") or not line.strip():
continue
else:
faildata = {}
ret = self.filter.processLine(
line, returnRawHost=True, checkAllRegex=True)[1]
if not ret:
# Check line is flagged as none match
self.assertFalse(faildata.get('match', True),
"Line not matched when should have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
elif ret:
# Check line is flagged to match
self.assertTrue(faildata.get('match', False),
"Line matched when shouldn't have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
self.assertEqual(len(ret), 1, "Multiple regexs matched %r - %s:%i" %
(map(lambda x: x[0], ret),logFile.filename(), logFile.filelineno()))
# Verify timestamp and host as expected
failregex, host, fail2banTime, lines = ret[0]
self.assertEqual(host, faildata.get("host", None))
t = faildata.get("time", None)
try:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
except ValueError:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
jsonTime = time.mktime(jsonTimeLocal.timetuple())
jsonTime += jsonTimeLocal.microsecond / 1000000
self.assertEqual(fail2banTime, jsonTime,
"UTC Time mismatch fail2ban %s (%s) != failJson %s (%s) (diff %.3f seconds) on: %s:%i %r:" %
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime, logFile.filename(), logFile.filelineno(), line ) )
regexsUsed.add(failregex)
for failRegexIndex, failRegex in enumerate(self.filter.getFailRegex()):
self.assertTrue(
failRegexIndex in regexsUsed,
"Regex for filter '%s' has no samples: %i: %r" %
(name, failRegexIndex, failRegex))
return testFilter
for filter_ in filter(lambda x: not x.endswith('common.conf'), os.listdir(os.path.join(CONFIG_DIR, "filter.d"))):
filterName = filter_.rpartition(".")[0]
if not filterName.startswith('.'):
setattr(
FilterSamplesRegex,
"testSampleRegexs%s" % filterName.upper(),
testSampleRegexsFactory(filterName))
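# For example, a config file filter.d/sshd.conf yields a generated method
# FilterSamplesRegex.testSampleRegexsSSHD, driven by the sample log
# files/logs/sshd (paths relative to CONFIG_DIR and TEST_FILES_DIR).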
|
marclaporte/fail2ban
|
fail2ban/tests/samplestestcase.py
|
Python
|
gpl-2.0
| 5,262
|
from static_const_member_2 import *
c = Test_int()
try:
a = c.forward_field
a = c.current_profile
a = c.RightIndex
a = Test_int.backward_field
a = Test_int.LeftIndex
a = Test_int.cavity_flags
except:
raise RuntimeError
if Foo.BAZ.val != 2*Foo.BAR.val:
raise RuntimeError
|
jrversteegh/softsailor
|
deps/swig-2.0.4/Examples/test-suite/python/static_const_member_2_runme.py
|
Python
|
gpl-3.0
| 306
|
"""
Spanning tests for all the operations that F() expressions can perform.
"""
import datetime
from django.db import connection
from django.db.models import F
from django.test import TestCase, Approximate, skipUnlessDBFeature
from regressiontests.expressions_regress.models import Number, Experiment
class ExpressionsRegressTests(TestCase):
def setUp(self):
Number(integer=-1).save()
Number(integer=42).save()
Number(integer=1337).save()
self.assertEqual(Number.objects.update(float=F('integer')), 3)
def test_fill_with_value_from_same_object(self):
"""
We can fill a value in all objects with an other value of the
same object.
"""
self.assertQuerysetEqual(
Number.objects.all(),
[
'<Number: -1, -1.000>',
'<Number: 42, 42.000>',
'<Number: 1337, 1337.000>'
]
)
def test_increment_value(self):
"""
We can increment a value of all objects in a query set.
"""
self.assertEqual(
Number.objects.filter(integer__gt=0)
.update(integer=F('integer') + 1),
2)
self.assertQuerysetEqual(
Number.objects.all(),
[
'<Number: -1, -1.000>',
'<Number: 43, 42.000>',
'<Number: 1338, 1337.000>'
]
)
def test_filter_not_equals_other_field(self):
"""
We can filter for objects, where a value is not equals the value
of an other field.
"""
self.assertEqual(
Number.objects.filter(integer__gt=0)
.update(integer=F('integer') + 1),
2)
self.assertQuerysetEqual(
Number.objects.exclude(float=F('integer')),
[
'<Number: 43, 42.000>',
'<Number: 1338, 1337.000>'
]
)
def test_complex_expressions(self):
"""
Complex expressions of different connection types are possible.
"""
n = Number.objects.create(integer=10, float=123.45)
self.assertEqual(Number.objects.filter(pk=n.pk)
.update(float=F('integer') + F('float') * 2),
1)
self.assertEqual(Number.objects.get(pk=n.pk).integer, 10)
self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3))
class ExpressionOperatorTests(TestCase):
def setUp(self):
self.n = Number.objects.create(integer=42, float=15.5)
def test_lefthand_addition(self):
# LH Addition of floats and integers
Number.objects.filter(pk=self.n.pk).update(
integer=F('integer') + 15,
float=F('float') + 42.7
)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
def test_lefthand_subtraction(self):
# LH Subtraction of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15,
float=F('float') - 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3))
def test_lefthand_multiplication(self):
# Multiplication of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15,
float=F('float') * 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
def test_lefthand_division(self):
# LH Division of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2,
float=F('float') / 42.7)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3))
def test_lefthand_modulo(self):
# LH Modulo arithmetic on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
def test_lefthand_bitwise_and(self):
# LH Bitwise ands on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') & 56)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
@skipUnlessDBFeature('supports_bitwise_or')
def test_lefthand_bitwise_or(self):
# LH Bitwise or on integers
Number.objects.filter(pk=self.n.pk).update(integer=F('integer') | 48)
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
def test_right_hand_addition(self):
# Right hand operators
Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'),
float=42.7 + F('float'))
# RH Addition of floats and integers
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))
def test_right_hand_subtraction(self):
Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'),
float=42.7 - F('float'))
# RH Subtraction of floats and integers
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3))
def test_right_hand_multiplication(self):
# RH Multiplication of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'),
float=42.7 * F('float'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))
def test_right_hand_division(self):
# RH Division of floats and integers
Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'),
float=42.7 / F('float'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3))
def test_right_hand_modulo(self):
# RH Modulo arithmetic on integers
Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
def test_right_hand_bitwise_and(self):
# RH Bitwise ands on integers
Number.objects.filter(pk=self.n.pk).update(integer=15 & F('integer'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
@skipUnlessDBFeature('supports_bitwise_or')
def test_right_hand_bitwise_or(self):
# RH Bitwise or on integers
Number.objects.filter(pk=self.n.pk).update(integer=15 | F('integer'))
self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 47)
self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
class FTimeDeltaTests(TestCase):
def setUp(self):
sday = datetime.date(2010, 6, 25)
stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000)
midnight = datetime.time(0)
delta0 = datetime.timedelta(0)
delta1 = datetime.timedelta(microseconds=253000)
delta2 = datetime.timedelta(seconds=44)
delta3 = datetime.timedelta(hours=21, minutes=8)
delta4 = datetime.timedelta(days=10)
# Test data is set so that deltas and delays will be
# strictly increasing.
self.deltas = []
self.delays = []
self.days_long = []
# e0: started same day as assigned, zero duration
end = stime+delta0
e0 = Experiment.objects.create(name='e0', assigned=sday, start=stime,
end=end, completed=end.date())
self.deltas.append(delta0)
self.delays.append(e0.start-
datetime.datetime.combine(e0.assigned, midnight))
self.days_long.append(e0.completed-e0.assigned)
# e1: started one day after assigned, tiny duration, data
# set so that end time has no fractional seconds, which
# tests an edge case on sqlite. This Experiment is only
# included in the test data when the DB supports microsecond
# precision.
if connection.features.supports_microsecond_precision:
delay = datetime.timedelta(1)
end = stime + delay + delta1
e1 = Experiment.objects.create(name='e1', assigned=sday,
start=stime+delay, end=end, completed=end.date())
self.deltas.append(delta1)
self.delays.append(e1.start-
datetime.datetime.combine(e1.assigned, midnight))
self.days_long.append(e1.completed-e1.assigned)
# e2: started three days after assigned, small duration
end = stime+delta2
e2 = Experiment.objects.create(name='e2',
assigned=sday-datetime.timedelta(3), start=stime, end=end,
completed=end.date())
self.deltas.append(delta2)
self.delays.append(e2.start-
datetime.datetime.combine(e2.assigned, midnight))
self.days_long.append(e2.completed-e2.assigned)
# e3: started four days after assigned, medium duration
delay = datetime.timedelta(4)
end = stime + delay + delta3
e3 = Experiment.objects.create(name='e3',
assigned=sday, start=stime+delay, end=end, completed=end.date())
self.deltas.append(delta3)
self.delays.append(e3.start-
datetime.datetime.combine(e3.assigned, midnight))
self.days_long.append(e3.completed-e3.assigned)
# e4: started 10 days after assignment, long duration
end = stime + delta4
e4 = Experiment.objects.create(name='e4',
assigned=sday-datetime.timedelta(10), start=stime, end=end,
completed=end.date())
self.deltas.append(delta4)
self.delays.append(e4.start-
datetime.datetime.combine(e4.assigned, midnight))
self.days_long.append(e4.completed-e4.assigned)
self.expnames = [e.name for e in Experiment.objects.all()]
def test_delta_add(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.filter(end__lt=F('start')+delta)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(end__lte=F('start')+delta)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_delta_subtract(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.filter(start__gt=F('end')-delta)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(start__gte=F('end')-delta)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_exclude(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
test_set = [e.name for e in
Experiment.objects.exclude(end__lt=F('start')+delta)]
self.assertEqual(test_set, self.expnames[i:])
test_set = [e.name for e in
Experiment.objects.exclude(end__lte=F('start')+delta)]
self.assertEqual(test_set, self.expnames[i+1:])
def test_date_comparison(self):
for i in range(len(self.days_long)):
days = self.days_long[i]
test_set = [e.name for e in
Experiment.objects.filter(completed__lt=F('assigned')+days)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(completed__lte=F('assigned')+days)]
self.assertEqual(test_set, self.expnames[:i+1])
@skipUnlessDBFeature("supports_mixed_date_datetime_comparisons")
def test_mixed_comparisons1(self):
for i in range(len(self.delays)):
delay = self.delays[i]
if not connection.features.supports_microsecond_precision:
delay = datetime.timedelta(delay.days, delay.seconds)
test_set = [e.name for e in
Experiment.objects.filter(assigned__gt=F('start')-delay)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(assigned__gte=F('start')-delay)]
self.assertEqual(test_set, self.expnames[:i+1])
def test_mixed_comparisons2(self):
delays = [datetime.timedelta(delay.days) for delay in self.delays]
for i in range(len(delays)):
delay = delays[i]
test_set = [e.name for e in
Experiment.objects.filter(start__lt=F('assigned')+delay)]
self.assertEqual(test_set, self.expnames[:i])
test_set = [e.name for e in
Experiment.objects.filter(start__lte=F('assigned')+delay+
datetime.timedelta(1))]
self.assertEqual(test_set, self.expnames[:i+1])
def test_delta_update(self):
for i in range(len(self.deltas)):
delta = self.deltas[i]
exps = Experiment.objects.all()
expected_durations = [e.duration() for e in exps]
expected_starts = [e.start+delta for e in exps]
expected_ends = [e.end+delta for e in exps]
Experiment.objects.update(start=F('start')+delta, end=F('end')+delta)
exps = Experiment.objects.all()
new_starts = [e.start for e in exps]
new_ends = [e.end for e in exps]
new_durations = [e.duration() for e in exps]
self.assertEqual(expected_starts, new_starts)
self.assertEqual(expected_ends, new_ends)
self.assertEqual(expected_durations, new_durations)
def test_delta_invalid_op_mult(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')*self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to multiply datetime by timedelta.")
def test_delta_invalid_op_div(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')/self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to divide datetime by timedelta.")
def test_delta_invalid_op_mod(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')%self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to modulo divide datetime by timedelta.")
def test_delta_invalid_op_and(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')&self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to binary and a datetime with a timedelta.")
def test_delta_invalid_op_or(self):
raised = False
try:
r = repr(Experiment.objects.filter(end__lt=F('start')|self.deltas[0]))
except TypeError:
raised = True
self.assertTrue(raised, "TypeError not raised on attempt to binary or a datetime with a timedelta.")
|
disqus/django-old
|
tests/regressiontests/expressions_regress/tests.py
|
Python
|
bsd-3-clause
| 16,640
|
# Copyright 2013, Mirantis Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class UpdatePool(forms.SelfHandlingForm):
name = forms.CharField(max_length=80, label=_("Name"))
pool_id = forms.CharField(label=_("ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
description = forms.CharField(required=False,
max_length=80, label=_("Description"))
lb_method = forms.ChoiceField(label=_("Load Balancing Method"))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
failure_url = 'horizon:project:loadbalancers:index'
def __init__(self, request, *args, **kwargs):
super(UpdatePool, self).__init__(request, *args, **kwargs)
lb_method_choices = [('ROUND_ROBIN', 'ROUND_ROBIN'),
('LEAST_CONNECTIONS', 'LEAST_CONNECTIONS'),
('SOURCE_IP', 'SOURCE_IP')]
self.fields['lb_method'].choices = lb_method_choices
def handle(self, request, context):
context['admin_state_up'] = (context['admin_state_up'] == 'True')
try:
data = {'pool': {'name': context['name'],
'description': context['description'],
'lb_method': context['lb_method'],
'admin_state_up': context['admin_state_up'],
}}
pool = api.lbaas.pool_update(request, context['pool_id'], **data)
msg = _('Pool %s was successfully updated.') % context['name']
LOG.debug(msg)
messages.success(request, msg)
return pool
except Exception:
msg = _('Failed to update pool %s') % context['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class UpdateVip(forms.SelfHandlingForm):
name = forms.CharField(max_length=80, label=_("Name"))
vip_id = forms.CharField(label=_("ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
description = forms.CharField(required=False,
max_length=80, label=_("Description"))
pool_id = forms.ChoiceField(label=_("Pool"))
session_persistence = forms.ChoiceField(
required=False, initial={}, label=_("Session Persistence"))
cookie_name = forms.CharField(
initial="", required=False,
max_length=80, label=_("Cookie Name"),
help_text=_("Required for APP_COOKIE persistence;"
" Ignored otherwise."))
connection_limit = forms.IntegerField(
min_value=-1, label=_("Connection Limit"),
help_text=_("Maximum number of connections allowed "
"for the VIP or '-1' if the limit is not set"))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
failure_url = 'horizon:project:loadbalancers:index'
def __init__(self, request, *args, **kwargs):
super(UpdateVip, self).__init__(request, *args, **kwargs)
pool_id_choices = []
try:
tenant_id = request.user.tenant_id
pools = api.lbaas.pool_list(request, tenant_id=tenant_id)
except Exception:
pools = []
exceptions.handle(request,
_('Unable to retrieve pools list.'))
pools = sorted(pools,
key=lambda pool: pool.name)
for p in pools:
if (p.vip_id is None) or (p.id == kwargs['initial']['pool_id']):
pool_id_choices.append((p.id, p.name))
self.fields['pool_id'].choices = pool_id_choices
session_persistence_choices = []
for mode in ('SOURCE_IP', 'HTTP_COOKIE', 'APP_COOKIE'):
session_persistence_choices.append((mode, mode))
session_persistence_choices.append(('', _('No session persistence')))
self.fields[
'session_persistence'].choices = session_persistence_choices
def clean(self):
cleaned_data = super(UpdateVip, self).clean()
persistence = cleaned_data.get('session_persistence')
if (persistence == 'APP_COOKIE' and
not cleaned_data.get('cookie_name')):
msg = _('Cookie name is required for APP_COOKIE persistence.')
self._errors['cookie_name'] = self.error_class([msg])
return cleaned_data
def handle(self, request, context):
context['admin_state_up'] = (context['admin_state_up'] == 'True')
if context['session_persistence']:
stype = context['session_persistence']
if stype == 'APP_COOKIE':
cookie = context['cookie_name']
context['session_persistence'] = {'type': stype,
'cookie_name': cookie}
else:
context['session_persistence'] = {'type': stype}
else:
context['session_persistence'] = {}
try:
data = {'vip': {'name': context['name'],
'description': context['description'],
'pool_id': context['pool_id'],
'session_persistence':
context['session_persistence'],
'connection_limit': context['connection_limit'],
'admin_state_up': context['admin_state_up'],
}}
vip = api.lbaas.vip_update(request, context['vip_id'], **data)
msg = _('VIP %s was successfully updated.') % context['name']
LOG.debug(msg)
messages.success(request, msg)
return vip
except Exception:
msg = _('Failed to update VIP %s') % context['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class UpdateMember(forms.SelfHandlingForm):
member_id = forms.CharField(label=_("ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
pool_id = forms.ChoiceField(label=_("Pool"))
weight = forms.IntegerField(max_value=256, min_value=0, label=_("Weight"),
help_text=_("Relative part of requests this "
"pool member serves compared to others"))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
failure_url = 'horizon:project:loadbalancers:index'
def __init__(self, request, *args, **kwargs):
super(UpdateMember, self).__init__(request, *args, **kwargs)
pool_id_choices = []
try:
tenant_id = request.user.tenant_id
pools = api.lbaas.pool_list(request, tenant_id=tenant_id)
except Exception:
pools = []
exceptions.handle(request,
_('Unable to retrieve pools list.'))
pools = sorted(pools,
key=lambda pool: pool.name)
for p in pools:
pool_id_choices.append((p.id, p.name))
self.fields['pool_id'].choices = pool_id_choices
def handle(self, request, context):
context['admin_state_up'] = (context['admin_state_up'] == 'True')
try:
data = {'member': {'pool_id': context['pool_id'],
'weight': context['weight'],
'admin_state_up': context['admin_state_up']}}
member = api.lbaas.member_update(request,
context['member_id'], **data)
msg = _('Member %s was successfully updated.')\
% context['member_id']
LOG.debug(msg)
messages.success(request, msg)
return member
except Exception:
msg = _('Failed to update member %s') % context['member_id']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
class UpdateMonitor(forms.SelfHandlingForm):
monitor_id = forms.CharField(label=_("ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
delay = forms.IntegerField(
min_value=1,
label=_("Delay"),
help_text=_("The minimum time in seconds between regular checks "
"of a member"))
timeout = forms.IntegerField(
min_value=1,
label=_("Timeout"),
help_text=_("The maximum time in seconds for a monitor to wait "
"for a reply"))
max_retries = forms.IntegerField(
max_value=10, min_value=1,
label=_("Max Retries (1~10)"),
help_text=_("Number of permissible failures before changing "
"the status of member to inactive"))
admin_state_up = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
failure_url = 'horizon:project:loadbalancers:index'
def __init__(self, request, *args, **kwargs):
super(UpdateMonitor, self).__init__(request, *args, **kwargs)
def handle(self, request, context):
context['admin_state_up'] = (context['admin_state_up'] == 'True')
try:
data = {'health_monitor': {
'delay': context['delay'],
'timeout': context['timeout'],
'max_retries': context['max_retries'],
'admin_state_up': context['admin_state_up']}}
monitor = api.lbaas.pool_health_monitor_update(
request, context['monitor_id'], **data)
msg = _('Health monitor %s was successfully updated.')\
% context['monitor_id']
LOG.debug(msg)
messages.success(request, msg)
return monitor
except Exception:
msg = _('Failed to update health monitor %s')\
% context['monitor_id']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
|
wangxiangyu/horizon
|
openstack_dashboard/dashboards/project/loadbalancers/forms.py
|
Python
|
apache-2.0
| 11,604
|
"""
python-gerrit
=============
A module that uses the Gerrit REST API as an interface to manage
changes, users, groups, etcetera.
"""
from .gerrit import Gerrit
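# A minimal usage sketch (hypothetical: the constructor arguments and calls
# below are assumptions, not the verified API; see gerrit/gerrit.py for the
# real signatures):
#
#     from gerrit import Gerrit
#     client = Gerrit(url='https://review.example.org')  # assumed kwarg
#     # ...query or update changes, users and groups via `client`...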
|
marhag87/python-gerrit
|
gerrit/__init__.py
|
Python
|
apache-2.0
| 163
|
# Pangrams
# Developer: Murillo Grubler
# Link: https://www.hackerrank.com/challenges/pangrams/problem
alphabet = ['A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z']
count = len(alphabet)
st = input()
letters = 0
letters_used = []
for ch in st:
    ch = ch.upper()
    if ch in alphabet and ch not in letters_used:
        letters_used.append(ch)
        letters += 1
print("pangram" if letters == count else "not pangram")
|
Murillo/Hackerrank-Algorithms
|
Algorithms/Strings/pangrams.py
|
Python
|
mit
| 503
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: ios_facts
version_added: "2.2"
author:
- "Peter Sprygada (@privateip)"
- "Sumit Jaiswal (@justjais)"
short_description: Collect facts from remote devices running Cisco IOS
description:
- Collects a base set of device facts from a remote device that
is running IOS. This module prepends all of the
base network fact keys with C(ansible_net_<fact>). The facts
module will always collect a base set of facts from the device
and can enable or disable collection of additional facts.
extends_documentation_fragment: ios
notes:
- Tested against IOS 15.6
options:
gather_subset:
description:
- When supplied, this argument restricts the facts collected
to a given subset.
- Possible values for this argument include
C(all), C(min), C(hardware), C(config), and C(interfaces).
- Specify a list of values to include a larger subset.
- Use a value with an initial C(!) to collect all facts except that subset.
required: false
default: '!config'
gather_network_resources:
description:
      - When supplied, this argument restricts the facts collected
        to a given subset. Possible values for this argument include
        C(all) and individual resources such as interfaces and vlans.
        Specify a list of values to include a larger subset.
        Values can also be used with an initial C(!) to specify that
        a specific subset should not be collected.
        Valid subsets are 'all', 'interfaces', 'l2_interfaces', 'vlans',
        'lag_interfaces', 'lacp', 'lacp_interfaces', 'lldp_global',
        'lldp_interfaces', 'l3_interfaces'.
version_added: "2.9"
"""
EXAMPLES = """
- name: Gather all legacy facts
ios_facts:
gather_subset: all
- name: Gather only the config and default facts
ios_facts:
gather_subset:
- config
- name: Do not gather hardware facts
ios_facts:
gather_subset:
- "!hardware"
- name: Gather legacy and resource facts
ios_facts:
gather_subset: all
gather_network_resources: all
- name: Gather only the interfaces resource facts and no legacy facts
ios_facts:
gather_subset:
- '!all'
- '!min'
gather_network_resources:
- interfaces
- name: Gather interfaces resource and minimal legacy facts
ios_facts:
gather_subset: min
gather_network_resources: interfaces
- name: Gather L2 interfaces resource and minimal legacy facts
ios_facts:
gather_subset: min
gather_network_resources: l2_interfaces
- name: Gather L3 interfaces resource and minimal legacy facts
ios_facts:
gather_subset: min
gather_network_resources: l3_interfaces
"""
RETURN = """
ansible_net_gather_subset:
description: The list of fact subsets collected from the device
returned: always
type: list
ansible_net_gather_network_resources:
description: The list of fact for network resource subsets collected from the device
returned: when the resource is configured
type: list
# default
ansible_net_model:
description: The model name returned from the device
returned: always
type: str
ansible_net_serialnum:
description: The serial number of the remote device
returned: always
type: str
ansible_net_version:
description: The operating system version running on the remote device
returned: always
type: str
ansible_net_iostype:
description: The operating system type (IOS or IOS-XE) running on the remote device
returned: always
type: str
ansible_net_hostname:
description: The configured hostname of the device
returned: always
type: str
ansible_net_image:
description: The image file the device is running
returned: always
type: str
ansible_net_stacked_models:
description: The model names of each device in the stack
returned: when multiple devices are configured in a stack
type: list
ansible_net_stacked_serialnums:
description: The serial numbers of each device in the stack
returned: when multiple devices are configured in a stack
type: list
ansible_net_api:
description: The name of the transport
returned: always
type: str
ansible_net_python_version:
description: The Python version Ansible controller is using
returned: always
type: str
# hardware
ansible_net_filesystems:
description: All file system names available on the device
returned: when hardware is configured
type: list
ansible_net_filesystems_info:
description: A hash of all file systems containing info about each file system (e.g. free and total space)
returned: when hardware is configured
type: dict
ansible_net_memfree_mb:
  description: The available free memory on the remote device in MB
returned: when hardware is configured
type: int
ansible_net_memtotal_mb:
  description: The total memory on the remote device in MB
returned: when hardware is configured
type: int
# config
ansible_net_config:
description: The current active config from the device
returned: when config is configured
type: str
# interfaces
ansible_net_all_ipv4_addresses:
description: All IPv4 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_all_ipv6_addresses:
description: All IPv6 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_interfaces:
description: A hash of all interfaces running on the system
returned: when interfaces is configured
type: dict
ansible_net_neighbors:
  description:
    - The list of CDP and LLDP neighbors from the remote device. If both
      CDP and LLDP neighbor data are present on one port, CDP is preferred.
returned: when interfaces is configured
type: dict
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.ios.argspec.facts.facts import FactsArgs
from ansible.module_utils.network.ios.facts.facts import Facts
from ansible.module_utils.network.ios.ios import ios_argument_spec
def main():
    """ Main entry point for AnsibleModule
    """
    # Merge the facts argument spec with the common ios connection options.
    argument_spec = FactsArgs.argument_spec
    argument_spec.update(ios_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    warnings = ['default value for `gather_subset` '
                'will be changed to `min` from `!config` v2.11 onwards']
    # get_facts() returns a (facts, warnings) tuple.
    result = Facts(module).get_facts()
    ansible_facts, additional_warnings = result
    warnings.extend(additional_warnings)
    module.exit_json(ansible_facts=ansible_facts, warnings=warnings)
if __name__ == '__main__':
main()
|
thaim/ansible
|
lib/ansible/modules/network/ios/ios_facts.py
|
Python
|
mit
| 7,398
|
from mfd import *
from mfd.saitek.x52pro import *
from mfd.saitek.directoutput import *
from time import (sleep, time)
import re
import os
import logging
def nowmillis():
millis = int(round(time() * 1000))
return millis
mfd = None
def addToClipBoard(text):
    # Windows-only: pipe the text through clip.exe. The text is not
    # shell-escaped, so it must not contain cmd.exe metacharacters.
    command = 'echo ' + text.strip() + '| clip'
    os.system(command)
"""
empty route:
# --------------
self.routes[''] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
"""
"""
routes:
"""
class FortTradeMFD(X52Pro):
def __init__(self):
super().__init__(self)
# NOTE - display is 3 rows of 16 characters
self.routes = {}
# --------------
self.routes['19 Leonis'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (Con Tech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"Graill Redd",
"Ray (no pickup)",
"",
"-- 5 -----------",
"19 Leonis",
"Tshang (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Aasgaa'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 3 -----------",
"BD-22 3573",
"Bohm (Gold)",
"",
"-- 4 -----------",
"Leesti",
"Lucas (Con Tech)",
"WARNING: pirates",
"-- 4 -----------",
"Aasgaa",
"Steiner (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Alioth *permit'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"HR 5451",
"Dantec (Beryll.)",
"",
"-- 4 -----------",
"G 224-46",
"Zebrowski (Silv)",
"",
"-- 5 -----------",
"Alioth",
"Gotham (ALRs)",
"WARNING: permit",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Anayol'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 3 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 4 -----------",
"BD+31 2373",
"Gaultier (Gold)",
"",
"-- 4 -----------",
"Anayol",
"Andrey (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Ao Kond'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"alt. Prog Cells",
"-- 3 -----------",
"MV Virginis",
"Weitz (Pallad.)",
"",
"-- 4 -----------",
"Ao Kond",
"Fettman (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Arabh'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"Adeo",
"Drobrov. (Gold)",
"",
"-- 4 -----------",
"Gilya",
"Bell (Land Enr.)",
"",
"-- 5 -----------",
"Arabh",
"Heceta (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Arany'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD-02 4304"
"Bean (Palladium)",
"",
"-- 4 -----------",
"Chongguls",
"Filip. (Mar Eqp)",
"",
"-- 5 -----------",
"Arany",
"Ford (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['BD-22 3573'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 3 -----------",
"Bd-22 3573",
"Bohm (Gold)",
"",
"-- 4 -----------",
"Teveri",
"Wiley (Indite)",
"",
"-- 5 -----------",
"BD-22 3573",
"Khayyam (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['BD+03 3531A'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD-02 4304",
"Bean (Palladium)",
"",
"-- 4 -----------",
"Cantjarisni",
"Cochr. (ResSep)",
"",
"-- 5 -----------",
"BD+03 3531A",
"Horowitz (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Bielonti'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"Bielonti",
"Ahmed (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Bilfrost'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 3 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 4 -----------",
"BD+31 2373",
"Gaultier (Gold)",
"",
"-- 5 -----------",
"Bilfrost",
"Williams (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Bonitou'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Apala",
"Wilson (ConTech)",
"",
"-- 4 -----------",
"Yoruba",
"Apt (Marine Eqp)",
"",
"-- 5 -----------",
"Bonitou",
"Lyakhov (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Boreas'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"Boreas",
"Rice (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Bukurnabal'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Awawar",
"Cartw. (L. Enr.)",
"",
"-- 4 -----------",
"Bukurnabal",
"Kneale (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Caraceni'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Holiacan",
"Fort. (ConsTech)",
"",
"-- 4 -----------",
"LTT 13125",
"Ross (Gold)",
"",
"-- 5 -----------",
"Caraceni",
"Kerr (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Cartoq'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"Naitis",
"Ford (Gallite)",
"",
"-- 5 -----------",
"Cartoq",
"Avdeyev (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Circios'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"Tellus",
"Ahern (Land Enr)",
"",
"-- 5 -----------",
"Circios",
"Mullane (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Contien'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"GQ Virginis",
"Ray (Prog Cells)",
"alt. Cons Tech",
"-- 3 -----------",
"Anaruwa",
"Skvort. (Palla.)",
"",
"-- 4 -----------",
"Contien",
"Eanes (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Cybele'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Supercon)",
"",
"-- 5 -----------",
"Cybele",
"Fraley (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Daha'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"La Tenha",
"Rozhd. (ConTech)",
"",
"-- 5 -----------",
"Daha",
"Burbank (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Dhanhopi'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Supercon)",
"",
"-- 5 -----------",
"Dhanhopi",
"Plucker (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Helvetitj'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"Acan",
"Phill. (Mar Eqp)",
"",
"-- 5 -----------",
"Helvetitj",
"Friend (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['HIP 80242'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD-02 4304",
"Bean (Palladium)",
"",
"-- 4 -----------",
"Una",
"Hoard (Res Sep)",
"",
"-- 5 -----------",
"HIP 80242",
"Csoma (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Holiacan'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"Holiacan",
"Hopi (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Hooriayan'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"Hooriayan",
"Davis (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['HR 8474'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD+65 1846",
"Thiele (Palladium)",
"",
"-- 4 -----------",
"Apala",
"Wilson (ConTech)",
"",
"-- 5 -----------",
"HR 8474",
"Haiseng (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Ining'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"STKM 1-616",
"Davy (Cons Tech)",
"",
"-- 4 -----------",
"Haritis",
"Tem (Palladium)",
"",
"-- 5 -----------",
"Ining",
"Shaara (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Ithaca'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Aulin",
"Aulin (ConsTech)",
"",
"-- 3 -----------",
"G 203-47",
"Thorne (Narc.)",
"",
"-- 4 -----------",
"Ithaca",
"Hume (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Kokoimudji'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 3 -----------",
"Leesti",
"Lucas (ConsTech)",
"WARNING: pirates",
"-- 4 -----------",
"Koller",
"Cummings (Pall.)",
"",
"-- 5 -----------",
"Kokoimudji",
"Siodmak (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Kons'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 3 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 4 -----------",
"BD+31 2373",
"Lopez (Gold)",
"",
"-- 5 -----------",
"Kons",
"Makarov (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Kpaniya'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Aganippe",
"Vasil. (Res Sep)",
"",
"-- 4 -----------",
"Kpaniya",
"Tilman (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['La Tenha'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Tantalum)",
"",
"-- 5 -----------",
"La Tenha",
"Rozhd. (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Leesti'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 3 -----------",
"Naitis",
"Ford (Bertrand.)",
"",
"-- 4 -----------",
"Folna",
"Patsayev (Gold)",
"",
"-- 5 -----------",
"Leesti",
"Lucas (ALRs)",
"WARNING: pirates",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LHS 2405'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Apala",
"Wilson (ConTech)",
"",
"-- 4 -----------",
"LP 27-9",
"Drebbel (Mar Eq)",
"",
"-- 5 -----------",
"LHS 2405",
"Godwin (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LHS 2771'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"LHS 2771",
"Sarafanov (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LHS 2936'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"G 224-46",
"Zebrow. (Bertr.)",
"",
"-- 4 -----------",
"Andere",
"Malzberg (Gold)",
"",
"-- 5 -----------",
"LHS 2936 (again)",
"Fraser (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LHS 3079'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"G 180-18",
"Hale (Bertr.)",
"",
"-- 4 -----------",
"Parutis",
"Evans (Gold)",
"WARNING: Hudson",
"-- 5 -----------",
"LHS 3079",
"Ross (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LHS 3749'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Apala",
"Wilson (ConTech)",
"",
"-- 4 -----------",
"BD+65 1846",
"Thiele (Pallad.)",
"",
"-- 5 -----------",
"LHS 3749",
"Rodden. (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LP 490-68'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"Tellus",
"Ahern (Res Sep)",
"",
"-- 3 -----------",
"BD+19 2511",
"Lie (Beryllium)",
"",
"-- 4 -----------",
"LP 490-68",
"Shaara (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LP 621-11'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 4 -----------",
"BD-01 2784",
"Xiaog. (Pallad)",
"",
"-- 5 -----------",
"LP 621-11",
"Horch (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LTT 14478'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"LTT 14478",
"Lanier (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['LTT 5964'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 5 -----------",
"LTT 5964",
"Witt (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Lugh'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 3 -----------",
"61 Virginis",
"Furuk. (Pallad)",
"alt. Silver",
"-- 4 -----------",
"Lu Velorum",
"Miletus (Beryll)",
"",
"-- 5 -----------",
"Lugh",
"Balandin (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Manbatz'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"GQ Virginis",
"Ray (Cons. Tech)",
"",
"-- 3 -----------",
"Parutis",
"Evans (Gold)",
"WARNING: Hudson",
"-- 4 -----------",
"Manbatz",
"Bretnor (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Marasing'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD+65 1846",
"Thiele (Pallad.)",
"or Gold / Silver",
"-- 4 -----------",
"Apala",
"Wilson (ConTech)",
"alt. Prog. Cells",
"-- 5 -----------",
"Marasing",
"Landst. (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Meenates'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"G 224-46",
"Zebrow. (Bertr.)",
"",
"-- 4 -----------",
"Andere",
"Kummer (Gold)",
"",
"-- 5 -----------",
"Meenates",
"Burbank (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['MCC 686'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 4 -----------",
"LP 388-78",
"Gaspar (Gold)",
"",
"-- 5 -----------",
"MCC 686",
"Smith (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Mereboga'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Khernidjal",
"West (Cons Tech)",
"",
"-- 4 -----------",
"BD+46 2014",
"Simak (Gold)",
"",
"-- 5 -----------",
"Mereboga",
"Howard (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Mullag'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD+31 2373",
"Gaultier (Gold)",
"",
"-- 4 -----------",
"LDS 2314",
"Dobrov. (Mar Eq)",
"",
"-- 5 -----------",
"Mullag",
"Potagos (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Nagybold'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"MCC 686",
"Baudin (Beryll.)",
"",
"-- 4 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 5 -----------",
"Nagybold",
"Gordon (ALRs)",
"WARNING: Outpost",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Nevermore'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"Tellus",
"Ahern (ConsTech)",
"",
"-- 5 -----------",
"Nevermore",
"Pinto (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['NLTT 44958'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"Adeo",
"Foda (Gold)",
"",
"-- 4 -----------",
"Sivas",
"Cavalieri (none)",
"",
"-- 5 -----------",
"NLTT 44958",
"Anderson (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Olwain'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD+31 2373",
"Gaultier (Gold)",
"",
"-- 4 -----------",
"LHS 2637",
"Perez (ConsTech)",
"",
"-- 5 -----------",
"Olwain",
"Cabot (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Opala'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD+31 2373",
"Gaultier (Gold)",
"",
"-- 4 -----------",
"Chaxiraxi",
"Gamow (Min Extr)",
"",
"-- 5 -----------",
"Opala",
"Onizuka's (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Peckollerci'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"Acan",
"Phill. (Mar Eqp)",
"",
"-- 5 -----------",
"Peckollerci",
"Minkowski (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Pongo'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"G 139-50",
"Filip. (Autofab)",
"",
"-- 4 -----------",
"Pongo",
"Antonelli (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Quan Gurus'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 4 -----------",
"Quan Gurus",
"Russell (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Robor'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 3 -----------",
"Naitis",
"Ford (Bertrand.)",
"",
"-- 4 -----------",
"Folna",
"Patsayev (Gold)",
"",
"-- 5 -----------",
"Robor",
"Hooke (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Ross 94'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"Ross 94",
"Kingsbury (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['San Guan'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"Amahu",
"Kondr. (Pallad.)",
"",
"-- 4 -----------",
"Cantjarisni",
"Cochr. (AutoFab)",
"",
"-- 5 -----------",
"San Guan",
"Alvarado (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['San Tu'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"San Tu",
"Chomsky (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Siki'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"MCC 686",
"Baudin (Beryll.)",
"",
"-- 3 -----------",
"Naitis",
"Ford (Coltan)",
"",
"-- 4 -----------",
"Siki",
"Lee (ALRs)",
"WARNING: Outpost",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Tau Bootis'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"Tau Bootis",
"Pascal (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Tricorii'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD-02 4304",
"Bean (Palladium)",
"",
"-- 4 -----------",
"Tricorii",
"Hippalus (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Unkuar'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"LP 377-78",
"Gaspar (Gold)",
"",
"-- 4 -----------",
"MCC 686",
"Smith (Beryll.)",
"",
"-- 5 -----------",
"Unkuar",
"Flynn (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['V371 Normae'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 355",
"Ramelli (ResSep)",
"",
"-- 3 -----------",
"61 Virginis",
"Furukawa (Pall.)",
"alt. Silver",
"-- 4 -----------",
"GQ Virginis",
"Ray (Cons Tech)",
"",
"-- 5 -----------",
"V371 Normae",
"Smith (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Varam'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (Beryll.)",
"",
"-- 3 -----------",
"Holiacan",
"Fortr. (ConTech)",
"",
"-- 4 -----------",
"HIP 69518",
"Great (Supercon)",
"",
"-- 5 -----------",
"Varam",
"Zebrowski (ALRs)",
"",
"-- 6 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Woloniugo'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 2 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 3 -----------",
"BD+26 2184",
"Wiberg (Mar Eqp)",
"",
"-- 4 -----------",
"Woloniugo",
"Renenbel. (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
# --------------
self.routes['Zosi'] = [
# display width #
#----------------#
"-- 1 -----------",
"Gateway",
"Wicca (Gold)",
"",
"-- 5 -----------",
"LHS 2936",
"Fraser (ConTech)",
"",
"-- 5 -----------",
"LTT 13125",
"Ross (Gold)",
"",
"-- 5 -----------",
"Ross 113",
"Tasman (None)",
"",
"-- 5 -----------",
"Zosi",
"Citi (ALRs)",
"",
"-- 5 -----------",
"Gateway",
"Wicca (ALRs)",
"Return to Wicca",
"-- repeat ------"]
#----------------#
# display width #
        self.cursor = 0                # index into the current line list
        self.route = 'Manbatz'         # currently selected route
        self.mode = 'system'           # 'system' list view or 'route' view
        self.lastinput = nowmillis()   # timestamp for button debouncing
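    # Two UI modes: 'system' shows the sorted route names with a '>' cursor
    # and select opens the highlighted route; 'route' scrolls through that
    # route's lines and select returns to the system list.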
    def OnSoftButton(self, *args, **kwargs):
        # Debounce: ignore button events within 200 ms of the previous one.
        if self.lastinput > nowmillis() - 200:
            return
        self.lastinput = nowmillis()
        # Read the soft-button states from the event object.
        select = bool(args[0].select)
        up = bool(args[0].up)
        down = bool(args[0].down)
        if up:
            if self.mode == 'route':
                self.cursor = (self.cursor - 1) % len(self.routes[self.route])
            else:
                self.cursor = (self.cursor - 1) % len(self.routes)
        if down:
            if self.mode == 'route':
                self.cursor = (self.cursor + 1) % len(self.routes[self.route])
            else:
                self.cursor = (self.cursor + 1) % len(self.routes)
        if select:
            if self.mode == 'route':
                # if in route view, switch to system view and focus current system
                lines = sorted(self.routes)
                self.cursor = lines.index(self.route)
                self.mode = 'system'
            else:
                # else if system view, switch to route for selected and jump to line 0
                lines = sorted(self.routes)
                self.route = lines[self.cursor]
                self.mode = 'route'
                self.cursor = 0
        if select or up or down:
            self.PageShow()
def OnPage(self, page_id, activated):
if page_id == 0 and activated:
self.PageShow()
    def PageShow(self):
        if self.mode == 'route':
            # Route view: show three consecutive route lines, wrapping around.
            lines = self.routes[self.route]
            cursor = self.cursor
            mfd.display(lines[(cursor + 0) % len(lines)], lines[(cursor + 1) % len(lines)], lines[(cursor + 2) % len(lines)])
            # When a numbered "-- N" header sits on or just above the top row,
            # copy the system name beneath it to the clipboard.
            for x in range(-1, 1):
                if re.match(r'^-- \d', lines[(cursor + x) % len(lines)]):
                    addToClipBoard(lines[(cursor + x + 1) % len(lines)])
        else:
            # System view: show the sorted route names with a '>' cursor.
            lines = sorted(self.routes)
            cursor = self.cursor
            mfd.display(lines[(cursor - 1) % len(lines)], "> " + lines[(cursor + 0) % len(lines)], lines[(cursor + 1) % len(lines)])
#logging.root.setLevel(logging.DEBUG)
doObj = FortTradeMFD()
mfd = X52ProMFD(doObj)  # X52ProMFD comes from the star imports above
sleep(0.5)
mfd.doObj.PageShow()
#mfd.display("test1", "test2", "1234567890123456")
print("showing fast-track fortification trade routes")
print("press <enter> to exit")
input()
|
headprogrammingczar/mahon-mfd
|
main.py
|
Python
|
bsd-3-clause
| 44,328
|