repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
makyo/furrypoll | refs/heads/master | scripts/nationalities.py | 2 | import models
nationalities = {
'n': 0.0,
'nationalities': {},
}
for response in models.Response.objects.all():
touchpoints = map(lambda tp: tp.touchpoint_type, response.metadata.touchpoints)
if -4 in touchpoints:
nationalities['n'] += 1.0
if response.overview is not None:
if response.overview.country is not None and response.overview.country != 'xx':
if response.overview.country not in nationalities['nationalities']:
nationalities['nationalities'][response.overview.country] = 0.0
nationalities['nationalities'][response.overview.country] += 1.0
print "2015: {}".format(nationalities['n'])
result = result = sorted(nationalities['nationalities'].items(), key=lambda x: x[1], reverse=True)
for i in result[:9]:
print "{}: {:3.2f}%".format(i[0], i[1] / nationalities['n'] * 100)
|
eul721/The-Perfect-Pokemon-Team-Balancer | refs/heads/master | libs/env/Lib/encodings/mac_romanian.py | 593 | """ Python Character Mapping Codec mac_romanian generated from 'MAPPINGS/VENDORS/APPLE/ROMANIAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: both directions are single-pass charmap lookups against
    # the module-level tables generated from Apple's ROMANIAN.TXT mapping.
    # (File is generated by gencodec.py -- avoid hand-editing the logic.)

    def encode(self, input, errors='strict'):
        # Returns (bytes, length_consumed) per the codecs.Codec contract.
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        # Returns (unicode, length_consumed) per the codecs.Codec contract.
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is byte-for-byte stateless, so `final` can be ignored
    # and no buffering between calls is needed.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Every input byte maps to exactly one character, so decoding needs no
    # cross-call state and `final` can be ignored.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
# The stream classes need no extra behaviour: they inherit the charmap
# encode/decode from Codec and the stream plumbing from the codecs bases.
class StreamWriter(Codec, codecs.StreamWriter):
    pass

class StreamReader(Codec, codecs.StreamReader):
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the encodings package uses to register
    the 'mac-romanian' codec."""
    codec = Codec()
    return codecs.CodecInfo(
        name='mac-romanian',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\u0102' # 0xAE -> LATIN CAPITAL LETTER A WITH BREVE
u'\u0218' # 0xAF -> LATIN CAPITAL LETTER S WITH COMMA BELOW # for Unicode 3.0 and later
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\u0103' # 0xBE -> LATIN SMALL LETTER A WITH BREVE
u'\u0219' # 0xBF -> LATIN SMALL LETTER S WITH COMMA BELOW # for Unicode 3.0 and later
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\u021a' # 0xDE -> LATIN CAPITAL LETTER T WITH COMMA BELOW # for Unicode 3.0 and later
u'\u021b' # 0xDF -> LATIN SMALL LETTER T WITH COMMA BELOW # for Unicode 3.0 and later
u'\u2021' # 0xE0 -> DOUBLE DAGGER
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\uf8ff' # 0xF0 -> Apple logo
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u02d8' # 0xF9 -> BREVE
u'\u02d9' # 0xFA -> DOT ABOVE
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT
u'\u02db' # 0xFE -> OGONEK
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
# Inverse of decoding_table: maps each Unicode character back to its byte.
encoding_table = codecs.charmap_build(decoding_table)
|
txm/potato | refs/heads/master | django/conf/locale/en/formats.py | 318 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Output formats (Django date-format syntax; examples per the Django docs).
DATE_FORMAT = 'N j, Y'              # e.g. 'Oct. 25, 2006' ('N' = AP-style month)
TIME_FORMAT = 'P'                   # e.g. '2:30 p.m.'
DATETIME_FORMAT = 'N j, Y, P'       # e.g. 'Oct. 25, 2006, 2:30 p.m.'
YEAR_MONTH_FORMAT = 'F Y'           # e.g. 'October 2006'
MONTH_DAY_FORMAT = 'F j'            # e.g. 'October 25'
SHORT_DATE_FORMAT = 'm/d/Y'         # e.g. '10/25/2006'
SHORT_DATETIME_FORMAT = 'm/d/Y P'   # e.g. '10/25/2006 2:30 p.m.'
FIRST_DAY_OF_WEEK = 0 # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
    # '%b %d %Y', '%b %d, %Y',            # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y',            # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y',            # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y',            # '25 October 2006', '25 October, 2006'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S',     # '14:30:59'
    '%H:%M',        # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%m/%d/%Y %H:%M:%S',     # '10/25/2006 14:30:59'
    '%m/%d/%Y %H:%M',        # '10/25/2006 14:30'
    '%m/%d/%Y',              # '10/25/2006'
    '%m/%d/%y %H:%M:%S',     # '10/25/06 14:30:59'
    '%m/%d/%y %H:%M',        # '10/25/06 14:30'
    '%m/%d/%y',              # '10/25/06'
)
# Number formatting: US conventions, e.g. 1,234,567.89.
DECIMAL_SEPARATOR = u'.'
THOUSAND_SEPARATOR = u','
NUMBER_GROUPING = 3
|
m2dsupsdlclass/lectures-labs | refs/heads/master | labs/08_frameworks/solutions/momentum_optimizer.py | 1 | class MomentumGradientDescent(GradientDescent):
def __init__(self, params, lr=0.1, momentum=.9):
super(MomentumGradientDescent, self).__init__(params, lr)
self.momentum = momentum
self.velocities = [torch.zeros_like(param, requires_grad=False)
for param in params]
def step(self):
with torch.no_grad():
for i, (param, velocity) in enumerate(zip(self.params,
self.velocities)):
velocity = self.momentum * velocity + param.grad
param -= self.lr * velocity
self.velocities[i] = velocity
|
encukou/samba | refs/heads/master | third_party/dnspython/tests/ntoaaton.py | 47 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.exception
import dns.ipv4
import dns.ipv6
# for convenience
# Short aliases for the conversion functions under test.
aton4 = dns.ipv4.inet_aton  # IPv4 text -> packed bytes
ntoa4 = dns.ipv4.inet_ntoa  # IPv4 packed bytes -> text
aton6 = dns.ipv6.inet_aton  # IPv6 text -> packed bytes
ntoa6 = dns.ipv6.inet_ntoa  # IPv6 packed bytes -> text

# Malformed dotted-quad strings every IPv4 parser must reject; also reused
# below (prefixed with '::') for the embedded-IPv4 rejection tests.
v4_bad_addrs = ['256.1.1.1', '1.1.1', '1.1.1.1.1', '01.1.1.1',
                '+1.1.1.1', '1.1.1.1+', '1..2.3.4', '.1.2.3.4',
                '1.2.3.4.']
class NtoAAtoNTestCase(unittest.TestCase):
    """Round-trip tests for dns.ipv4/dns.ipv6 inet_aton and inet_ntoa.

    NOTE: Python 2 only -- these tests rely on str/bytes equivalence and the
    'hex_codec' str codec, neither of which exists on Python 3.
    """
    # NOTE(review): the original file defined test_aton1..test_aton4 twice
    # with byte-identical bodies; the second copies shadowed the first set.
    # The duplicates have been removed -- the set of tests actually executed
    # by unittest is unchanged.

    def test_aton1(self):
        a = aton6('::')
        self.failUnless(a == '\x00' * 16)

    def test_aton2(self):
        a = aton6('::1')
        self.failUnless(a == '\x00' * 15 + '\x01')

    def test_aton3(self):
        a = aton6('::10.0.0.1')
        self.failUnless(a == '\x00' * 12 + '\x0a\x00\x00\x01')

    def test_aton4(self):
        a = aton6('abcd::dcba')
        self.failUnless(a == '\xab\xcd' + '\x00' * 12 + '\xdc\xba')

    def test_aton5(self):
        a = aton6('1:2:3:4:5:6:7:8')
        self.failUnless(a == \
                        '00010002000300040005000600070008'.decode('hex_codec'))

    def test_bad_aton1(self):
        # Too few groups and no '::' filler.
        def bad():
            a = aton6('abcd:dcba')
        self.failUnlessRaises(dns.exception.SyntaxError, bad)

    def test_bad_aton2(self):
        # '::' may appear at most once.
        def bad():
            a = aton6('abcd::dcba::1')
        self.failUnlessRaises(dns.exception.SyntaxError, bad)

    def test_bad_aton3(self):
        # Too many groups.
        def bad():
            a = aton6('1:2:3:4:5:6:7:8:9')
        self.failUnlessRaises(dns.exception.SyntaxError, bad)

    def test_ntoa1(self):
        b = '00010002000300040005000600070008'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '1:2:3:4:5:6:7:8')

    def test_ntoa2(self):
        b = '\x00' * 16
        t = ntoa6(b)
        self.failUnless(t == '::')

    def test_ntoa3(self):
        b = '\x00' * 15 + '\x01'
        t = ntoa6(b)
        self.failUnless(t == '::1')

    def test_ntoa4(self):
        b = '\x80' + '\x00' * 15
        t = ntoa6(b)
        self.failUnless(t == '8000::')

    def test_ntoa5(self):
        b = '\x01\xcd' + '\x00' * 12 + '\x03\xef'
        t = ntoa6(b)
        self.failUnless(t == '1cd::3ef')

    def test_ntoa6(self):
        # The longest zero run (not the first) is the one compressed.
        b = 'ffff00000000ffff000000000000ffff'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == 'ffff:0:0:ffff::ffff')

    def test_ntoa7(self):
        b = '00000000ffff000000000000ffffffff'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '0:0:ffff::ffff:ffff')

    def test_ntoa8(self):
        b = 'ffff0000ffff00000000ffff00000000'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == 'ffff:0:ffff::ffff:0:0')

    def test_ntoa9(self):
        # IPv4-compatible addresses render with a dotted-quad tail.
        b = '0000000000000000000000000a000001'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '::10.0.0.1')

    def test_ntoa10(self):
        b = '0000000000000000000000010a000001'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '::1:a00:1')

    def test_ntoa11(self):
        # IPv4-mapped form.
        b = '00000000000000000000ffff0a000001'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '::ffff:10.0.0.1')

    def test_ntoa12(self):
        b = '000000000000000000000000ffffffff'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '::255.255.255.255')

    def test_ntoa13(self):
        b = '00000000000000000000ffffffffffff'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '::ffff:255.255.255.255')

    def test_ntoa14(self):
        b = '0000000000000000000000000001ffff'.decode('hex_codec')
        t = ntoa6(b)
        self.failUnless(t == '::0.1.255.255')

    def test_bad_ntoa1(self):
        # Empty input is not a 16-byte address.
        def bad():
            a = ntoa6('')
        self.failUnlessRaises(ValueError, bad)

    def test_bad_ntoa2(self):
        # 17 bytes is one too many.
        def bad():
            a = ntoa6('\x00' * 17)
        self.failUnlessRaises(ValueError, bad)

    def test_good_v4_aton(self):
        # Each pair must round-trip in both directions.
        pairs = [('1.2.3.4', '\x01\x02\x03\x04'),
                 ('255.255.255.255', '\xff\xff\xff\xff'),
                 ('0.0.0.0', '\x00\x00\x00\x00')]
        for (t, b) in pairs:
            b1 = aton4(t)
            t1 = ntoa4(b1)
            self.failUnless(b1 == b)
            self.failUnless(t1 == t)

    def test_bad_v4_aton(self):
        # Bind each address via a factory so the failing value is captured
        # per-callable rather than shared via late binding.
        def make_bad(a):
            def bad():
                return aton4(a)
            return bad
        for addr in v4_bad_addrs:
            self.failUnlessRaises(dns.exception.SyntaxError, make_bad(addr))

    def test_bad_v6_aton(self):
        addrs = ['+::0', '0::0::', '::0::', '1:2:3:4:5:6:7:8:9',
                 ':::::::']
        # A bad embedded IPv4 part must also make the whole IPv6 text bad.
        embedded = ['::' + x for x in v4_bad_addrs]
        addrs.extend(embedded)
        def make_bad(a):
            def bad():
                x = aton6(a)
            return bad
        for addr in addrs:
            self.failUnlessRaises(dns.exception.SyntaxError, make_bad(addr))
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
akosel/incubator-airflow | refs/heads/master | tests/api/__init__.py | 353 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
|
dabura667/electrum | refs/heads/master | gui/qt/request_list.py | 1 | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import six
from electrum.i18n import _
from electrum.util import block_explorer_URL, format_satoshis, format_time, age
from electrum.plugins import run_hook
from electrum.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import QTreeWidgetItem, QMenu
from .util import MyTreeWidget, pr_tooltips, pr_icons
class RequestList(MyTreeWidget):
    """Tree widget listing the wallet's receive (payment) requests.

    Columns (by index): 0 Date, 1 Address (hidden), 2 signature icon,
    3 Description, 4 Amount, 5 Status.
    """
    filter_columns = [0, 1, 2, 3, 4]  # Date, Account, Address, Description, Amount

    def __init__(self, parent):
        MyTreeWidget.__init__(self, parent, self.create_menu, [_('Date'), _('Address'), '', _('Description'), _('Amount'), _('Status')], 3)
        self.currentItemChanged.connect(self.item_changed)
        self.itemClicked.connect(self.item_changed)
        self.setSortingEnabled(True)
        self.setColumnWidth(0, 180)
        # The address column is used for lookups only, never displayed.
        self.hideColumn(1)

    def item_changed(self, item):
        """Populate the receive tab's editors from the selected request."""
        if item is None:
            return
        if not item.isSelected():
            return
        addr = str(item.text(1))
        req = self.wallet.receive_requests[addr]
        # 'exp' is a duration relative to the request's creation time.
        expires = age(req['time'] + req['exp']) if req.get('exp') else _('Never')
        amount = req['amount']
        message = self.wallet.labels.get(addr, '')
        self.parent.receive_address_e.setText(addr)
        self.parent.receive_message_e.setText(message)
        self.parent.receive_amount_e.setAmount(amount)
        self.parent.expires_combo.hide()
        self.parent.expires_label.show()
        self.parent.expires_label.setText(expires)
        self.parent.new_request_button.setEnabled(True)

    def on_update(self):
        self.wallet = self.parent.wallet
        # hide receive tab if no receive requests available
        b = len(self.wallet.receive_requests) > 0
        self.setVisible(b)
        self.parent.receive_requests_label.setVisible(b)
        if not b:
            self.parent.expires_label.hide()
            self.parent.expires_combo.show()
        # update the receive address if necessary
        current_address = self.parent.receive_address_e.text()
        domain = self.wallet.get_receiving_addresses()
        addr = self.wallet.get_unused_address()
        if current_address not in domain and addr:
            self.parent.set_receive_address(addr)
        self.parent.new_request_button.setEnabled(addr != current_address)
        # clear the list and fill it again
        self.clear()
        for req in self.wallet.get_sorted_requests(self.config):
            address = req['address']
            # Only show requests for addresses this wallet can receive on.
            if address not in domain:
                continue
            timestamp = req.get('time', 0)
            amount = req.get('amount')
            expiration = req.get('exp', None)
            message = req.get('memo', '')
            date = format_time(timestamp)
            status = req.get('status')
            signature = req.get('sig')
            requestor = req.get('name', '')
            amount_str = self.parent.format_amount(amount) if amount else ""
            item = QTreeWidgetItem([date, address, '', message, amount_str, pr_tooltips.get(status, '')])
            if signature is not None:
                item.setIcon(2, QIcon(":icons/seal.png"))
                item.setToolTip(2, 'signed by ' + requestor)
            # NOTE(review): was `status is not PR_UNKNOWN` -- an identity test
            # on int constants, which only worked via CPython small-int
            # interning; use value equality instead.
            if status != PR_UNKNOWN:
                # NOTE(review): original used column 6, which does not exist
                # (the widget has columns 0-5), so the status icon was never
                # shown; the Status column is index 5.
                item.setIcon(5, QIcon(pr_icons.get(status)))
            self.addTopLevelItem(item)

    def create_menu(self, position):
        """Context menu for a request row: copy/export/delete, plus plugin hooks."""
        item = self.itemAt(position)
        if not item:
            return
        addr = str(item.text(1))
        req = self.wallet.receive_requests[addr]
        column = self.currentColumn()
        column_title = self.headerItem().text(column)
        column_data = item.text(column)
        menu = QMenu(self)
        menu.addAction(_("Copy %s")%column_title, lambda: self.parent.app.clipboard().setText(column_data))
        menu.addAction(_("Copy URI"), lambda: self.parent.view_and_paste('URI', '', self.parent.get_request_URI(addr)))
        menu.addAction(_("Save as BIP70 file"), lambda: self.parent.export_payment_request(addr))
        menu.addAction(_("Delete"), lambda: self.parent.delete_payment_request(addr))
        # Let plugins extend the menu for this address.
        run_hook('receive_list_menu', menu, addr)
        menu.exec_(self.viewport().mapToGlobal(position))
|
openstack/neutron | refs/heads/master | neutron/services/trunk/drivers/linuxbridge/driver.py | 2 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants
from neutron_lib.services.trunk import constants as trunk_consts
from neutron.services.trunk.drivers import base
LOG = logging.getLogger(__name__)

# Alias of the ML2 mechanism driver this trunk driver pairs with; also checked
# against cfg.CONF.ml2.mechanism_drivers in is_loaded().
NAME = 'linuxbridge'
# VIF types whose bound ports this driver can trunk.
SUPPORTED_INTERFACES = (
    portbindings.VIF_TYPE_BRIDGE,
)
# Segmentation types accepted for subports (VLAN only).
SUPPORTED_SEGMENTATION_TYPES = (
    trunk_consts.SEGMENTATION_TYPE_VLAN,
)
class LinuxBridgeDriver(base.DriverBase):
    """Server-side Trunk driver for the ML2 Linux Bridge driver."""

    @property
    def is_loaded(self):
        """True when the linuxbridge mechanism driver is configured."""
        # The ml2 option group may not be registered at all (e.g. a different
        # core plugin is in use); treat that as "not loaded".
        try:
            configured_drivers = cfg.CONF.ml2.mechanism_drivers
        except cfg.NoSuchOptError:
            return False
        return NAME in configured_drivers

    @classmethod
    def create(cls):
        """Build the driver instance with its supported interface and
        segmentation types."""
        return cls(NAME, SUPPORTED_INTERFACES, SUPPORTED_SEGMENTATION_TYPES,
                   constants.AGENT_TYPE_LINUXBRIDGE, can_trunk_bound_port=True)
def register():
    # Instantiate the driver so it hooks into the trunk service plugin.
    # NOTE(kevinbenton): the thing that is keeping this from being
    # immediately garbage collected is that it registers callbacks
    LinuxBridgeDriver.create()
    LOG.debug("Linux bridge trunk driver initialized.")
|
ayseyo/oclapi | refs/heads/master | django-nonrel/ocl/oclapi/views.py | 5 | import dateutil.parser
from django.contrib.contenttypes.models import ContentType
from django.db import DatabaseError
from django.db.models import Q
from django.http import HttpResponse, Http404, HttpResponseForbidden
from rest_framework import generics, status
from rest_framework.generics import get_object_or_404 as generics_get_object_or_404
from rest_framework.generics import RetrieveUpdateDestroyAPIView, ListAPIView
from rest_framework.mixins import ListModelMixin, CreateModelMixin
from rest_framework.response import Response
from oclapi.mixins import PathWalkerMixin
from oclapi.models import ResourceVersionModel, ACCESS_TYPE_EDIT, ACCESS_TYPE_VIEW, ACCESS_TYPE_NONE
from oclapi.permissions import HasPrivateAccess, CanEditConceptDictionary, CanViewConceptDictionary, HasOwnership
from users.models import UserProfile
UPDATED_SINCE_PARAM = 'updatedSince'
def get_object_or_404(queryset, **filter_kwargs):
    # Wrapper around rest_framework's get_object_or_404 that also converts
    # low-level DatabaseError (e.g. a lookup value the backend cannot handle)
    # into a 404 instead of surfacing as a 500.
    try:
        return generics_get_object_or_404(queryset, **filter_kwargs)
    except DatabaseError:
        raise Http404
def parse_updated_since_param(request):
    """Return the 'updatedSince' query parameter parsed as a datetime.

    Returns None when the parameter is absent, empty, or unparseable.
    """
    raw_value = request.QUERY_PARAMS.get(UPDATED_SINCE_PARAM)
    if not raw_value:
        return None
    try:
        return dateutil.parser.parse(raw_value)
    except ValueError:
        return None
def parse_boolean_query_param(request, param, default=None):
    """Interpret a query parameter as a boolean.

    Accepts 'true'/'false' in any letter case; returns the matching bool,
    or None when the parameter is missing (and no default) or unrecognised.
    """
    raw = request.QUERY_PARAMS.get(param, default)
    if raw is None:
        return None
    lowered = raw.lower()
    if lowered == 'true':
        return True
    if lowered == 'false':
        return False
    return None
class BaseAPIView(generics.GenericAPIView):
    """
    An extension of generics.GenericAPIView that:
    1. Adds a hook for a post-initialize step
    2. De-couples the lookup field name (in the URL) from the "filter by" field name (in the queryset)
    3. Performs a soft delete on destroy()
    """
    # Model field used to filter the queryset in get_object(); the URL kwarg
    # itself is still named by DRF's `lookup_field`.
    pk_field = 'mnemonic'
    # Set from the 'user_is_self' URL kwarg in initialize(); True when the
    # request addresses the authenticated user's own resources.
    user_is_self = False

    def initial(self, request, *args, **kwargs):
        # DRF pre-handler hook; extended to run the custom initialize() step
        # with the request path.
        super(BaseAPIView, self).initial(request, *args, **kwargs)
        self.initialize(request, request.path_info, **kwargs)

    def initialize(self, request, path_info_segment, **kwargs):
        # Post-initialize hook; subclasses extend this to resolve parent
        # resources from the URL path segment.
        self.user_is_self = kwargs.pop('user_is_self', False)

    def get_object(self, queryset=None):
        # Determine the base queryset to use.
        if queryset is None:
            queryset = self.filter_queryset(self.get_queryset())
        else:
            pass  # Deprecation warning
        # Perform the lookup filtering: URL kwarg `lookup_field` is matched
        # against the model's `pk_field` (not necessarily the DB pk).
        lookup = self.kwargs.get(self.lookup_field, None)
        filter_kwargs = {self.pk_field: lookup}
        obj = get_object_or_404(queryset, **filter_kwargs)
        # May raise a permission denied
        self.check_object_permissions(self.request, obj)
        return obj

    def destroy(self, request, *args, **kwargs):
        # Soft delete: flag the object inactive instead of removing the row.
        obj = self.get_object()
        obj.is_active = False
        obj.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
class SubResourceMixin(BaseAPIView, PathWalkerMixin):
    """
    Base view for a sub-resource.
    Includes a post-initialize step that determines the parent resource,
    and a get_queryset method that applies the appropriate permissions and filtering.
    """
    user = None            # requesting user (set in initialize)
    userprofile = None     # profile of the requesting user, when user_is_self
    user_is_self = False
    parent_path_info = None  # URL path of the parent resource, when walked
    parent_resource = None   # resolved parent object (profile/org/source/...)
    # NOTE(review): class-level mutable default shared across subclasses that
    # do not override it -- safe only as long as it is never mutated in place.
    base_or_clause = []

    def initialize(self, request, path_info_segment, **kwargs):
        # Resolve the parent resource either from the authenticated user's
        # profile (user_is_self URLs) or by walking up the URL path.
        super(SubResourceMixin, self).initialize(request, path_info_segment, **kwargs)
        self.user = request.user
        if self.user_is_self:
            try:
                self.userprofile = self.user.get_profile()
                if self.userprofile:
                    self.parent_resource = self.userprofile
                    return
            except UserProfile.DoesNotExist: pass
        else:
            # Walk one or two path segments up depending on list vs detail view.
            levels = self.get_level()
            self.parent_path_info = self.get_parent_in_path(path_info_segment, levels=levels)
            self.parent_resource = None
            if self.parent_path_info and '/' != self.parent_path_info:
                self.parent_resource = self.get_object_for_path(self.parent_path_info, self.request)

    def get_level(self):
        # List/create URLs are one level below the parent; detail URLs two.
        levels = 1 if isinstance(self, ListModelMixin) or isinstance(self, CreateModelMixin) else 2
        return levels
class ConceptDictionaryMixin(SubResourceMixin):
    """Sub-resource view scoped to a concept dictionary (e.g. source/collection)
    owned by a parent resource."""
    base_or_clause = [Q(public_access=ACCESS_TYPE_EDIT), Q(public_access=ACCESS_TYPE_VIEW)]
    permission_classes = (HasPrivateAccess,)

    def get_queryset(self):
        # Restrict the queryset to children of the resolved parent resource.
        queryset = super(ConceptDictionaryMixin, self).get_queryset()
        # NOTE(review): parent_is_self is currently unused -- it only feeds the
        # commented-out visibility filter below.
        parent_is_self = self.parent_resource and self.userprofile and self.parent_resource == self.userprofile
        if self.parent_resource:
            # Versioned parents are keyed by their head (versioned) object.
            if hasattr(self.parent_resource, 'versioned_object'):
                self.parent_resource = self.parent_resource.versioned_object
            parent_resource_type = ContentType.objects.get_for_model(self.parent_resource)
            queryset = queryset.filter(parent_type__pk=parent_resource_type.id, parent_id=self.parent_resource.id)
        # below part is commented because this should be the part of permission , not queryset
        # if not(self.user.is_staff or parent_is_self):
        #     queryset = queryset.filter(~Q(public_access=ACCESS_TYPE_NONE))
        return queryset
class ConceptDictionaryCreateMixin(ConceptDictionaryMixin):
    """
    Concrete view for creating a model instance.
    """
    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def create(self, request, *args, **kwargs):
        # Creation only makes sense beneath a resolved parent resource.
        if not self.parent_resource:
            return HttpResponse(status=status.HTTP_405_METHOD_NOT_ALLOWED)
        # Only owners of the parent may create children under it.
        permission = HasOwnership()
        if not permission.has_object_permission(request, self, self.parent_resource):
            return HttpResponseForbidden()
        serializer = self.get_serializer(data=request.DATA, files=request.FILES)
        if serializer.is_valid():
            self.pre_save(serializer.object)
            self.object = serializer.save(force_insert=True, parent_resource=self.parent_resource)
            # NOTE(review): is_valid() is re-checked after save() -- presumably
            # save() can attach errors to the serializer; confirm before
            # simplifying this to a single validation pass.
            if serializer.is_valid():
                self.post_save(self.object, created=True)
                headers = self.get_success_headers(serializer.data)
                # Respond with the detail representation of the new object.
                serializer = self.get_detail_serializer(self.object, data=request.DATA, files=request.FILES, partial=True)
                return Response(serializer.data, status=status.HTTP_201_CREATED,
                                headers=headers)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def get_success_headers(self, data):
        # Location header for the created resource; tolerate serializers whose
        # output lacks a 'url' field.
        try:
            return {'Location': data['url']}
        except (TypeError, KeyError):
            return {}
class ConceptDictionaryUpdateMixin(ConceptDictionaryMixin):
    """
    Concrete view for updating a model instance.

    PUT requires a parent resource in the URL; updates are partial.
    """
    def put(self, request, *args, **kwargs):
        """Delegate PUT to update()."""
        return self.update(request, *args, **kwargs)

    def update(self, request, *args, **kwargs):
        """Apply a partial update to the object under self.parent_resource."""
        if not self.parent_resource:
            return HttpResponse(status=status.HTTP_405_METHOD_NOT_ALLOWED)
        self.object = self.get_object()
        created = False
        save_kwargs = {'force_update': True, 'parent_resource': self.parent_resource}
        success_status_code = status.HTTP_200_OK
        serializer = self.get_serializer(self.object, data=request.DATA,
                                         files=request.FILES, partial=True)
        if serializer.is_valid():
            self.pre_save(serializer.object)
            self.object = serializer.save(**save_kwargs)
            # Re-check validity: save() may have recorded errors on the
            # serializer after the initial validation pass.
            if serializer.is_valid():
                self.post_save(self.object, created=created)
                serializer = self.get_detail_serializer(self.object)
                return Response(serializer.data, status=success_status_code)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def get_detail_serializer(self, obj, data=None, files=None, partial=False):
        """Hook for concrete views: return the detail serializer for obj."""
        pass
class ConceptDictionaryExtrasMixin(SubResourceMixin):
    """Resolve the parent dictionary and the version holding its extras."""
    levels = 1

    def initialize(self, request, path_info_segment, **kwargs):
        # NOTE(review): does not call super().initialize(), so the state set
        # up by SubResourceMixin.initialize is skipped here - confirm intended.
        self.parent_path_info = self.get_parent_in_path(path_info_segment, levels=self.levels)
        self.parent_resource = self.get_object_for_path(self.parent_path_info, self.request)
        if hasattr(self.parent_resource, 'versioned_object'):
            # Path addressed a specific version: keep it and its head object.
            self.parent_resource_version = self.parent_resource
            self.parent_resource = self.parent_resource_version.versioned_object
        else:
            # Path addressed the head object: use its latest version.
            self.parent_resource_version = ResourceVersionModel.get_latest_version_of(self.parent_resource)
class ConceptDictionaryExtrasView(ConceptDictionaryExtrasMixin, ListAPIView):
    """List every extra attached to the parent dictionary version."""
    permission_classes = (CanViewConceptDictionary,)
    levels = 1

    def list(self, request, *args, **kwargs):
        # An absent extras dict is presented as an empty mapping.
        return Response(self.parent_resource_version.extras or {})
class ConceptDictionaryExtraRetrieveUpdateDestroyView(ConceptDictionaryExtrasMixin, RetrieveUpdateDestroyAPIView):
    """Retrieve, set or delete a single extra on a dictionary version."""
    # Concrete subclasses supply the version model used to persist edits.
    concept_dictionary_version_class = None
    permission_classes = (CanEditConceptDictionary,)
    levels = 2

    def initialize(self, request, path_info_segment, **kwargs):
        super(ConceptDictionaryExtraRetrieveUpdateDestroyView, self).initialize(request, path_info_segment, **kwargs)
        # Read-only access only requires view (not edit) permission.
        if request.method in ['GET', 'HEAD']:
            self.permission_classes = (CanViewConceptDictionary,)
        self.key = kwargs.get('extra')
        if not self.parent_resource_version.extras:
            self.parent_resource_version.extras = dict()
        # Shared reference: mutating self.extras mutates the version's extras.
        self.extras = self.parent_resource_version.extras

    def retrieve(self, request, *args, **kwargs):
        """Return {key: value} for the requested extra, or 404."""
        if self.key in self.extras:
            return Response({self.key: self.extras[self.key]})
        return Response({'detail': 'Not found.'}, status=status.HTTP_404_NOT_FOUND)

    def update(self, request, *args, **kwargs):
        """Set the extra from the request body and persist a new version."""
        value = request.DATA.get(self.key)
        if not value:
            return Response(['Must specify %s param in body.' % self.key], status=status.HTTP_400_BAD_REQUEST)
        self.extras[self.key] = value
        self.parent_resource_version.update_comment = 'Updated extras: %s=%s.' % (self.key, value)
        self.concept_dictionary_version_class.persist_changes(self.parent_resource_version)
        return Response({self.key: self.extras[self.key]})

    def delete(self, request, *args, **kwargs):
        """Remove the extra if present and persist; otherwise 404."""
        if self.key in self.extras:
            del self.extras[self.key]
            self.parent_resource_version.update_comment = 'Deleted extra %s.' % self.key
            self.concept_dictionary_version_class.persist_changes(self.parent_resource_version)
            return Response(status=status.HTTP_204_NO_CONTENT)
        return Response({"detail": "Not found."}, status.HTTP_404_NOT_FOUND)
class ChildResourceMixin(SubResourceMixin):
    """Scope the queryset to children of the parent, or to public items."""

    def get_queryset(self):
        queryset = super(ChildResourceMixin, self).get_queryset()
        if not self.parent_resource:
            # No parent in the path: expose only non-private resources.
            return queryset.filter(~Q(public_access=ACCESS_TYPE_NONE))
        # Having resolved a parent at this point implies the caller may
        # access it, so no extra permission filter is applied here.
        if hasattr(self.parent_resource, 'versioned_object'):
            self.parent_resource = self.parent_resource.versioned_object
        parent_type = ContentType.objects.get_for_model(self.parent_resource)
        return queryset.filter(parent_type__pk=parent_type.id,
                               parent_id=self.parent_resource.id)
class VersionedResourceChildMixin(ConceptDictionaryMixin):
    """
    Base view for a sub-resource that is a child of a versioned resource.

    For example, a Concept is a child of a Source, which can be versioned.
    Includes a post-initialize step that determines the parent resource,
    and a get_queryset method that limits the scope to children of the
    versioned resource.
    """
    parent_resource_version = None        # the resolved version object
    parent_resource_version_model = None  # set by concrete subclasses
    child_list_attribute = None           # version attribute listing child ids

    def initialize(self, request, path_info_segment, **kwargs):
        # NOTE(review): intentionally does not call super().initialize();
        # the walk-up level count is computed differently for this view.
        levels = 0
        # An extra path segment is present when the child is addressed by
        # its own URL kwarg (detail routes).
        if hasattr(self.model, 'get_url_kwarg') and self.model.get_url_kwarg() in kwargs:
            levels += 1
        levels = levels + 1 if isinstance(self, ListModelMixin) or isinstance(self, CreateModelMixin) else levels + 2
        self.parent_path_info = self.get_parent_in_path(path_info_segment, levels=levels)
        self.parent_resource = None
        if self.parent_path_info and '/' != self.parent_path_info:
            self.parent_resource = self.get_object_for_path(self.parent_path_info, self.request)
        if hasattr(self.parent_resource, 'versioned_object'):
            # Path named a specific version; resolve its head object too.
            self.parent_resource_version = self.parent_resource
            self.parent_resource = self.parent_resource_version.versioned_object
        else:
            # Path named the head object; scope to its HEAD version.
            self.parent_resource_version = ResourceVersionModel.get_head_of(self.parent_resource)

    def get_queryset(self):
        """Limit the queryset to ids listed on the parent version."""
        all_children = getattr(self.parent_resource_version, self.child_list_attribute) or []
        # Deliberately skips ConceptDictionaryMixin.get_queryset: filtering
        # is by the version's explicit child id list, not by parent fields.
        queryset = super(ConceptDictionaryMixin, self).get_queryset()
        queryset = queryset.filter(id__in=all_children)
        return queryset
class ResourceVersionMixin(BaseAPIView, PathWalkerMixin):
    """
    Base view for a resource that is a version of another resource
    (e.g. a SourceVersion).

    After initialization the versioned object is resolved from the URL
    path, and the queryset is limited to versions of that object.
    """
    versioned_object_path_info = None
    versioned_object = None

    def initialize(self, request, path_info_segment, **kwargs):
        super(ResourceVersionMixin, self).initialize(request, path_info_segment, **kwargs)
        if self.versioned_object:
            # Already resolved (e.g. by a subclass); nothing to do.
            return
        self.versioned_object_path_info = self.get_parent_in_path(path_info_segment)
        self.versioned_object = self.get_object_for_path(self.versioned_object_path_info, request)

    def get_queryset(self):
        base = super(ResourceVersionMixin, self).get_queryset()
        obj_type = ContentType.objects.get_for_model(self.versioned_object)
        return base.filter(versioned_object_type__pk=obj_type.id,
                           versioned_object_id=self.versioned_object.id)
class ResourceAttributeChildMixin(BaseAPIView, PathWalkerMixin):
    """
    Base view for (a) child(ren) of a resource version.

    Currently, the only instances of this view are:
    GET [collection parent]/collections/:collection/:version/children
    GET [source parent]/sources/:source/:version/children
    """
    resource_version_path_info = None
    resource_version = None

    def initialize(self, request, path_info_segment, **kwargs):
        super(ResourceAttributeChildMixin, self).initialize(request, path_info_segment, **kwargs)
        # Walk one level up the path to find the owning resource version.
        parent_path = self.get_parent_in_path(path_info_segment)
        self.resource_version_path_info = parent_path
        self.resource_version = self.get_object_for_path(parent_path, request)
|
smartdata-x/robots | refs/heads/master | pylib/Twisted/twisted/mail/mail.py | 36 | # -*- test-case-name: twisted.mail.test.test_mail -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Mail service support.
"""
# Twisted imports
from twisted.internet import defer
from twisted.application import service, internet
from twisted.python import util
from twisted.python import log
from twisted.cred.portal import Portal
# Sibling imports
from twisted.mail import protocols, smtp
# System imports
import os
from zope.interface import implements, Interface
class DomainWithDefaultDict:
    """
    A simulated dictionary for mapping domain names to domain objects with
    a default value for non-existing keys.

    @ivar domains: See L{__init__}
    @ivar default: See L{__init__}
    """
    def __init__(self, domains, default):
        """
        @type domains: L{dict} of L{bytes} -> L{IDomain} provider
        @param domains: A mapping of domain name to domain object.

        @type default: L{IDomain} provider
        @param default: The default domain.
        """
        self.domains = domains
        self.default = default

    def setDefaultDomain(self, domain):
        """
        Set the default domain.

        @type domain: L{IDomain} provider
        @param domain: The default domain.
        """
        self.default = domain

    def has_key(self, name):
        """
        Test for the presence of a domain name in this dictionary.

        This always returns C{True} because a default value will be returned
        if the name doesn't exist in this dictionary.

        @type name: L{bytes}
        @param name: A domain name.

        @rtype: L{bool}
        @return: C{True} to indicate that the domain name is in this
            dictionary.
        """
        # Return an actual bool, as documented, instead of the int 1.
        return True

    def fromkeys(klass, keys, value=None):
        """
        Create a new L{DomainWithDefaultDict} with the specified keys.

        @type keys: iterable of L{bytes}
        @param keys: Domain names to serve as keys in the new dictionary.

        @type value: L{NoneType <types.NoneType>} or L{IDomain} provider
        @param value: A domain object to serve as the value for all new keys
            in the dictionary.

        @rtype: L{DomainWithDefaultDict}
        @return: A new dictionary.
        """
        # NOTE(review): klass() is invoked with no arguments although
        # __init__ requires (domains, default), so this classmethod raises
        # TypeError if ever called -- confirm intended behavior upstream.
        d = klass()
        for k in keys:
            d[k] = value
        return d
    fromkeys = classmethod(fromkeys)

    def __contains__(self, name):
        """
        Test for the presence of a domain name in this dictionary.

        This always returns C{True} because a default value will be returned
        if the name doesn't exist in this dictionary.

        @type name: L{bytes}
        @param name: A domain name.

        @rtype: L{bool}
        @return: C{True} to indicate that the domain name is in this
            dictionary.
        """
        # Return an actual bool, as documented, instead of the int 1.
        return True

    def __getitem__(self, name):
        """
        Look up a domain name and, if it is present, return the domain object
        associated with it.  Otherwise return the default domain.

        @type name: L{bytes}
        @param name: A domain name.

        @rtype: L{IDomain} provider or L{NoneType <types.NoneType>}
        @return: A domain object.
        """
        return self.domains.get(name, self.default)

    def __setitem__(self, name, value):
        """
        Associate a domain object with a domain name in this dictionary.

        @type name: L{bytes}
        @param name: A domain name.

        @type value: L{IDomain} provider
        @param value: A domain object.
        """
        self.domains[name] = value

    def __delitem__(self, name):
        """
        Delete the entry for a domain name in this dictionary.

        @type name: L{bytes}
        @param name: A domain name.
        """
        del self.domains[name]

    def __iter__(self):
        """
        Return an iterator over the domain names in this dictionary.

        @return: An iterator over the domain names.
        """
        return iter(self.domains)

    def __len__(self):
        """
        Return the number of domains in this dictionary.

        @rtype: L{int}
        @return: The number of domains in this dictionary.
        """
        return len(self.domains)

    def __str__(self):
        """
        Build an informal string representation of this dictionary.

        @return: A string containing the mapping of domain names to domain
            objects.
        """
        return '<DomainWithDefaultDict %s>' % (self.domains,)

    def __repr__(self):
        """
        Build an "official" string representation of this dictionary.

        @return: A pseudo-executable string describing the underlying domain
            mapping of this object.
        """
        return 'DomainWithDefaultDict(%s)' % (self.domains,)

    def get(self, key, default=None):
        """
        Look up a domain name in this dictionary.

        @type key: L{bytes}
        @param key: A domain name.

        @param default: A domain object to be returned if the domain name is
            not in this dictionary.

        @return: The domain object associated with the domain name if it is
            in this dictionary.  Otherwise, the default value.
        """
        return self.domains.get(key, default)

    def copy(self):
        """
        Make a copy of this dictionary (the underlying mapping is copied,
        the default domain is shared).

        @rtype: L{DomainWithDefaultDict}
        @return: A copy of this dictionary.
        """
        return DomainWithDefaultDict(self.domains.copy(), self.default)

    def iteritems(self):
        """
        Return an iterator over the domain name/domain object pairs in the
        dictionary.  (Python 2 only.)

        Using the returned iterator while adding or deleting entries from
        the dictionary may result in a L{RuntimeError} or failing to iterate
        over all the pairs.

        @return: An iterator over the domain name/domain object pairs.
        """
        return self.domains.iteritems()

    def iterkeys(self):
        """
        Return an iterator over the domain names in this dictionary.
        (Python 2 only.)

        @return: An iterator over the domain names.
        """
        return self.domains.iterkeys()

    def itervalues(self):
        """
        Return an iterator over the domain objects in this dictionary.
        (Python 2 only.)

        @return: An iterator over the domain objects.
        """
        return self.domains.itervalues()

    def keys(self):
        """
        Return a list of all domain names in this dictionary.

        @return: The domain names in this dictionary.
        """
        return self.domains.keys()

    def values(self):
        """
        Return a list of all domain objects in this dictionary.

        @return: The domain objects in this dictionary.
        """
        return self.domains.values()

    def items(self):
        """
        Return a list of all domain name/domain object pairs in this
        dictionary.

        @return: Domain name/domain object pairs in this dictionary.
        """
        return self.domains.items()

    def popitem(self):
        """
        Remove an arbitrary domain name/domain object pair from this
        dictionary and return it as a tuple.

        @return: A domain name/domain object pair.

        @raise KeyError: When this dictionary is empty.
        """
        return self.domains.popitem()

    def update(self, other):
        """
        Update this dictionary with domain name/domain object pairs from
        another dictionary, overwriting existing entries on key collision.

        @param other: Another dictionary of domain name/domain object pairs.

        @return: None.
        """
        return self.domains.update(other)

    def clear(self):
        """
        Remove all items from this dictionary.

        @return: None.
        """
        return self.domains.clear()

    def setdefault(self, key, default):
        """
        Return the domain object associated with the domain name if it is
        present in this dictionary.  Otherwise, set the value for the
        domain name to the default and return that value.

        @type key: L{bytes}
        @param key: A domain name.

        @param default: A domain object.

        @return: The domain object associated with the domain name.
        """
        return self.domains.setdefault(key, default)
class IDomain(Interface):
    """
    An interface for email domains.

    A domain validates its users and produces message receivers for them.
    """
    def exists(user):
        """
        Check whether a user exists in this domain.

        @type user: L{User}
        @param user: A user.

        @rtype: no-argument callable which returns L{IMessage <smtp.IMessage>}
            provider
        @return: A function which takes no arguments and returns a message
            receiver for the user.

        @raise SMTPBadRcpt: When the given user does not exist in this
            domain.
        """

    def addUser(user, password):
        """
        Add a user to this domain.

        @type user: L{bytes}
        @param user: A username.

        @type password: L{bytes}
        @param password: A password.
        """

    def getCredentialsCheckers():
        """
        Return credentials checkers for this domain.

        @rtype: L{list} of L{ICredentialsChecker
            <twisted.cred.checkers.ICredentialsChecker>} provider
        @return: Credentials checkers for this domain.
        """
class IAliasableDomain(IDomain):
    """
    An interface for email domains which can be aliased to other domains.
    """
    def setAliasGroup(aliases):
        """
        Set the group of defined aliases for this domain.

        @type aliases: L{dict} of L{bytes} -> L{IAlias} provider
        @param aliases: A mapping of domain name to alias.
        """

    def exists(user, memo=None):
        """
        Check whether a user exists in this domain or an alias of it.

        @type user: L{User}
        @param user: A user.

        @type memo: L{NoneType <types.NoneType>} or L{dict} of L{AliasBase}
        @param memo: A record of the addresses already considered while
            resolving aliases (guards against alias loops).  The default
            value should be used by all external code.

        @rtype: no-argument callable which returns L{IMessage <smtp.IMessage>}
            provider
        @return: A function which takes no arguments and returns a message
            receiver for the user.

        @raise SMTPBadRcpt: When the given user does not exist in this
            domain or an alias of it.
        """
class BounceDomain:
    """
    A domain containing no users at all.

    Attach this to a domain name to refuse every message addressed to it.
    """
    implements(IDomain)

    def exists(self, user):
        """
        Reject the user: nobody exists in a bounce domain.

        @type user: L{User}
        @param user: A user.

        @raise SMTPBadRcpt: always, for every user.
        """
        raise smtp.SMTPBadRcpt(user)

    def willRelay(self, user, protocol):
        """
        A bounce domain never relays.

        @param user: The destination address.
        @param protocol: The protocol over which the message to be relayed
            is being received.

        @rtype: L{bool}
        @return: C{False}.
        """
        return False

    def addUser(self, user, password):
        """
        Silently ignore user additions; a bounce domain stays empty.

        @param user: A username.
        @param password: A password.
        """

    def getCredentialsCheckers(self):
        """
        No users means no credentials checkers.

        @rtype: L{list}
        @return: The empty list.
        """
        return []
class FileMessage:
    """
    A message receiver which spools an incoming message into a file.

    The message is written to a temporary file and renamed to its final
    location once fully received.

    @ivar fp: See L{__init__}.
    @ivar name: See L{__init__}.
    @ivar finalName: See L{__init__}.
    """
    implements(smtp.IMessage)

    def __init__(self, fp, name, finalName):
        """
        @param fp: An open file-like object to write the message into.
        @param name: The full path name of the temporary file.
        @param finalName: The full path name the file should be given after
            the message has been fully received.
        """
        self.fp = fp
        self.name = name
        self.finalName = finalName

    def lineReceived(self, line):
        """
        Append one received line, plus a newline, to the spool file.

        @param line: A received line.
        """
        self.fp.write(line + '\n')

    def eomReceived(self):
        """
        Finish up: close the spool file and move it into place.

        @return: A fired L{Deferred} whose result is the final file name.
        """
        self.fp.close()
        os.rename(self.name, self.finalName)
        return defer.succeed(self.finalName)

    def connectionLost(self):
        """
        Abort: close and delete the partially written spool file.
        """
        self.fp.close()
        os.remove(self.name)
class MailService(service.MultiService):
    """
    An email service.

    @type queue: L{Queue} or L{NoneType <types.NoneType>}
    @ivar queue: A queue for outgoing messages.

    @type domains: L{dict} of L{bytes} -> L{IDomain} provider
    @ivar domains: A mapping of supported domain name to domain object.

    @type portals: L{dict} of L{bytes} -> L{Portal}
    @ivar portals: A mapping of domain name to authentication portal.

    @type aliases: L{NoneType <types.NoneType>} or L{dict} of
        L{bytes} -> L{IAlias} provider
    @ivar aliases: A mapping of domain name to alias.

    @type smtpPortal: L{Portal}
    @ivar smtpPortal: A portal for authentication for the SMTP server.

    @type monitor: L{FileMonitoringService}
    @ivar monitor: A service to monitor changes to files.
    """
    queue = None
    domains = None
    portals = None
    aliases = None
    smtpPortal = None

    def __init__(self):
        """
        Initialize the mail service.
        """
        service.MultiService.__init__(self)
        # Domains and portals for "client" protocols - POP3, IMAP4, etc
        self.domains = DomainWithDefaultDict({}, BounceDomain())
        self.portals = {}
        self.monitor = FileMonitoringService()
        self.monitor.setServiceParent(self)
        # This service is itself the realm for SMTP authentication; see
        # requestAvatar below.
        self.smtpPortal = Portal(self)

    def getPOP3Factory(self):
        """
        Create a POP3 protocol factory.

        @rtype: L{POP3Factory}
        @return: A POP3 protocol factory.
        """
        return protocols.POP3Factory(self)

    def getSMTPFactory(self):
        """
        Create an SMTP protocol factory.

        @rtype: L{SMTPFactory <protocols.SMTPFactory>}
        @return: An SMTP protocol factory.
        """
        return protocols.SMTPFactory(self, self.smtpPortal)

    def getESMTPFactory(self):
        """
        Create an ESMTP protocol factory.

        @rtype: L{ESMTPFactory <protocols.ESMTPFactory>}
        @return: An ESMTP protocol factory.
        """
        return protocols.ESMTPFactory(self, self.smtpPortal)

    def addDomain(self, name, domain):
        """
        Add a domain for which the service will accept email.

        @type name: L{bytes}
        @param name: A domain name.

        @type domain: L{IDomain} provider
        @param domain: A domain object.
        """
        portal = Portal(domain)
        # NOTE(review): relies on Python 2's eager map() to register every
        # checker; under Python 3 this map() would never be consumed.
        map(portal.registerChecker, domain.getCredentialsCheckers())
        self.domains[name] = domain
        self.portals[name] = portal
        if self.aliases and IAliasableDomain.providedBy(domain):
            domain.setAliasGroup(self.aliases)

    def setQueue(self, queue):
        """
        Set the queue for outgoing emails.

        @type queue: L{Queue}
        @param queue: A queue for outgoing messages.
        """
        self.queue = queue

    def requestAvatar(self, avatarId, mind, *interfaces):
        """
        Return a message delivery for an authenticated SMTP user.

        @type avatarId: L{bytes}
        @param avatarId: A string which identifies an authenticated user.

        @param mind: Unused.

        @type interfaces: n-L{tuple} of C{zope.interface.Interface}
        @param interfaces: A group of interfaces, one of which the avatar
            must support.

        @return: A tuple of the supported interface, a message delivery, and
            a logout function (a no-op here).

        @raise NotImplementedError: When the given interfaces do not include
            L{IMessageDelivery}.
        """
        if smtp.IMessageDelivery in interfaces:
            a = protocols.ESMTPDomainDelivery(self, avatarId)
            return smtp.IMessageDelivery, a, lambda: None
        raise NotImplementedError()

    def lookupPortal(self, name):
        """
        Find the portal for a domain.

        @type name: L{bytes}
        @param name: A domain name.

        @rtype: L{Portal}
        @return: A portal.
        """
        return self.portals[name]

    def defaultPortal(self):
        """
        Return the portal for the default domain.

        The default domain is named ''.

        @rtype: L{Portal}
        @return: The portal for the default domain.
        """
        return self.portals['']
class FileMonitoringService(internet.TimerService):
    """
    A service for monitoring changes to files.

    @ivar files: Information about files to be monitored.  Each list entry
        provides, for one file: interval in seconds between checks, file
        name, callback function, time of last modification to the file.

    @ivar intervals: Iterator yielding (delay, index) pairs for successive
        file checks (see L{twisted.python.util.IntervalDifferential}).

    @ivar _call: The next scheduled call to check a file, or C{None}.

    @ivar index: The index of the next file to be checked.
    """
    def __init__(self):
        """
        Initialize the file monitoring service.
        """
        self.files = []
        # 60 seconds is the idle delay used while no files are registered.
        self.intervals = iter(util.IntervalDifferential([], 60))

    def startService(self):
        """
        Start the file monitoring service.
        """
        # NOTE(review): calls Service.startService rather than
        # TimerService's -- scheduling is handled by _setupMonitor instead.
        service.Service.startService(self)
        self._setupMonitor()

    def _setupMonitor(self):
        """
        Schedule the next monitoring call.
        """
        from twisted.internet import reactor
        # Python 2 iterator protocol; yields (seconds-until, file index).
        t, self.index = self.intervals.next()
        self._call = reactor.callLater(t, self._monitor)

    def stopService(self):
        """
        Stop the file monitoring service and cancel any pending check.
        """
        service.Service.stopService(self)
        if self._call:
            self._call.cancel()
            self._call = None

    def monitorFile(self, name, callback, interval=10):
        """
        Start monitoring a file for changes.

        @param name: The name of a file to monitor.
        @param callback: A one-argument function called with the file name
            when the file has changed.
        @param interval: The interval in seconds between checks.
        """
        try:
            mtime = os.path.getmtime(name)
        # Narrowed from a bare except: only stat failures (e.g. the file
        # not existing yet) should be treated as "never modified".
        except OSError:
            mtime = 0
        self.files.append([interval, name, callback, mtime])
        self.intervals.addInterval(interval)

    def unmonitorFile(self, name):
        """
        Stop monitoring a file.

        @param name: A file name.
        """
        for i in range(len(self.files)):
            if name == self.files[i][1]:
                self.intervals.removeInterval(self.files[i][0])
                del self.files[i]
                break

    def _monitor(self):
        """
        Check the next scheduled file, invoke its callback if it changed,
        then reschedule.
        """
        self._call = None
        if self.index is not None:
            name, callback, mtime = self.files[self.index][1:]
            try:
                now = os.path.getmtime(name)
            # Narrowed from a bare except (which also swallowed
            # KeyboardInterrupt/SystemExit) to stat failures only.
            except OSError:
                now = 0
            if now > mtime:
                log.msg("%s changed, notifying listener" % (name,))
                self.files[self.index][3] = now
                callback(name)
        self._setupMonitor()
|
a358003542/shadowsocks | refs/heads/master | shadowsocks/common.py | 945 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import socket
import struct
import logging
def compat_ord(s):
    """Return *s* as an integer byte value.

    On Python 3, indexing ``bytes`` already yields ``int``; such values
    pass through unchanged.  Anything else goes through the builtin ord.
    """
    return s if type(s) == int else _ord(s)


def compat_chr(d):
    """Return the single-byte representation of integer *d*.

    On Python 2 (where ``bytes`` is ``str``) this is the builtin chr;
    on Python 3 it builds a one-element ``bytes`` object.
    """
    if bytes == str:  # Python 2
        return _chr(d)
    return bytes([d])


# Keep references to the builtins, then shadow them module-wide with the
# version-agnostic compat implementations.
_ord = ord
_chr = chr
ord = compat_ord
chr = compat_chr
def to_bytes(s):
    """Encode *s* to UTF-8 bytes on Python 3; pass other values through."""
    if bytes != str and type(s) == str:
        return s.encode('utf-8')
    return s
def to_str(s):
    """Decode UTF-8 *s* to str on Python 3; pass other values through."""
    if bytes != str and type(s) == bytes:
        return s.decode('utf-8')
    return s
def inet_ntop(family, ipstr):
    """Convert a packed binary address to its printable form (as bytes).

    Pure-Python fallback installed by ``patch_socket`` on platforms whose
    ``socket`` module lacks ``inet_ntop``.
    """
    if family == socket.AF_INET:
        return to_bytes(socket.inet_ntoa(ipstr))
    elif family == socket.AF_INET6:
        import re
        # Render each 16-bit group in hex without leading zeros; an
        # all-zero group becomes '' so that adjacent ':' separators form
        # a run of colons, collapsed into '::' below.
        v6addr = ':'.join(('%02X%02X' % (ord(i), ord(j))).lstrip('0')
                          for i, j in zip(ipstr[::2], ipstr[1::2]))
        # Collapse only the first run of colons into the '::' shorthand.
        v6addr = re.sub('::+', '::', v6addr, count=1)
        return to_bytes(v6addr)
def inet_pton(family, addr):
    """Convert a printable address to packed binary form.

    Pure-Python fallback installed by ``patch_socket`` on platforms whose
    ``socket`` module lacks ``inet_pton``.

    @raise RuntimeError: for unsupported address families.
    """
    addr = to_str(addr)
    if family == socket.AF_INET:
        return socket.inet_aton(addr)
    elif family == socket.AF_INET6:
        if '.' in addr:  # a v4 addr embedded in v6, e.g. ::ffff:1.2.3.4
            v4addr = addr[addr.rindex(':') + 1:]
            v4addr = socket.inet_aton(v4addr)
            # list() is required: on Python 3 map() returns a lazy
            # iterator, which has no insert() method.
            v4addr = list(map(lambda x: ('%02X' % ord(x)), v4addr))
            v4addr.insert(2, ':')
            newaddr = addr[:addr.rindex(':') + 1] + ''.join(v4addr)
            return inet_pton(family, newaddr)
        dbyts = [0] * 8  # 8 groups
        grps = addr.split(':')
        for i, v in enumerate(grps):
            if v:
                dbyts[i] = int(v, 16)
            else:
                # '::' found: fill the remaining groups from the right.
                for j, w in enumerate(grps[::-1]):
                    if w:
                        dbyts[7 - j] = int(w, 16)
                    else:
                        break
                break
        # chr is the module-level compat_chr, which yields bytes.
        return b''.join((chr(i // 256) + chr(i % 256)) for i in dbyts)
    else:
        raise RuntimeError("What family?")
def is_ip(address):
    """Return the address family of *address* if it is a literal IP.

    Returns ``socket.AF_INET`` or ``socket.AF_INET6`` on success, and
    ``False`` if the value parses as neither.
    """
    for family in (socket.AF_INET, socket.AF_INET6):
        # Decode inside the try so that undecodable bytes are treated the
        # same as an unparseable address (UnicodeDecodeError is a
        # ValueError subclass).
        try:
            candidate = address if type(address) == str else address.decode('utf8')
            inet_pton(family, candidate)
            return family
        except (TypeError, ValueError, OSError, IOError):
            pass
    return False
def patch_socket():
    """Install the pure-Python inet_pton/inet_ntop fallbacks.

    Only fills in the functions when the platform's ``socket`` module
    does not already provide them.
    """
    if not hasattr(socket, 'inet_pton'):
        socket.inet_pton = inet_pton
    if not hasattr(socket, 'inet_ntop'):
        socket.inet_ntop = inet_ntop


patch_socket()
# Address-type codes used in the request header (same values as the
# SOCKS5 ATYP field, RFC 1928).
ADDRTYPE_IPV4 = 1
ADDRTYPE_IPV6 = 4
ADDRTYPE_HOST = 3
def pack_addr(address):
    """Pack a host into the wire address format.

    Literal IPs become an ATYP byte (1 or 4) followed by the raw 4- or
    16-byte address; anything else becomes ATYP 3 plus a length-prefixed
    hostname (truncated to 255 bytes).
    """
    address_str = to_str(address)
    for family in (socket.AF_INET, socket.AF_INET6):
        try:
            r = socket.inet_pton(family, address_str)
            if family == socket.AF_INET6:
                return b'\x04' + r
            else:
                return b'\x01' + r
        except (TypeError, ValueError, OSError, IOError):
            # Not a literal address of this family; keep trying.
            pass
    if len(address) > 255:
        address = address[:255]  # TODO: hostname silently truncated
    # chr is the module-level compat_chr, which yields bytes.
    return b'\x03' + chr(len(address)) + address
def parse_header(data):
    """Parse a request header from *data*.

    The header is an ATYP byte, the destination address (raw IP or
    length-prefixed hostname), and a 2-byte big-endian port.

    Returns ``(addrtype, dest_addr, dest_port, header_length)``, or
    ``None`` when the header is truncated or the address type is unknown.
    """
    # ord is the module-level compat shim, so data may be indexed on
    # either Python 2 (str) or Python 3 (bytes).
    addrtype = ord(data[0])
    dest_addr = None
    dest_port = None
    header_length = 0
    if addrtype == ADDRTYPE_IPV4:
        if len(data) >= 7:
            dest_addr = socket.inet_ntoa(data[1:5])
            dest_port = struct.unpack('>H', data[5:7])[0]
            header_length = 7
        else:
            logging.warn('header is too short')
    elif addrtype == ADDRTYPE_HOST:
        if len(data) > 2:
            addrlen = ord(data[1])  # one-byte hostname length prefix
            if len(data) >= 2 + addrlen:
                dest_addr = data[2:2 + addrlen]
                dest_port = struct.unpack('>H', data[2 + addrlen:4 +
                                          addrlen])[0]
                header_length = 4 + addrlen
            else:
                logging.warn('header is too short')
        else:
            logging.warn('header is too short')
    elif addrtype == ADDRTYPE_IPV6:
        if len(data) >= 19:
            dest_addr = socket.inet_ntop(socket.AF_INET6, data[1:17])
            dest_port = struct.unpack('>H', data[17:19])[0]
            header_length = 19
        else:
            logging.warn('header is too short')
    else:
        logging.warn('unsupported addrtype %d, maybe wrong password or '
                     'encryption method' % addrtype)
    if dest_addr is None:
        return None
    return addrtype, to_bytes(dest_addr), dest_port, header_length
class IPNetwork(object):
    """A set of CIDR networks supporting ``addr in network`` tests.

    Networks are stored as ``(network-bits, host-bit-count)`` pairs so a
    membership test is a right shift plus an equality comparison.
    """
    # Bit length of an address for each family; False maps non-IP input
    # (the return value of is_ip) to length 0.
    ADDRLENGTH = {socket.AF_INET: 32, socket.AF_INET6: 128, False: 0}

    def __init__(self, addrs):
        """
        @param addrs: networks in CIDR notation, as a comma-separated
            string or an iterable of strings.
        """
        self._network_list_v4 = []
        self._network_list_v6 = []
        if type(addrs) == str:
            addrs = addrs.split(',')
        list(map(self.add_network, addrs))

    def add_network(self, addr):
        """Parse one CIDR block and record it in the per-family list.

        @raise Exception: when *addr* is not valid CIDR notation.
        """
        # '==' rather than 'is': identity comparison with literals is
        # implementation-dependent (SyntaxWarning on CPython >= 3.8).
        if addr == "":
            return
        block = addr.split('/')
        addr_family = is_ip(block[0])
        addr_len = IPNetwork.ADDRLENGTH[addr_family]
        if addr_family == socket.AF_INET:
            ip, = struct.unpack("!I", socket.inet_aton(block[0]))
        elif addr_family == socket.AF_INET6:
            hi, lo = struct.unpack("!QQ", inet_pton(addr_family, block[0]))
            ip = (hi << 64) | lo
        else:
            raise Exception("Not a valid CIDR notation: %s" % addr)
        if len(block) == 1:
            # No prefix given: infer it from the trailing zero bits.
            prefix_size = 0
            while (ip & 1) == 0 and ip != 0:
                ip >>= 1
                prefix_size += 1
            logging.warn("You did't specify CIDR routing prefix size for %s, "
                         "implicit treated as %s/%d" % (addr, addr, addr_len))
        elif block[1].isdigit() and int(block[1]) <= addr_len:
            prefix_size = addr_len - int(block[1])
            ip >>= prefix_size
        else:
            raise Exception("Not a valid CIDR notation: %s" % addr)
        if addr_family == socket.AF_INET:
            self._network_list_v4.append((ip, prefix_size))
        else:
            self._network_list_v6.append((ip, prefix_size))

    def __contains__(self, addr):
        """Return True when *addr* falls inside any recorded network."""
        addr_family = is_ip(addr)
        if addr_family == socket.AF_INET:
            ip, = struct.unpack("!I", socket.inet_aton(addr))
            return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
                           self._network_list_v4))
        elif addr_family == socket.AF_INET6:
            hi, lo = struct.unpack("!QQ", inet_pton(addr_family, addr))
            ip = (hi << 64) | lo
            return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
                           self._network_list_v6))
        else:
            return False
def test_inet_conv():
    """Round-trip an IPv4 and an IPv6 address through inet_pton/inet_ntop."""
    cases = (
        (socket.AF_INET, b'8.8.4.4'),
        (socket.AF_INET6, b'2404:6800:4005:805::1011'),
    )
    for family, text in cases:
        packed = inet_pton(family, text)
        assert inet_ntop(family, packed) == text
def test_parse_header():
    """Verify parse_header on hostname, IPv4 and IPv6 request headers."""
    cases = [
        (b'\x03\x0ewww.google.com\x00\x50',
         (3, b'www.google.com', 80, 18)),
        (b'\x01\x08\x08\x08\x08\x00\x35',
         (1, b'8.8.8.8', 53, 7)),
        (b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00'
         b'\x00\x10\x11\x00\x50',
         (4, b'2404:6800:4005:805::1011', 80, 19)),
    ]
    for raw, expected in cases:
        assert parse_header(raw) == expected
def test_pack_header():
    """Verify pack_addr encodes IPv4, IPv6 and hostname addresses."""
    expectations = {
        b'8.8.8.8': b'\x01\x08\x08\x08\x08',
        b'2404:6800:4005:805::1011':
            b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00\x00\x10\x11',
        b'www.google.com': b'\x03\x0ewww.google.com',
    }
    for addr, packed in expectations.items():
        assert pack_addr(addr) == packed
def test_ip_network():
    """Exercise IPNetwork membership for v4/v6 CIDRs and implicit prefixes."""
    net = IPNetwork('127.0.0.0/24,::ff:1/112,::1,192.168.1.1,192.0.2.0')
    members = [
        '127.0.0.1',
        ':ff:ffff',
        '::1',
        '192.168.1.1',
        '192.0.2.1',
        '192.0.3.1',  # 192.0.2.0 is treated as 192.0.2.0/23
    ]
    non_members = [
        '127.0.1.1',
        '::ffff:1',
        '::2',
        '192.168.1.2',
        'www.google.com',
    ]
    for addr in members:
        assert addr in net
    for addr in non_members:
        assert addr not in net
if __name__ == '__main__':
    # Run the module's self-tests when executed directly.
    for check in (test_inet_conv, test_parse_header,
                  test_pack_header, test_ip_network):
        check()
|
dokterbob/satchmo | refs/heads/master | satchmo/apps/product/urls/base.py | 7 | """Urls which need to be loaded at root level."""
from django.conf.urls.defaults import *
# Satchmo product-admin URLconf (old Django 1.x `patterns()` style: views are
# dotted-path strings resolved against the prefix given as the first argument).
adminpatterns = patterns('',
    (r'^admin/product/configurableproduct/(?P<id>\d+)/getoptions/',
        'product.views.get_configurable_product_options', {},
        'satchmo_admin_configurableproduct'),
)
# Inventory-management and product-variation admin URLs; the view names below
# are resolved relative to the 'product.views.adminviews' module prefix.
adminpatterns += patterns('product.views.adminviews',
    (r'^admin/inventory/edit/$',
        'edit_inventory', {}, 'satchmo_admin_edit_inventory'),
    (r'^inventory/export/$',
        'export_products', {}, 'satchmo_admin_product_export'),
    (r'^inventory/import/$',
        'import_products', {}, 'satchmo_admin_product_import'),
#    (r'^inventory/report/$',
#        'product_active_report', {}, 'satchmo_admin_product_report'),
    (r'^admin/(?P<product_id>\d+)/variations/$',
        'variation_manager', {}, 'satchmo_admin_variation_manager'),
    (r'^admin/variations/$',
        'variation_list', {}, 'satchmo_admin_variation_list'),
)
|
iconmix/skins-addons | refs/heads/master | script.iconmixtools/resources/lib/unidecode/x70.py | 252 | data = (
# Transliteration table for the Unicode block U+7000..U+70FF: entry i is the
# ASCII romanization of code point 0x7000 + i ('[?] ' marks unmapped points).
'You ', # 0x00
'Yang ', # 0x01
'Lu ', # 0x02
'Si ', # 0x03
'Jie ', # 0x04
'Ying ', # 0x05
'Du ', # 0x06
'Wang ', # 0x07
'Hui ', # 0x08
'Xie ', # 0x09
'Pan ', # 0x0a
'Shen ', # 0x0b
'Biao ', # 0x0c
'Chan ', # 0x0d
'Mo ', # 0x0e
'Liu ', # 0x0f
'Jian ', # 0x10
'Pu ', # 0x11
'Se ', # 0x12
'Cheng ', # 0x13
'Gu ', # 0x14
'Bin ', # 0x15
'Huo ', # 0x16
'Xian ', # 0x17
'Lu ', # 0x18
'Qin ', # 0x19
'Han ', # 0x1a
'Ying ', # 0x1b
'Yong ', # 0x1c
'Li ', # 0x1d
'Jing ', # 0x1e
'Xiao ', # 0x1f
'Ying ', # 0x20
'Sui ', # 0x21
'Wei ', # 0x22
'Xie ', # 0x23
'Huai ', # 0x24
'Hao ', # 0x25
'Zhu ', # 0x26
'Long ', # 0x27
'Lai ', # 0x28
'Dui ', # 0x29
'Fan ', # 0x2a
'Hu ', # 0x2b
'Lai ', # 0x2c
'[?] ', # 0x2d
'[?] ', # 0x2e
'Ying ', # 0x2f
'Mi ', # 0x30
'Ji ', # 0x31
'Lian ', # 0x32
'Jian ', # 0x33
'Ying ', # 0x34
'Fen ', # 0x35
'Lin ', # 0x36
'Yi ', # 0x37
'Jian ', # 0x38
'Yue ', # 0x39
'Chan ', # 0x3a
'Dai ', # 0x3b
'Rang ', # 0x3c
'Jian ', # 0x3d
'Lan ', # 0x3e
'Fan ', # 0x3f
'Shuang ', # 0x40
'Yuan ', # 0x41
'Zhuo ', # 0x42
'Feng ', # 0x43
'She ', # 0x44
'Lei ', # 0x45
'Lan ', # 0x46
'Cong ', # 0x47
'Qu ', # 0x48
'Yong ', # 0x49
'Qian ', # 0x4a
'Fa ', # 0x4b
'Guan ', # 0x4c
'Que ', # 0x4d
'Yan ', # 0x4e
'Hao ', # 0x4f
'Hyeng ', # 0x50
'Sa ', # 0x51
'Zan ', # 0x52
'Luan ', # 0x53
'Yan ', # 0x54
'Li ', # 0x55
'Mi ', # 0x56
'Shan ', # 0x57
'Tan ', # 0x58
'Dang ', # 0x59
'Jiao ', # 0x5a
'Chan ', # 0x5b
'[?] ', # 0x5c
'Hao ', # 0x5d
'Ba ', # 0x5e
'Zhu ', # 0x5f
'Lan ', # 0x60
'Lan ', # 0x61
'Nang ', # 0x62
'Wan ', # 0x63
'Luan ', # 0x64
'Xun ', # 0x65
'Xian ', # 0x66
'Yan ', # 0x67
'Gan ', # 0x68
'Yan ', # 0x69
'Yu ', # 0x6a
'Huo ', # 0x6b
'Si ', # 0x6c
'Mie ', # 0x6d
'Guang ', # 0x6e
'Deng ', # 0x6f
'Hui ', # 0x70
'Xiao ', # 0x71
'Xiao ', # 0x72
'Hu ', # 0x73
'Hong ', # 0x74
'Ling ', # 0x75
'Zao ', # 0x76
'Zhuan ', # 0x77
'Jiu ', # 0x78
'Zha ', # 0x79
'Xie ', # 0x7a
'Chi ', # 0x7b
'Zhuo ', # 0x7c
'Zai ', # 0x7d
'Zai ', # 0x7e
'Can ', # 0x7f
'Yang ', # 0x80
'Qi ', # 0x81
'Zhong ', # 0x82
'Fen ', # 0x83
'Niu ', # 0x84
'Jiong ', # 0x85
'Wen ', # 0x86
'Po ', # 0x87
'Yi ', # 0x88
'Lu ', # 0x89
'Chui ', # 0x8a
'Pi ', # 0x8b
'Kai ', # 0x8c
'Pan ', # 0x8d
'Yan ', # 0x8e
'Kai ', # 0x8f
'Pang ', # 0x90
'Mu ', # 0x91
'Chao ', # 0x92
'Liao ', # 0x93
'Gui ', # 0x94
'Kang ', # 0x95
'Tun ', # 0x96
'Guang ', # 0x97
'Xin ', # 0x98
'Zhi ', # 0x99
'Guang ', # 0x9a
'Guang ', # 0x9b
'Wei ', # 0x9c
'Qiang ', # 0x9d
'[?] ', # 0x9e
'Da ', # 0x9f
'Xia ', # 0xa0
'Zheng ', # 0xa1
'Zhu ', # 0xa2
'Ke ', # 0xa3
'Zhao ', # 0xa4
'Fu ', # 0xa5
'Ba ', # 0xa6
'Duo ', # 0xa7
'Duo ', # 0xa8
'Ling ', # 0xa9
'Zhuo ', # 0xaa
'Xuan ', # 0xab
'Ju ', # 0xac
'Tan ', # 0xad
'Pao ', # 0xae
'Jiong ', # 0xaf
'Pao ', # 0xb0
'Tai ', # 0xb1
'Tai ', # 0xb2
'Bing ', # 0xb3
'Yang ', # 0xb4
'Tong ', # 0xb5
'Han ', # 0xb6
'Zhu ', # 0xb7
'Zha ', # 0xb8
'Dian ', # 0xb9
'Wei ', # 0xba
'Shi ', # 0xbb
'Lian ', # 0xbc
'Chi ', # 0xbd
'Huang ', # 0xbe
'[?] ', # 0xbf
'Hu ', # 0xc0
'Shuo ', # 0xc1
'Lan ', # 0xc2
'Jing ', # 0xc3
'Jiao ', # 0xc4
'Xu ', # 0xc5
'Xing ', # 0xc6
'Quan ', # 0xc7
'Lie ', # 0xc8
'Huan ', # 0xc9
'Yang ', # 0xca
'Xiao ', # 0xcb
'Xiu ', # 0xcc
'Xian ', # 0xcd
'Yin ', # 0xce
'Wu ', # 0xcf
'Zhou ', # 0xd0
'Yao ', # 0xd1
'Shi ', # 0xd2
'Wei ', # 0xd3
'Tong ', # 0xd4
'Xue ', # 0xd5
'Zai ', # 0xd6
'Kai ', # 0xd7
'Hong ', # 0xd8
'Luo ', # 0xd9
'Xia ', # 0xda
'Zhu ', # 0xdb
'Xuan ', # 0xdc
'Zheng ', # 0xdd
'Po ', # 0xde
'Yan ', # 0xdf
'Hui ', # 0xe0
'Guang ', # 0xe1
'Zhe ', # 0xe2
'Hui ', # 0xe3
'Kao ', # 0xe4
'[?] ', # 0xe5
'Fan ', # 0xe6
'Shao ', # 0xe7
'Ye ', # 0xe8
'Hui ', # 0xe9
'[?] ', # 0xea
'Tang ', # 0xeb
'Jin ', # 0xec
'Re ', # 0xed
'[?] ', # 0xee
'Xi ', # 0xef
'Fu ', # 0xf0
'Jiong ', # 0xf1
'Che ', # 0xf2
'Pu ', # 0xf3
'Jing ', # 0xf4
'Zhuo ', # 0xf5
'Ting ', # 0xf6
'Wan ', # 0xf7
'Hai ', # 0xf8
'Peng ', # 0xf9
'Lang ', # 0xfa
'Shan ', # 0xfb
'Hu ', # 0xfc
'Feng ', # 0xfd
'Chi ', # 0xfe
'Rong ', # 0xff
)
|
clumsy/intellij-community | refs/heads/master | python/testData/refactoring/changeSignature/updateDocstring.after.py | 83 | def foo(a, d1=1):
    """
    :param a:
    :param d1:
    """
    pass
# NOTE(review): looks like an IDE change-signature test fixture ("after"
# snapshot); the empty :param: entries appear intentional -- confirm before
# filling them in.
foo("a", d1="b")
|
cynecx/distorm | refs/heads/master | disOps/x86sets.py | 21 | #
# x86sets.py
#
# Copyright (C) 2009 Gil Dabah, http://ragestorm.net/disops/
#
from x86header import *
OPT = OperandType
IFlag = InstFlag
class Instructions:
""" Initializes all instruction of the 80x86 CPU (includes AMD64). """
def init_INTEGER(self):
Set = lambda *args: self.SetCallback(ISetClass.INTEGER, *args)
# V 1.5.13 - Pushes can be affected by operand size prefix. Segment is encoded in flags.
# SAL is exactly like SHL, so I prefer to use the mnemonic "SHL" (below).
Set("00", ["ADD"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("01", ["ADD"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("02", ["ADD"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("03", ["ADD"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("04", ["ADD"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("05", ["ADD"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("06", ["PUSH"], [OPT.SEG], IFlag.PRE_ES | IFlag.INVALID_64BITS)
Set("07", ["POP"], [OPT.SEG], IFlag.PRE_ES | IFlag.INVALID_64BITS)
Set("08", ["OR"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("09", ["OR"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("0a", ["OR"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("0b", ["OR"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("0c", ["OR"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("0d", ["OR"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("0e", ["PUSH"], [OPT.SEG], IFlag.PRE_CS | IFlag.INVALID_64BITS)
Set("0f, 00 /00", ["SLDT"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("0f, 00 /01", ["STR"], [OPT.RM16], IFlag.MODRM_REQUIRED)
Set("0f, 00 /02", ["LLDT"], [OPT.RM16], IFlag.MODRM_REQUIRED)
Set("0f, 00 /03", ["LTR"], [OPT.RM16], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 00 /04", ["VERR"], [OPT.RM16], IFlag.MODRM_REQUIRED)
Set("0f, 00 /05", ["VERW"], [OPT.RM16], IFlag.MODRM_REQUIRED)
Set("0f, 01 //00", ["SGDT"], [OPT.MEM16_3264], IFlag.MODRM_REQUIRED | IFlag._64BITS)
Set("0f, 01 //01", ["SIDT"], [OPT.MEM16_3264], IFlag.MODRM_REQUIRED | IFlag._64BITS)
Set("0f, 01 //02", ["LGDT"], [OPT.MEM16_3264], IFlag.MODRM_REQUIRED | IFlag._64BITS)
Set("0f, 01 //03", ["LIDT"], [OPT.MEM16_3264], IFlag.MODRM_REQUIRED | IFlag._64BITS)
# These two instructions need the whole byte, means they use the whole third byte and are NOT divided.
# We'll recognize them by their 3 REG bits in their third byte.
Set("0f, 01 //04", ["SMSW"], [OPT.RFULL_M16], IFlag.MODRM_REQUIRED | IFlag.NOT_DIVIDED)
Set("0f, 01 //06", ["LMSW"], [OPT.RM16], IFlag.MODRM_REQUIRED | IFlag.NOT_DIVIDED)
Set("0f, 01 //07", ["INVLPG"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 01 //c8", ["MONITOR"], [], IFlag._32BITS)
Set("0f, 01 //c9", ["MWAIT"], [], IFlag._32BITS)
Set("0f, 01 //f8", ["SWAPGS"], [], IFlag._64BITS_FETCH)
Set("0f, 01 //f9", ["RDTSCP"], [], IFlag._64BITS_FETCH)
Set("0f, 02", ["LAR"], [OPT.REG_FULL, OPT.RM16], IFlag.MODRM_REQUIRED)
Set("0f, 03", ["LSL"], [OPT.REG_FULL, OPT.RM16], IFlag.MODRM_REQUIRED)
Set("0f, 06", ["CLTS"], [], IFlag._32BITS)
Set("0f, 08", ["INVD"], [], IFlag._32BITS)
Set("0f, 09", ["WBINVD"], [], IFlag._32BITS)
Set("0f, 0b", ["UD2"], [], IFlag._32BITS)
# MOV: In 64 bits decoding mode REG is 64 bits by default.
Set("0f, 20", ["MOV"], [OPT.FREG32_64_RM, OPT.CREG], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
Set("0f, 21", ["MOV"], [OPT.FREG32_64_RM, OPT.DREG], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
Set("0f, 22", ["MOV"], [OPT.CREG, OPT.FREG32_64_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
Set("0f, 23", ["MOV"], [OPT.DREG, OPT.FREG32_64_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
Set("0f, 30", ["WRMSR"], [], IFlag._32BITS)
Set("0f, 31", ["RDTSC"], [], IFlag._32BITS)
Set("0f, 32", ["RDMSR"], [], IFlag._32BITS)
Set("0f, 33", ["RDPMC"], [], IFlag._32BITS)
Set("0f, 80", ["JO"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 81", ["JNO"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 82", ["JB"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 83", ["JAE"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 84", ["JZ"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 85", ["JNZ"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 86", ["JBE"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 87", ["JA"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 88", ["JS"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 89", ["JNS"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 8a", ["JP"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 8b", ["JNP"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 8c", ["JL"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 8d", ["JGE"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 8e", ["JLE"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 8f", ["JG"], [OPT.RELC_FULL], IFlag._32BITS)
Set("0f, 90", ["SETO"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 91", ["SETNO"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 92", ["SETB"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 93", ["SETAE"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 94", ["SETZ"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 95", ["SETNZ"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 96", ["SETBE"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 97", ["SETA"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 98", ["SETS"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 99", ["SETNS"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 9a", ["SETP"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 9b", ["SETNP"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 9c", ["SETL"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 9d", ["SETGE"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 9e", ["SETLE"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 9f", ["SETG"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, a0", ["PUSH"], [OPT.SEG], IFlag._32BITS | IFlag.PRE_FS | IFlag._64BITS)
Set("0f, a1", ["POP"], [OPT.SEG], IFlag._32BITS | IFlag.PRE_FS | IFlag._64BITS)
Set("0f, a2", ["CPUID"], [], IFlag._32BITS)
Set("0f, a3", ["BT"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, a4", ["SHLD"], [OPT.RM_FULL, OPT.REG_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, a5", ["SHLD"], [OPT.RM_FULL, OPT.REG_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, a8", ["PUSH"], [OPT.SEG], IFlag._32BITS | IFlag.PRE_GS | IFlag._64BITS)
Set("0f, a9", ["POP"], [OPT.SEG], IFlag._32BITS | IFlag.PRE_GS | IFlag._64BITS)
Set("0f, aa", ["RSM"], [], IFlag._32BITS)
Set("0f, ab", ["BTS"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, ac", ["SHRD"], [OPT.RM_FULL, OPT.REG_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, ad", ["SHRD"], [OPT.RM_FULL, OPT.REG_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, ae /00", ["FXSAVE", "", "FXSAVE64"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
Set("0f, ae /01", ["FXRSTOR", "", "FXRSTOR64"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
Set("0f, ae /02", ["LDMXCSR"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, ae /03", ["STMXCSR"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
# MFENCE and XSAVEOPT share the same opcode 0f ae /6. It's MFENCE when MOD=11, else XSAVEOPT or XSAVEOPT64 in 64.
Set("0f, ae /06", ["MFENCE", "XSAVEOPT", "XSAVEOPT64"], [OPT.MEM_OPT], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.MNEMONIC_MODRM_BASED | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
# SFENCE and CLFLUSH share the same opcode 0f ae /7. It's SFENCE when MOD=11, else CLFLUSH.
# But the operand is used only for CLFLUSH, which means it's optional. MOD=11 for first mnemonic.
Set("0f, ae /07", ["SFENCE", "CLFLUSH"], [OPT.MEM_OPT], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.MNEMONIC_MODRM_BASED)
# Same for LFENCE and XRSTOR with 0f ae /5.
Set("0f, ae /05", ["LFENCE", "XRSTOR", "XRSTOR64"], [OPT.MEM_OPT], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.MNEMONIC_MODRM_BASED | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
Set("0f, af", ["IMUL"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, b0", ["CMPXCHG"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, b1", ["CMPXCHG"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, b2", ["LSS"], [OPT.REG_FULL, OPT.MEM16_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
Set("0f, b3", ["BTR"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, b4", ["LFS"], [OPT.REG_FULL, OPT.MEM16_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
Set("0f, b5", ["LGS"], [OPT.REG_FULL, OPT.MEM16_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
Set("0f, b6", ["MOVZX"], [OPT.REG_FULL, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, b7", ["MOVZX"], [OPT.REG_FULL, OPT.RM16], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
Set("0f, b9", ["UD2"], [], IFlag._32BITS)
Set("0f, ba /04", ["BT"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, ba /05", ["BTS"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, ba /06", ["BTR"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, ba /07", ["BTC"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, bb", ["BTC"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, bc", ["BSF"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, bd", ["BSR"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
# V 1.1.6 MOVSX/MOVZX now support 16bits regs.
Set("0f, be", ["MOVSX"], [OPT.REG_FULL, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, bf", ["MOVSX"], [OPT.REG_FULL, OPT.RM16], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
Set("0f, c0", ["XADD"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, c1", ["XADD"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("0f, c7 /01", ["CMPXCHG8B", "", "CMPXCHG16B"], [OPT.MEM64_128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
Set("0f, c8", ["BSWAP"], [OPT.IB_R_FULL], IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.GEN_BLOCK)
Set("10", ["ADC"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("11", ["ADC"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("12", ["ADC"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("13", ["ADC"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("14", ["ADC"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("15", ["ADC"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("16", ["PUSH"], [OPT.SEG], IFlag.PRE_SS | IFlag.INVALID_64BITS)
Set("17", ["POP"], [OPT.SEG], IFlag.PRE_SS | IFlag.INVALID_64BITS)
Set("18", ["SBB"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("19", ["SBB"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("1a", ["SBB"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("1b", ["SBB"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("1c", ["SBB"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("1d", ["SBB"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("1e", ["PUSH"], [OPT.SEG], IFlag.PRE_DS | IFlag.INVALID_64BITS)
Set("1f", ["POP"], [OPT.SEG], IFlag.PRE_DS | IFlag.INVALID_64BITS)
Set("20", ["AND"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("21", ["AND"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("22", ["AND"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("23", ["AND"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("24", ["AND"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("25", ["AND"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("27", ["DAA"], [], IFlag.INVALID_64BITS)
Set("28", ["SUB"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("29", ["SUB"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("2a", ["SUB"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("2b", ["SUB"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("2c", ["SUB"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("2d", ["SUB"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("2f", ["DAS"], [], IFlag.INVALID_64BITS)
Set("30", ["XOR"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("31", ["XOR"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("32", ["XOR"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("33", ["XOR"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("34", ["XOR"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("35", ["XOR"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("37", ["AAA"], [], IFlag.INVALID_64BITS)
Set("38", ["CMP"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED)
Set("39", ["CMP"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED)
Set("3a", ["CMP"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED)
Set("3b", ["CMP"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("3c", ["CMP"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("3d", ["CMP"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("3f", ["AAS"], [], IFlag.INVALID_64BITS)
Set("40", ["INC"], [OPT.IB_R_FULL], IFlag.INVALID_64BITS | IFlag.GEN_BLOCK)
Set("48", ["DEC"], [OPT.IB_R_FULL], IFlag.INVALID_64BITS | IFlag.GEN_BLOCK)
Set("50", ["PUSH"], [OPT.IB_R_FULL], IFlag._64BITS | IFlag.GEN_BLOCK)
Set("58", ["POP"], [OPT.IB_R_FULL], IFlag._64BITS | IFlag.GEN_BLOCK)
Set("60", ["PUSHA"], [], IFlag.NATIVE | IFlag.INVALID_64BITS)
Set("61", ["POPA"], [], IFlag.NATIVE | IFlag.INVALID_64BITS)
Set("62", ["BOUND"], [OPT.REG_FULL, OPT.MEM], IFlag.MODRM_REQUIRED | IFlag.INVALID_64BITS)
Set("68", ["PUSH"], [OPT.IMM_FULL], IFlag._64BITS)
Set("69", ["IMUL"], [OPT.REG_FULL, OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED)
Set("6a", ["PUSH"], [OPT.SEIMM8], IFlag.PRE_OP_SIZE | IFlag._64BITS)
Set("6b", ["IMUL"], [OPT.REG_FULL, OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED)
# V 1.5.14 - String instructions aren't supposed to be promoted automatically in 64bits, only with a REX prefix.
# In 64 bits INS/OUTS still supports only 8/16/32 bits.
Set("6c", ["INS"], [OPT.REGI_EDI, OPT.REGDX], IFlag.PRE_REPNZ | IFlag.PRE_REP) # 8 bit.
Set("6d", ["INS"], [OPT.REGI_EDI, OPT.REGDX], IFlag._16BITS | IFlag.PRE_REPNZ | IFlag.PRE_REP) # Full size.
Set("6e", ["OUTS"], [OPT.REGDX, OPT.REGI_ESI], IFlag.PRE_REPNZ | IFlag.PRE_REP) # 8 bit.
Set("6f", ["OUTS"], [OPT.REGDX, OPT.REGI_ESI], IFlag._16BITS | IFlag.PRE_REPNZ | IFlag.PRE_REP) # Full size.
Set("70", ["JO"], [OPT.RELCB], IFlag._64BITS)
Set("71", ["JNO"], [OPT.RELCB], IFlag._64BITS)
Set("72", ["JB"], [OPT.RELCB], IFlag._64BITS)
Set("73", ["JAE"], [OPT.RELCB], IFlag._64BITS)
Set("74", ["JZ"], [OPT.RELCB], IFlag._64BITS)
Set("75", ["JNZ"], [OPT.RELCB], IFlag._64BITS)
Set("76", ["JBE"], [OPT.RELCB], IFlag._64BITS)
Set("77", ["JA"], [OPT.RELCB], IFlag._64BITS)
Set("78", ["JS"], [OPT.RELCB], IFlag._64BITS)
Set("79", ["JNS"], [OPT.RELCB], IFlag._64BITS)
Set("7a", ["JP"], [OPT.RELCB], IFlag._64BITS)
Set("7b", ["JNP"], [OPT.RELCB], IFlag._64BITS)
Set("7c", ["JL"], [OPT.RELCB], IFlag._64BITS)
Set("7d", ["JGE"], [OPT.RELCB], IFlag._64BITS)
Set("7e", ["JLE"], [OPT.RELCB], IFlag._64BITS)
Set("7f", ["JG"], [OPT.RELCB], IFlag._64BITS)
Set("80 /00", ["ADD"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("80 /01", ["OR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("80 /02", ["ADC"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("80 /03", ["SBB"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("80 /04", ["AND"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("80 /05", ["SUB"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("80 /06", ["XOR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("80 /07", ["CMP"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("81 /00", ["ADD"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("81 /01", ["OR"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("81 /02", ["ADC"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("81 /03", ["SBB"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("81 /04", ["AND"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("81 /05", ["SUB"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("81 /06", ["XOR"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("81 /07", ["CMP"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED)
Set("82 /00", ["ADD"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK | IFlag.INVALID_64BITS)
Set("82 /01", ["OR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK | IFlag.INVALID_64BITS)
Set("82 /02", ["ADC"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK | IFlag.INVALID_64BITS)
Set("82 /03", ["SBB"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK | IFlag.INVALID_64BITS)
Set("82 /04", ["AND"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK | IFlag.INVALID_64BITS)
Set("82 /05", ["SUB"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK | IFlag.INVALID_64BITS)
Set("82 /06", ["XOR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK | IFlag.INVALID_64BITS)
Set("82 /07", ["CMP"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag.INVALID_64BITS)
Set("83 /00", ["ADD"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("83 /01", ["OR"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("83 /02", ["ADC"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("83 /03", ["SBB"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("83 /04", ["AND"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("83 /05", ["SUB"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("83 /06", ["XOR"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_LOCK)
Set("83 /07", ["CMP"], [OPT.RM_FULL, OPT.SEIMM8], IFlag.MODRM_REQUIRED)
Set("84", ["TEST"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED)
Set("85", ["TEST"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED)
Set("86", ["XCHG"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("87", ["XCHG"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("88", ["MOV"], [OPT.RM8, OPT.REG8], IFlag.MODRM_REQUIRED)
Set("89", ["MOV"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED)
Set("8a", ["MOV"], [OPT.REG8, OPT.RM8], IFlag.MODRM_REQUIRED)
Set("8b", ["MOV"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("8c", ["MOV"], [OPT.RFULL_M16, OPT.SREG], IFlag.MODRM_REQUIRED)
Set("8d", ["LEA"], [OPT.REG_FULL, OPT.MEM], IFlag.MODRM_REQUIRED)
Set("8e", ["MOV"], [OPT.SREG, OPT.RFULL_M16], IFlag.MODRM_REQUIRED)
Set("8f /00", ["POP"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._64BITS)
# V 1.7.28 - NOP in 64bits can be prefixed by REX -
# Therefore 0x90 in 16/32 bits is NOP. 0x90 with REX base is an XCHG. All else are NOP.
Set("90", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
# V 1.7.24 - New instruction multi-byte NOP.
Set("0f, 1f", ["NOP"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("91", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
Set("92", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
Set("93", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
Set("94", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
Set("95", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
Set("96", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
Set("97", ["XCHG"], [OPT.IB_R_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
Set("98", ["CBW", "CWDE", "CDQE"], [], IFlag.USE_EXMNEMONIC | IFlag.USE_EXMNEMONIC2)
Set("99", ["CWD", "CDQ", "CQO"], [], IFlag.USE_EXMNEMONIC | IFlag.USE_EXMNEMONIC2)
Set("9a", ["CALL FAR"], [OPT.PTR16_FULL], IFlag.INVALID_64BITS)
# V 1.4.a PUSHF/POPF are supposed to be promoted to 64 bits, without a REX.
Set("9c", ["PUSHF"], [], IFlag.NATIVE | IFlag._64BITS)
Set("9d", ["POPF"], [], IFlag.NATIVE | IFlag._64BITS)
Set("9e", ["SAHF"], [], IFlag.INST_FLAGS_NONE)
Set("9f", ["LAHF"], [], IFlag.INST_FLAGS_NONE)
# V 1.6.21 MOV MEM-OFFSET instructions are NOT automatically promoted to 64bits, only with a REX.
Set("a0", ["MOV"], [OPT.ACC8, OPT.MOFFS8], IFlag.INST_FLAGS_NONE)
Set("a1", ["MOV"], [OPT.ACC_FULL, OPT.MOFFS_FULL], IFlag.INST_FLAGS_NONE)
Set("a2", ["MOV"], [OPT.MOFFS8, OPT.ACC8], IFlag.INST_FLAGS_NONE)
Set("a3", ["MOV"], [OPT.MOFFS_FULL, OPT.ACC_FULL], IFlag.INST_FLAGS_NONE)
# movs es:di, ds:si*
# cmps ds:si*, es:di
# stos es:di, al
# scas es:di al
# lods al, ds:si*
Set("a4", ["MOVS"], [OPT.REGI_EDI, OPT.REGI_ESI], IFlag.PRE_REPNZ | IFlag.PRE_REP)
Set("a5", ["MOVS"], [OPT.REGI_EDI, OPT.REGI_ESI], IFlag._16BITS | IFlag.PRE_REPNZ | IFlag.PRE_REP | IFlag._64BITS | IFlag.PRE_REX)
Set("a6", ["CMPS"], [OPT.REGI_ESI, OPT.REGI_EDI], IFlag.PRE_REPNZ | IFlag.PRE_REP)
Set("a7", ["CMPS"], [OPT.REGI_ESI, OPT.REGI_EDI], IFlag._16BITS | IFlag.PRE_REPNZ | IFlag.PRE_REP | IFlag._64BITS | IFlag.PRE_REX)
Set("aa", ["STOS"], [OPT.REGI_EDI, OPT.ACC8], IFlag.PRE_REPNZ | IFlag.PRE_REP)
Set("ab", ["STOS"], [OPT.REGI_EDI, OPT.ACC_FULL], IFlag._16BITS | IFlag.PRE_REPNZ | IFlag.PRE_REP | IFlag._64BITS | IFlag.PRE_REX)
Set("ac", ["LODS"], [OPT.ACC8, OPT.REGI_ESI], IFlag.PRE_REPNZ | IFlag.PRE_REP)
Set("ad", ["LODS"], [OPT.ACC_FULL, OPT.REGI_ESI], IFlag._16BITS | IFlag.PRE_REPNZ | IFlag.PRE_REP | IFlag._64BITS | IFlag.PRE_REX)
Set("ae", ["SCAS"], [OPT.REGI_EDI, OPT.ACC8], IFlag.PRE_REPNZ | IFlag.PRE_REP)
Set("af", ["SCAS"], [OPT.REGI_EDI, OPT.ACC_FULL], IFlag._16BITS | IFlag.PRE_REPNZ | IFlag.PRE_REP | IFlag._64BITS | IFlag.PRE_REX)
Set("a8", ["TEST"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("a9", ["TEST"], [OPT.ACC_FULL, OPT.IMM_FULL], IFlag.INST_FLAGS_NONE)
Set("b0", ["MOV"], [OPT.IB_RB, OPT.IMM8], IFlag.GEN_BLOCK)
Set("b8", ["MOV"], [OPT.IB_R_FULL, OPT.IMM_FULL], IFlag._64BITS | IFlag.PRE_REX | IFlag.GEN_BLOCK)
Set("c0 /00", ["ROL"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c0 /01", ["ROR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c0 /02", ["RCL"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c0 /03", ["RCR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c0 /04", ["SHL"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c0 /05", ["SHR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c0 /06", ["SAL"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c0 /07", ["SAR"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /00", ["ROL"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /01", ["ROR"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /02", ["RCL"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /03", ["RCR"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /04", ["SHL"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /05", ["SHR"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /06", ["SAL"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c1 /07", ["SAR"], [OPT.RM_FULL, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c2", ["RET"], [OPT.IMM16], IFlag._64BITS)
Set("c3", ["RET"], [], IFlag._64BITS)
Set("c4", ["LES"], [OPT.REG_FULL, OPT.MEM16_FULL], IFlag.MODRM_REQUIRED | IFlag.INVALID_64BITS)
Set("c5", ["LDS"], [OPT.REG_FULL, OPT.MEM16_FULL], IFlag.MODRM_REQUIRED | IFlag.INVALID_64BITS)
Set("c6 /00", ["MOV"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("c7 /00", ["MOV"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED)
Set("c8", ["ENTER"], [OPT.IMM16_1, OPT.IMM8_2], IFlag._64BITS)
Set("c9", ["LEAVE"], [], IFlag._64BITS)
# V 1.1.6 RETF is NOT promoted automatically in 64bits. So with REX it should be RETFQ.
Set("ca", ["RETF"], [OPT.IMM16], IFlag.NATIVE | IFlag._64BITS | IFlag.PRE_REX)
Set("cb", ["RETF"], [], IFlag.NATIVE | IFlag._64BITS | IFlag.PRE_REX)
Set("cc", ["INT 3"], [], IFlag.INST_FLAGS_NONE)
Set("cd", ["INT"], [OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("ce", ["INTO"], [], IFlag.INVALID_64BITS)
Set("cf", ["IRET"], [], IFlag.NATIVE | IFlag._64BITS | IFlag.PRE_REX)
Set("d0 /00", ["ROL"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d0 /01", ["ROR"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d0 /02", ["RCL"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d0 /03", ["RCR"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d0 /04", ["SHL"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d0 /05", ["SHR"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d0 /06", ["SAL"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d0 /07", ["SAR"], [OPT.RM8, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /00", ["ROL"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /01", ["ROR"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /02", ["RCL"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /03", ["RCR"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /04", ["SHL"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /05", ["SHR"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /06", ["SAL"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d1 /07", ["SAR"], [OPT.RM_FULL, OPT.CONST1], IFlag.MODRM_REQUIRED)
Set("d2 /00", ["ROL"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d2 /01", ["ROR"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d2 /02", ["RCL"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d2 /03", ["RCR"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d2 /04", ["SHL"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d2 /05", ["SHR"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d2 /06", ["SAL"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d2 /07", ["SAR"], [OPT.RM8, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /00", ["ROL"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /01", ["ROR"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /02", ["RCL"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /03", ["RCR"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /04", ["SHL"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /05", ["SHR"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /06", ["SAL"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d3 /07", ["SAR"], [OPT.RM_FULL, OPT.REGCL], IFlag.MODRM_REQUIRED)
Set("d4", ["AAM"], [OPT.IMM8], IFlag.INVALID_64BITS)
Set("d5", ["AAD"], [OPT.IMM8], IFlag.INVALID_64BITS)
Set("d6", ["SALC"], [], IFlag.INVALID_64BITS)
# XLATB / XLAT BYTE PTR DS:[EBX + AL]
Set("d7", ["XLAT"], [OPT.REGI_EBXAL], IFlag.PRE_DS)
# LOOPxx are also affected by the ADDRESS-SIZE prefix!
# But they require a suffix letter indicating their size.
# LOOPxx are promoted to 64bits.
Set("e0", ["LOOPNZ"], [OPT.RELCB], IFlag.PRE_ADDR_SIZE | IFlag.NATIVE)
Set("e1", ["LOOPZ"], [OPT.RELCB], IFlag.PRE_ADDR_SIZE | IFlag.NATIVE)
Set("e2", ["LOOP"], [OPT.RELCB], IFlag.PRE_ADDR_SIZE | IFlag.NATIVE)
# JMP CX:
# This is a special instruction, because the ADDRESS-SIZE prefix affects its register size!!!
# INST_PRE_ADDR_SIZE isn't supposed to really be a flag of a static instruction, it's quite a hack to distinguish this instruction.
# J(r/e)CXZ are promoted to 64bits.
Set("e3", ["JCXZ", "JECXZ", "JRCXZ"], [OPT.RELCB], IFlag.PRE_ADDR_SIZE | IFlag.USE_EXMNEMONIC | IFlag.USE_EXMNEMONIC2)
Set("e4", ["IN"], [OPT.ACC8, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("e5", ["IN"], [OPT.ACC_FULL_NOT64, OPT.IMM8], IFlag.INST_FLAGS_NONE)
Set("e6", ["OUT"], [OPT.IMM8, OPT.ACC8], IFlag.INST_FLAGS_NONE)
Set("e7", ["OUT"], [OPT.IMM8, OPT.ACC_FULL_NOT64], IFlag.INST_FLAGS_NONE)
Set("e8", ["CALL"], [OPT.RELC_FULL], IFlag._64BITS)
Set("e9", ["JMP"], [OPT.RELC_FULL], IFlag._64BITS)
Set("ea", ["JMP FAR"], [OPT.PTR16_FULL], IFlag.INVALID_64BITS)
Set("eb", ["JMP"], [OPT.RELCB], IFlag._64BITS)
Set("ec", ["IN"], [OPT.ACC8, OPT.REGDX], IFlag.INST_FLAGS_NONE)
Set("ed", ["IN"], [OPT.ACC_FULL_NOT64, OPT.REGDX], IFlag.INST_FLAGS_NONE)
Set("ee", ["OUT"], [OPT.REGDX, OPT.ACC8], IFlag.INST_FLAGS_NONE)
Set("ef", ["OUT"], [OPT.REGDX, OPT.ACC_FULL_NOT64], IFlag.INST_FLAGS_NONE)
Set("f1", ["INT1"], [], IFlag.INST_FLAGS_NONE)
Set("f4", ["HLT"], [], IFlag.INST_FLAGS_NONE)
Set("f5", ["CMC"], [], IFlag.INST_FLAGS_NONE)
Set("f6 /00", ["TEST"], [OPT.RM8, OPT.IMM8], IFlag.MODRM_REQUIRED)
Set("f6 /02", ["NOT"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("f6 /03", ["NEG"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("f6 /04", ["MUL"], [OPT.RM8], IFlag.MODRM_REQUIRED)
Set("f6 /05", ["IMUL"], [OPT.RM8], IFlag.MODRM_REQUIRED)
Set("f6 /06", ["DIV"], [OPT.RM8], IFlag.MODRM_REQUIRED)
Set("f6 /07", ["IDIV"], [OPT.RM8], IFlag.MODRM_REQUIRED)
Set("f7 /00", ["TEST"], [OPT.RM_FULL, OPT.IMM_FULL], IFlag.MODRM_REQUIRED)
Set("f7 /02", ["NOT"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("f7 /03", ["NEG"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("f7 /04", ["MUL"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("f7 /05", ["IMUL"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("f7 /06", ["DIV"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("f7 /07", ["IDIV"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED)
Set("f8", ["CLC"], [], IFlag.INST_FLAGS_NONE)
Set("f9", ["STC"], [], IFlag.INST_FLAGS_NONE)
Set("fa", ["CLI"], [], IFlag.INST_FLAGS_NONE)
Set("fb", ["STI"], [], IFlag.INST_FLAGS_NONE)
Set("fc", ["CLD"], [], IFlag.INST_FLAGS_NONE)
Set("fd", ["STD"], [], IFlag.INST_FLAGS_NONE)
Set("fe /00", ["INC"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("fe /01", ["DEC"], [OPT.RM8], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("ff /00", ["INC"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("ff /01", ["DEC"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_LOCK)
Set("ff /02", ["CALL"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._64BITS)
Set("ff /03", ["CALL FAR"], [OPT.MEM16_FULL], IFlag.MODRM_REQUIRED | IFlag._64BITS | IFlag.PRE_REX)
Set("ff /04", ["JMP"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._64BITS)
Set("ff /05", ["JMP FAR"], [OPT.MEM16_FULL], IFlag.MODRM_REQUIRED | IFlag._64BITS | IFlag.PRE_REX)
Set("ff /06", ["PUSH"], [OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._64BITS)
# New instructions from AMD July 2007 (POPCNT is already defined in SSE4.2, MONITOR, MWAIT are already defined above):
# Note LZCNT can be prefixed by 0x66 although it has also a mandatory prefix!
Set("f3, 0f, bd", ["LZCNT"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_OP_SIZE)
Set("0f, 38, f0", ["MOVBE"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
Set("0f, 38, f1", ["MOVBE"], [OPT.RM_FULL, OPT.REG_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
# New instructions from Intel 2008:
Set("0f, 01, d0", ["XGETBV"], [], IFlag._32BITS)
Set("0f, 01, d1", ["XSETBV"], [], IFlag._32BITS)
Set("0f, ae /04", ["XSAVE", "", "XSAVE64"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
# XRSTOR is declared below (see LFENCE), cause it is shared with LFENCE.
# New instruction from Intel September 2009:
Set("0f, 37", ["GETSEC"], [], IFlag.MODRM_REQUIRED | IFlag._32BITS)
# XSAVEOPT is declared below (see SFENCE).
def init_Exported(self):
    """ Exported instructions are special instructions that create a collision in the DB.
    Therefore they are exported directly so diStorm can use them manually in the
    insts.c instruction look-up code.
    Note that their opcodes are totally ignored here.
    Also the path to the instruction in the trie has to be defined by any instruction with same opcode!
    So for instance, NOP|PAUSE|XCHG -> XCHG is really defined, the rest are exported.
    Inside diStorm it will know which one to use. """
    # Bind Set so each entry is tagged with the INTEGER instruction-set class.
    Set = lambda *args: self.SetCallback(ISetClass.INTEGER, *args)
    # 63 /R
    # 16/32: ARPL reg/mem16, reg16
    # 64: MOVSXD OT_REG_FULL, OT_RM_FULL
    # Damn processor, my DB won't support mixing of operands types.
    # Define ARPL!
    Set("63", ["ARPL"], [OPT.RM16, OPT.REG16], IFlag.MODRM_REQUIRED)
    # MOVSXD:
    # This is the worst defined instruction ever. It has so many variations.
    # I decided after a third review, to make it like MOVSXD RAX, EAX when there IS a REX.W.
    # Otherwise it will be MOVSXD EAX, EAX, which really zero extends to RAX.
    # Completely ignoring DB 0x66, which is possible by the docs, BTW.
    Set("63", ["MOVSXD"], [OPT.REG32_64, OPT.RM32], IFlag.MODRM_REQUIRED | IFlag._64BITS | IFlag.PRE_REX | IFlag.EXPORTED)
    Set("90", ["NOP"], [], IFlag.EXPORTED)
    # This instruction is supported directly in diStorm, since it's not a mandatory prefix really.
    Set("f3, 90", ["PAUSE"], [], IFlag._32BITS | IFlag.EXPORTED)
    # Wait instruction is needed, but it can be a prefix. See next page for more info.
    Set("9b", ["WAIT"], [], IFlag.EXPORTED)
    # VMPTRLD and RDRAND use same 2 first bytes and 06 as group (thus 2.3 bytes).
    # When MOD is 3 it's the RDRAND instruction and for the rest it's VMPTRLD.
    # The problem is that they have different operands, so a hack is required in the lookup instruction code.
    # Plus remember that this opcode is prefixed (because of VMCLEAR) sometimes and therefore will be part of a prefixed table!
    Set("0f, c7 /06", ["RDRAND"], [OPT.RM_FULL], IFlag._32BITS | IFlag.MODRM_INCLUDED | IFlag.MODRM_REQUIRED | IFlag._64BITS | IFlag.EXPORTED)
    # Rebind Set for the 3DNow! instruction-set class.
    Set = lambda *args: self.SetCallback(ISetClass._3DNOW, *args)
    # This is not really an instruction, but a gateway to all 3dnow instructions.
    Set("0f, 0f", ["_3DNOW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._3DNOW_FETCH | IFlag.EXPORTED)
def init_FPU(self):
    """ Register the x87 FPU instruction table (opcodes 0xd8-0xdf, plus the
    WAIT-prefixed 0x9b forms). Entries with a //-group use bits of the ModR/M
    byte to select the mnemonic; entry order matters for trie construction. """
    # Bind Set so each entry is tagged with the FPU instruction-set class.
    Set = lambda *args: self.SetCallback(ISetClass.FPU, *args)
    # The WAIT instruction is tricky, it starts a 3 bytes instruction series.
    # If you find a 3 bytes long instruction you are on your own.
    # But the problem is that if you don't find a 3 bytes long instruction and the first byte that is going to be DB'ed
    # is this 0x9b byte, which represents the WAIT instruction, thus you'll have to output it as a standalone instruction.
    # Example:
    # 9B DB E3 ~ FINIT
    # 9B DB E4 ~ WAIT; DB 0xDB; ...
    # Get the idea?
    # It might be a part of a long instruction (3 bytes), else it just a simple one byte instruction by its own.
    # This way is a simple rule which is broken easily when dealing with Trie DB, the whole point is that the byte says
    # "read another byte" or "I'm your one", but here both happens.
    # That's why I will have to hardcode the WAIT instruction in the decode function which DB'es unknown bytes.
    # Set(0x9b, "WAIT") ....
    # IFlag.PRE_OP_SIZE is set in order to tell the decoder that 0x9b can be part of the instruction.
    # Because it's found in the prefixed table at the same entry of 0x66 for mandatory prefix.
    Set("9b, d9 //06", ["FSTENV"], [OPT.MEM], IFlag.PRE_OP_SIZE | IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("9b, d9 //07", ["FSTCW"], [OPT.FPUM16], IFlag.PRE_OP_SIZE | IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("9b, db //e2", ["FCLEX"], [], IFlag.PRE_OP_SIZE | IFlag._32BITS)
    Set("9b, db //e3", ["FINIT"], [], IFlag.PRE_OP_SIZE | IFlag._32BITS)
    Set("9b, dd //06", ["FSAVE"], [OPT.MEM], IFlag.PRE_OP_SIZE | IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("9b, dd //07", ["FSTSW"], [OPT.FPUM16], IFlag.PRE_OP_SIZE | IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("9b, df //e0", ["FSTSW"], [OPT.ACC16], IFlag.PRE_OP_SIZE | IFlag._32BITS)
    # 0xd8: arithmetic on a 32-bit memory operand or ST(i).
    Set("d8 //00", ["FADD"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //01", ["FMUL"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //02", ["FCOM"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //03", ["FCOMP"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //04", ["FSUB"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //05", ["FSUBR"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //06", ["FDIV"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //07", ["FDIVR"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d8 //c0", ["FADD"], [OPT.FPU_SSI], IFlag.GEN_BLOCK)
    Set("d8 //c8", ["FMUL"], [OPT.FPU_SSI], IFlag.GEN_BLOCK)
    Set("d8 //d0", ["FCOM"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("d8 //d8", ["FCOMP"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("d8 //d9", ["FCOMP"], [], IFlag.INST_FLAGS_NONE)
    Set("d8 //e0", ["FSUB"], [OPT.FPU_SSI], IFlag.GEN_BLOCK)
    Set("d8 //e8", ["FSUBR"], [OPT.FPU_SSI], IFlag.GEN_BLOCK)
    Set("d8 //f0", ["FDIV"], [OPT.FPU_SSI], IFlag.GEN_BLOCK)
    Set("d8 //f8", ["FDIVR"], [OPT.FPU_SSI], IFlag.GEN_BLOCK)
    # 0xd9: load/store/control plus the no-operand transcendental group.
    Set("d9 //00", ["FLD"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d9 //02", ["FST"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d9 //03", ["FSTP"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("d9 //04", ["FLDENV"], [OPT.MEM], IFlag.MODRM_REQUIRED)
    Set("d9 //05", ["FLDCW"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("d9 //06", ["FNSTENV"], [OPT.MEM], IFlag.MODRM_REQUIRED)
    Set("d9 //07", ["FNSTCW"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("d9 //c0", ["FLD"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("d9 //c8", ["FXCH"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("d9 //c9", ["FXCH"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //d0", ["FNOP"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //e0", ["FCHS"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //e1", ["FABS"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //e4", ["FTST"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //e5", ["FXAM"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //e8", ["FLD1"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //e9", ["FLDL2T"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //ea", ["FLDL2E"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //eb", ["FLDPI"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //ec", ["FLDLG2"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //ed", ["FLDLN2"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //ee", ["FLDZ"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f0", ["F2XM1"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f1", ["FYL2X"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f2", ["FPTAN"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f3", ["FPATAN"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f4", ["FXTRACT"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f5", ["FPREM1"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f6", ["FDECSTP"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f7", ["FINCSTP"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f8", ["FPREM"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //f9", ["FYL2XP1"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //fa", ["FSQRT"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //fb", ["FSINCOS"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //fc", ["FRNDINT"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //fd", ["FSCALE"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //fe", ["FSIN"], [], IFlag.INST_FLAGS_NONE)
    Set("d9 //ff", ["FCOS"], [], IFlag.INST_FLAGS_NONE)
    # 0xda: integer arithmetic on a 32-bit memory operand.
    Set("da //00", ["FIADD"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //01", ["FIMUL"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //02", ["FICOM"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //03", ["FICOMP"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //04", ["FISUB"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //05", ["FISUBR"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //06", ["FIDIV"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //07", ["FIDIVR"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("da //e9", ["FUCOMPP"], [], IFlag.INST_FLAGS_NONE)
    # 0xdb: integer load/store, 80-bit load/store and control operations.
    Set("db //00", ["FILD"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("db //02", ["FIST"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("db //03", ["FISTP"], [OPT.FPUM32], IFlag.MODRM_REQUIRED)
    Set("db //05", ["FLD"], [OPT.FPUM80], IFlag.MODRM_REQUIRED)
    Set("db //07", ["FSTP"], [OPT.FPUM80], IFlag.MODRM_REQUIRED)
    # Obsolete.
    Set("db //e0", ["FENI"], [], IFlag.INST_FLAGS_NONE)
    # Obsolete.
    Set("db //e1", ["FEDISI"], [], IFlag.INST_FLAGS_NONE)
    Set("db //e2", ["FNCLEX"], [], IFlag.INST_FLAGS_NONE)
    Set("db //e3", ["FNINIT"], [], IFlag.INST_FLAGS_NONE)
    # Obsolete.
    Set("db //e4", ["FSETPM"], [], IFlag.INST_FLAGS_NONE)
    Set("db //e8", ["FUCOMI"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    # 0xdc: arithmetic on a 64-bit memory operand or ST(i), ST.
    Set("dc //00", ["FADD"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //01", ["FMUL"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //02", ["FCOM"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //03", ["FCOMP"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //04", ["FSUB"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //05", ["FSUBR"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //06", ["FDIV"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //07", ["FDIVR"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dc //c0", ["FADD"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("dc //c8", ["FMUL"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("dc //e0", ["FSUBR"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("dc //e8", ["FSUB"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("dc //f0", ["FDIVR"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("dc //f8", ["FDIV"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    # 0xdd: 64-bit load/store, save/restore and unordered compares.
    Set("dd //00", ["FLD"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dd //02", ["FST"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dd //03", ["FSTP"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("dd //04", ["FRSTOR"], [OPT.MEM], IFlag.MODRM_REQUIRED)
    Set("dd //06", ["FNSAVE"], [OPT.MEM], IFlag.MODRM_REQUIRED)
    Set("dd //07", ["FNSTSW"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("dd //c0", ["FFREE"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("dd //d0", ["FST"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("dd //d8", ["FSTP"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("dd //e0", ["FUCOM"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("dd //e1", ["FUCOM"], [], IFlag.INST_FLAGS_NONE)
    Set("dd //e8", ["FUCOMP"], [OPT.FPU_SI], IFlag.GEN_BLOCK)
    Set("dd //e9", ["FUCOMP"], [], IFlag.INST_FLAGS_NONE)
    # 0xde: integer arithmetic on a 16-bit memory operand, and the *P (pop) forms.
    Set("de //00", ["FIADD"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //01", ["FIMUL"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //02", ["FICOM"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //03", ["FICOMP"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //04", ["FISUB"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //05", ["FISUBR"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //06", ["FIDIV"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //07", ["FIDIVR"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("de //c0", ["FADDP"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("de //c1", ["FADDP"], [], IFlag.INST_FLAGS_NONE)
    Set("de //c8", ["FMULP"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("de //c9", ["FMULP"], [], IFlag.INST_FLAGS_NONE)
    Set("de //d9", ["FCOMPP"], [], IFlag.INST_FLAGS_NONE)
    Set("de //e0", ["FSUBRP"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("de //e1", ["FSUBRP"], [], IFlag.INST_FLAGS_NONE)
    Set("de //e8", ["FSUBP"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("de //e9", ["FSUBP"], [], IFlag.INST_FLAGS_NONE)
    Set("de //f0", ["FDIVRP"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("de //f1", ["FDIVRP"], [], IFlag.INST_FLAGS_NONE)
    Set("de //f8", ["FDIVP"], [OPT.FPU_SIS], IFlag.GEN_BLOCK)
    Set("de //f9", ["FDIVP"], [], IFlag.INST_FLAGS_NONE)
    # 0xdf: 16/64-bit integer load/store, BCD, status word and ordered compares.
    Set("df //00", ["FILD"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("df //02", ["FIST"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("df //03", ["FISTP"], [OPT.FPUM16], IFlag.MODRM_REQUIRED)
    Set("df //04", ["FBLD"], [OPT.FPUM80], IFlag.MODRM_REQUIRED)
    Set("df //05", ["FILD"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("df //06", ["FBSTP"], [OPT.FPUM80], IFlag.MODRM_REQUIRED)
    Set("df //07", ["FISTP"], [OPT.FPUM64], IFlag.MODRM_REQUIRED)
    Set("df //e0", ["FNSTSW"], [OPT.ACC16], IFlag.INST_FLAGS_NONE)
    Set("df //e8", ["FUCOMIP"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("df //f0", ["FCOMIP"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
def init_P6(self):
    """ Register P6-family instructions: SYSCALL/SYSRET/SYSENTER/SYSEXIT,
    the CMOVcc family (0f 40-4f) and the FPU FCMOVcc/FCOMI forms. """
    # Bind Set so each entry is tagged with the P6 instruction-set class.
    Set = lambda *args: self.SetCallback(ISetClass.P6, *args)
    Set("0f, 05", ["SYSCALL"], [], IFlag._32BITS)
    Set("0f, 07", ["SYSRET"], [], IFlag._32BITS)
    Set("0f, 34", ["SYSENTER"], [], IFlag._32BITS) # Only AMD states invalid in 64 bits.
    Set("0f, 35", ["SYSEXIT"], [], IFlag._32BITS) # Only AMD states invalid in 64 bits.
    # CMOVcc — one entry per condition code, low nibble of the second opcode byte.
    Set("0f, 40", ["CMOVO"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 41", ["CMOVNO"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 42", ["CMOVB"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 43", ["CMOVAE"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 44", ["CMOVZ"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 45", ["CMOVNZ"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 46", ["CMOVBE"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 47", ["CMOVA"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 48", ["CMOVS"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 49", ["CMOVNS"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 4a", ["CMOVP"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 4b", ["CMOVNP"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 4c", ["CMOVL"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 4d", ["CMOVGE"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 4e", ["CMOVLE"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 4f", ["CMOVG"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # FPU conditional moves and ordered compare (P6 and later).
    Set("da //c0", ["FCMOVB"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("da //c8", ["FCMOVE"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("da //d0", ["FCMOVBE"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("da //d8", ["FCMOVU"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("db //c0", ["FCMOVNB"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("db //c8", ["FCMOVNE"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("db //d0", ["FCMOVNBE"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("db //d8", ["FCMOVNU"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
    Set("db //f0", ["FCOMI"], [OPT.FPU_SSI], IFlag._32BITS | IFlag.GEN_BLOCK)
def init_MMX(self):
    """ Register the MMX instruction table (packed integer operations on the
    MM registers, 0f-prefixed opcodes). """
    # Bind Set so each entry is tagged with the MMX instruction-set class.
    Set = lambda *args: self.SetCallback(ISetClass.MMX, *args)
    # Pseudo Opcodes, the second mnemonic is concatenated to the first mnemonic.
    Set("0f, 60", ["PUNPCKLBW"], [OPT.MM, OPT.MM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 61", ["PUNPCKLWD"], [OPT.MM, OPT.MM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 62", ["PUNPCKLDQ"], [OPT.MM, OPT.MM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 63", ["PACKSSWB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 64", ["PCMPGTB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 65", ["PCMPGTW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 66", ["PCMPGTD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 67", ["PACKUSWB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 68", ["PUNPCKHBW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 69", ["PUNPCKHWD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 6a", ["PUNPCKHDQ"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 6b", ["PACKSSDW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # V 1.6.22 MOVD/MOVQ are used for 32bits or 64bits correspondingly.
    Set("0f, 6e", ["MOVD", "", "MOVQ"], [OPT.MM, OPT.RM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
    Set("0f, 6f", ["MOVQ"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # Shift-by-immediate groups (register form only, hence MODRR_REQUIRED).
    Set("0f, 71 /02", ["PSRLW"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 71 /04", ["PSRAW"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 71 /06", ["PSLLW"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 72 /02", ["PSRLD"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 72 /04", ["PSRAD"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 72 /06", ["PSLLD"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 73 /02", ["PSRLQ"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 73 /06", ["PSLLQ"], [OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 74", ["PCMPEQB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 75", ["PCMPEQW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 76", ["PCMPEQD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 77", ["EMMS"], [], IFlag._32BITS)
    # V 1.6.22 MOVD/MOVQ are used for 32bits or 64bits correspondingly.
    Set("0f, 7e", ["MOVD", "", "MOVQ"], [OPT.RM32_64, OPT.MM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
    Set("0f, 7f", ["MOVQ"], [OPT.MM64, OPT.MM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, d1", ["PSRLW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, d2", ["PSRLD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, d3", ["PSRLQ"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, d5", ["PMULLW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, d8", ["PSUBUSB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, d9", ["PSUBUSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, db", ["PAND"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, dc", ["PADDUSB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, dd", ["PADDUSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, df", ["PANDN"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e1", ["PSRAW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e2", ["PSRAD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e5", ["PMULHW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e8", ["PSUBSB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e9", ["PSUBSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, eb", ["POR"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, ec", ["PADDSB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, ed", ["PADDSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, ef", ["PXOR"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f1", ["PSLLW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f2", ["PSLLD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f3", ["PSLLQ"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f5", ["PMADDWD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f8", ["PSUBB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f9", ["PSUBW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, fa", ["PSUBD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, fc", ["PADDB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, fd", ["PADDW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, fe", ["PADDD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
def init_SSE(self):
    """ Register the SSE instruction table: packed/scalar single-precision
    float operations, prefetch hints, and the SSE integer extensions on MM
    registers. The f3-prefixed entries are the scalar (SS) forms. """
    # Bind Set so each entry is tagged with the SSE instruction-set class.
    Set = lambda *args: self.SetCallback(ISetClass.SSE, *args)
    Set("0f, 10", ["MOVUPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 11", ["MOVUPS"], [OPT.XMM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # The problem with these instructions (MOVHLPS/MOVLPS) is that both kinds need partially the ModR/M byte.
    # mod=11 for first mnemonic.
    Set("0f, 12", ["MOVHLPS", "MOVLPS"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.MNEMONIC_MODRM_BASED)
    Set("0f, 13", ["MOVLPS"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 14", ["UNPCKLPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 15", ["UNPCKHPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 16", ["MOVLHPS", "MOVHPS"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.MNEMONIC_MODRM_BASED)
    Set("0f, 17", ["MOVHPS"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # Prefetch hints select the mnemonic via the ModR/M reg field (group /00-/03).
    Set("0f, 18 /00", ["PREFETCHNTA"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 18 /01", ["PREFETCHT0"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 18 /02", ["PREFETCHT1"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 18 /03", ["PREFETCHT2"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 28", ["MOVAPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 29", ["MOVAPS"], [OPT.XMM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 2a", ["CVTPI2PS"], [OPT.XMM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 2b", ["MOVNTPS"], [OPT.MEM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 2c", ["CVTTPS2PI"], [OPT.MM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 2d", ["CVTPS2PI"], [OPT.MM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 2e", ["UCOMISS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 2f", ["COMISS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 50", ["MOVMSKPS"], [OPT.REG32, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, 51", ["SQRTPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 52", ["RSQRTPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 53", ["RCPPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 54", ["ANDPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 55", ["ANDNPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 56", ["ORPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 57", ["XORPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 58", ["ADDPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 59", ["MULPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 5c", ["SUBPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 5d", ["MINPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 5e", ["DIVPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 5f", ["MAXPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 70", ["PSHUFW"], [OPT.MM, OPT.MM64, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # CMPPS pseudo-opcode: the condition suffix is appended to "CMP" + "PS".
    Set("0f, c2", ["CMP", "PS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.PSEUDO_OPCODE)
    Set("0f, c4", ["PINSRW"], [OPT.MM, OPT.R32_M16, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, c5", ["PEXTRW"], [OPT.REG32, OPT.MM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, c6", ["SHUFPS"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, d7", ["PMOVMSKB"], [OPT.REG32, OPT.MM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("0f, da", ["PMINUB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, de", ["PMAXUB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e0", ["PAVGB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e3", ["PAVGW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e4", ["PMULHUW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, e7", ["MOVNTQ"], [OPT.MEM64, OPT.MM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, ea", ["PMINSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, ee", ["PMAXSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f6", ["PSADBW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f7", ["MASKMOVQ"], [OPT.MM, OPT.MM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    # Scalar single-precision forms (mandatory f3 prefix).
    Set("f3, 0f, 10", ["MOVSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 11", ["MOVSS"], [OPT.XMM32, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 2a", ["CVTSI2SS"], [OPT.XMM, OPT.RM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
    Set("f3, 0f, 2c", ["CVTTSS2SI"], [OPT.REG32_64, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
    Set("f3, 0f, 2d", ["CVTSS2SI"], [OPT.REG32_64, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
    Set("f3, 0f, 51", ["SQRTSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 52", ["RSQRTSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 53", ["RCPSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 58", ["ADDSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 59", ["MULSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 5c", ["SUBSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 5d", ["MINSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 5e", ["DIVSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 5f", ["MAXSS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # CMPSS pseudo-opcode: the condition suffix is appended to "CMP" + "SS".
    Set("f3, 0f, c2", ["CMP", "SS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.PSEUDO_OPCODE)
def init_SSE2(self):
    """Register the SSE2 instruction set tables.

    Each Set() entry binds an opcode byte sequence to its mnemonic(s),
    operand types and decoding flags under ISetClass.SSE2.
    """
    Set = lambda *args: self.SetCallback(ISetClass.SSE2, *args)
    Set("0f, 5a", ["CVTPS2PD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, 5b", ["CVTDQ2PS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, c3", ["MOVNTI"], [OPT.MEM32_64, OPT.REG32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
    Set("0f, d4", ["PADDQ"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, f4", ["PMULUDQ"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, fb", ["PSUBQ"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 10", ["MOVUPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 11", ["MOVUPD"], [OPT.XMM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 12", ["MOVLPD"], [OPT.XMM, OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 13", ["MOVLPD"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 14", ["UNPCKLPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 15", ["UNPCKHPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 16", ["MOVHPD"], [OPT.XMM, OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 17", ["MOVHPD"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 28", ["MOVAPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 29", ["MOVAPD"], [OPT.XMM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 2a", ["CVTPI2PD"], [OPT.XMM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 2b", ["MOVNTPD"], [OPT.MEM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 2c", ["CVTTPD2PI"], [OPT.MM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # Fix: CVTPD2PI writes an MMX register (Intel SDM: CVTPD2PI mm, xmm/m128),
    # mirroring CVTTPD2PI above; the destination operand was wrongly OPT.XMM.
    Set("66, 0f, 2d", ["CVTPD2PI"], [OPT.MM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 2e", ["UCOMISD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 2f", ["COMISD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 50", ["MOVMSKPD"], [OPT.REG32, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("66, 0f, 51", ["SQRTPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 54", ["ANDPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 55", ["ANDNPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 56", ["ORPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 57", ["XORPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 58", ["ADDPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 59", ["MULPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 5a", ["CVTPD2PS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 5b", ["CVTPS2DQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 5c", ["SUBPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 5d", ["MINPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 5e", ["DIVPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 5f", ["MAXPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 60", ["PUNPCKLBW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 61", ["PUNPCKLWD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 62", ["PUNPCKLDQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 63", ["PACKSSWB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 64", ["PCMPGTB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 65", ["PCMPGTW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 66", ["PCMPGTD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 67", ["PACKUSWB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 68", ["PUNPCKHBW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 69", ["PUNPCKHWD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 6a", ["PUNPCKHDQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 6b", ["PACKSSDW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 6c", ["PUNPCKLQDQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 6d", ["PUNPCKHQDQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # V 1.6.22: MOVD is emitted for 32-bit operands and MOVQ for 64-bit ones, correspondingly.
    Set("66, 0f, 6e", ["MOVD", "", "MOVQ"], [OPT.XMM, OPT.RM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
    Set("66, 0f, 6f", ["MOVDQA"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 70", ["PSHUFD"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 71 /02", ["PSRLW"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 71 /04", ["PSRAW"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 71 /06", ["PSLLW"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 72 /02", ["PSRLD"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 72 /04", ["PSRAD"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 72 /06", ["PSLLD"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 73 /02", ["PSRLQ"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 73 /03", ["PSRLDQ"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 73 /06", ["PSLLQ"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 73 /07", ["PSLLDQ"], [OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 74", ["PCMPEQB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 75", ["PCMPEQW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 76", ["PCMPEQD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # V 1.6.22: MOVD is emitted for 32-bit operands and MOVQ for 64-bit ones, correspondingly.
    Set("66, 0f, 7e", ["MOVD", "", "MOVQ"], [OPT.RM32_64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX | IFlag.USE_EXMNEMONIC2)
    Set("66, 0f, 7f", ["MOVDQA"], [OPT.XMM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, c2", ["CMP", "PD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.PSEUDO_OPCODE)
    Set("66, 0f, c4", ["PINSRW"], [OPT.XMM, OPT.R32_M16, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, c5", ["PEXTRW"], [OPT.REG32, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, c6", ["SHUFPD"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d1", ["PSRLW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d2", ["PSRLD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d3", ["PSRLQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d4", ["PADDQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d5", ["PMULLW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d6", ["MOVQ"], [OPT.XMM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d7", ["PMOVMSKB"], [OPT.REG32, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("66, 0f, d8", ["PSUBUSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d9", ["PSUBUSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, da", ["PMINUB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, db", ["PAND"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, dc", ["PADDUSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, dd", ["PADDUSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, de", ["PMAXUB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, df", ["PANDN"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e0", ["PAVGB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e1", ["PSRAW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e2", ["PSRAD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e3", ["PAVGW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e4", ["PMULHUW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e5", ["PMULHW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e6", ["CVTTPD2DQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e7", ["MOVNTDQ"], [OPT.MEM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e8", ["PSUBSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, e9", ["PSUBSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, ea", ["PMINSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, eb", ["POR"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, ec", ["PADDSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, ed", ["PADDSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, ee", ["PMAXSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, ef", ["PXOR"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f1", ["PSLLW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f2", ["PSLLD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f3", ["PSLLQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f4", ["PMULUDQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f5", ["PMADDWD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f6", ["PSADBW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f7", ["MASKMOVDQU"], [OPT.XMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("66, 0f, f8", ["PSUBB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, f9", ["PSUBW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, fa", ["PSUBD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, fb", ["PSUBQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, fc", ["PADDB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, fd", ["PADDW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, fe", ["PADDD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 10", ["MOVSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 11", ["MOVSD"], [OPT.XMM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 2a", ["CVTSI2SD"], [OPT.XMM, OPT.RM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
    Set("f2, 0f, 2c", ["CVTTSD2SI"], [OPT.REG32_64, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
    Set("f2, 0f, 2d", ["CVTSD2SI"], [OPT.REG32_64, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_REX)
    Set("f2, 0f, 51", ["SQRTSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 58", ["ADDSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 59", ["MULSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 5a", ["CVTSD2SS"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 5c", ["SUBSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 5d", ["MINSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 5e", ["DIVSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 5f", ["MAXSD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 70", ["PSHUFLW"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, c2", ["CMP", "SD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC | IFlag.PSEUDO_OPCODE)
    Set("f2, 0f, d6", ["MOVDQ2Q"], [OPT.MM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("f2, 0f, e6", ["CVTPD2DQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 5a", ["CVTSS2SD"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 5b", ["CVTTPS2DQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 6f", ["MOVDQU"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 70", ["PSHUFHW"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 7e", ["MOVQ"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 7f", ["MOVDQU"], [OPT.XMM128, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, d6", ["MOVQ2DQ"], [OPT.XMM, OPT.MM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.MODRR_REQUIRED)
    Set("f3, 0f, e6", ["CVTDQ2PD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
def init_SSE3(self):
    """Register the SSE3 instruction set tables.

    Each Set() entry binds an opcode byte sequence to its mnemonic(s),
    operand types and decoding flags under ISetClass.SSE3.
    """
    Set = lambda *args: self.SetCallback(ISetClass.SSE3, *args)
    Set("66, 0f, 7c", ["HADDPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, 7d", ["HSUBPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, d0", ["ADDSUBPD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("db //01", ["FISTTP"], [OPT.FPUM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("dd //01", ["FISTTP"], [OPT.FPUM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("df //01", ["FISTTP"], [OPT.FPUM16], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 12", ["MOVDDUP"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # Fix: MOVSLDUP reads a full 128-bit source (Intel SDM: MOVSLDUP xmm1, xmm2/m128),
    # just like MOVSHDUP below; the source operand was wrongly OPT.XMM64.
    Set("f3, 0f, 12", ["MOVSLDUP"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 7c", ["HADDPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, 7d", ["HSUBPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, d0", ["ADDSUBPS"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f2, 0f, f0", ["LDDQU"], [OPT.XMM, OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, 16", ["MOVSHDUP"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
def init_SSSE3(self):
    """Register the SSSE3 instruction set tables (MMX and XMM forms)."""
    def add(*entry):
        self.SetCallback(ISetClass.SSSE3, *entry)
    # MMX (64-bit) forms.
    add("0f, 38, 00", ["PSHUFB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 01", ["PHADDW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 02", ["PHADDD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 03", ["PHADDSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 04", ["PMADDUBSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 05", ["PHSUBW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 06", ["PHSUBD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 07", ["PHSUBSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 08", ["PSIGNB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 09", ["PSIGNW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 0a", ["PSIGND"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 0b", ["PMULHRSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 1c", ["PABSB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 1d", ["PABSW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 38, 1e", ["PABSD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 3a, 0f", ["PALIGNR"], [OPT.MM, OPT.MM64, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # XMM (128-bit) forms, selected by the 0x66 mandatory prefix.
    add("66, 0f, 38, 00", ["PSHUFB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 01", ["PHADDW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 02", ["PHADDD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 03", ["PHADDSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 04", ["PMADDUBSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 05", ["PHSUBW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 06", ["PHSUBD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 07", ["PHSUBSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 08", ["PSIGNB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 09", ["PSIGNW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 0a", ["PSIGND"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 0b", ["PMULHRSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 1c", ["PABSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 1d", ["PABSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 1e", ["PABSD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 0f", ["PALIGNR"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
def init_SSE4_1(self):
    """Register the SSE4.1 instruction set tables."""
    def add(*entry):
        self.SetCallback(ISetClass.SSE4_1, *entry)
    add("66, 0f, 3a, 0d", ["BLENDPD"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 0c", ["BLENDPS"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 15", ["BLENDVPD"], [OPT.XMM, OPT.XMM128, OPT.REGXMM0], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 14", ["BLENDVPS"], [OPT.XMM, OPT.XMM128, OPT.REGXMM0], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 41", ["DPPD"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 40", ["DPPS"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 17", ["EXTRACTPS"], [OPT.RM32_64, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 21", ["INSERTPS"], [OPT.XMM, OPT.XMM32, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 2a", ["MOVNTDQA"], [OPT.XMM, OPT.MEM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 42", ["MPSADBW"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 2b", ["PACKUSDW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 10", ["PBLENDVB"], [OPT.XMM, OPT.XMM128, OPT.REGXMM0], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 0e", ["PBLENDW"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 29", ["PCMPEQQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 14", ["PEXTRB"], [OPT.R32_64_M8, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 16", ["PEXTRD", "", "PEXTRQ"], [OPT.RM32_64, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC2)
    add("66, 0f, 3a, 15", ["PEXTRW"], [OPT.R32_64_M16, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 41", ["PHMINPOSUW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 20", ["PINSRB"], [OPT.XMM, OPT.R32_M8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 22", ["PINSRD", "", "PINSRQ"], [OPT.XMM, OPT.RM32_64, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.USE_EXMNEMONIC2)
    add("66, 0f, 38, 3c", ["PMAXSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 3d", ["PMAXSD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 3f", ["PMAXUD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 3e", ["PMAXUW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 38", ["PMINSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 39", ["PMINSD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 3b", ["PMINUD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 3a", ["PMINUW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 20", ["PMOVSXBW"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 21", ["PMOVSXBD"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 22", ["PMOVSXBQ"], [OPT.XMM, OPT.XMM16], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 23", ["PMOVSXWD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 24", ["PMOVSXWQ"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 25", ["PMOVSXDQ"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 30", ["PMOVZXBW"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 31", ["PMOVZXBD"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 32", ["PMOVZXBQ"], [OPT.XMM, OPT.XMM16], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 33", ["PMOVZXWD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 34", ["PMOVZXWQ"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 35", ["PMOVZXDQ"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 28", ["PMULDQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 40", ["PMULLD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 09", ["ROUNDPD"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 08", ["ROUNDPS"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 0b", ["ROUNDSD"], [OPT.XMM, OPT.XMM64, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 0a", ["ROUNDSS"], [OPT.XMM, OPT.XMM32, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
def init_SSE4_2(self):
    """Register the SSE4.2 instruction set tables."""
    def add(*entry):
        self.SetCallback(ISetClass.SSE4_2, *entry)
    add("f2, 0f, 38, f0", ["CRC32"], [OPT.REG32_64, OPT.RM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("f2, 0f, 38, f1", ["CRC32"], [OPT.REG32_64, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # NOTE(review): PTEST is an SSE4.1 instruction per the Intel SDM; it is
    # classified here under SSE4_2 — confirm this grouping is intentional.
    add("66, 0f, 38, 17", ["PTEST"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 61", ["PCMPESTRI"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 60", ["PCMPESTRM"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 63", ["PCMPISTRI"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 3a, 62", ["PCMPISTRM"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("66, 0f, 38, 37", ["PCMPGTQ"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # Note POPCNT can be prefixed by 0x66 although it has also a mandatory prefix!
    add("f3, 0f, b8", ["POPCNT"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_OP_SIZE)
def init_SSE4_A(self):
    """Register the SSE4a instruction set tables (AMD, July 2007)."""
    def add(*entry):
        self.SetCallback(ISetClass.SSE4_A, *entry)
    # The DB can't hang a table off a prefixed table (it would complicate
    # everything for no gain), so instead of encoding "66, 0f, 78 /00" we
    # force a ModR/M REG field of 0 via a flag:
    add("66, 0f, 78", ["EXTRQ"], [OPT.XMM_RM, OPT.IMM8_1, OPT.IMM8_2], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.FORCE_REG0)
    add("66, 0f, 79", ["EXTRQ"], [OPT.XMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # Four operands(!) for the immediate form of INSERTQ.
    add("f2, 0f, 78", ["INSERTQ"], [OPT.XMM, OPT.XMM_RM, OPT.IMM8_1, OPT.IMM8_2], IFlag.MODRM_REQUIRED | IFlag.MODRR_REQUIRED | IFlag._32BITS)
    add("f2, 0f, 79", ["INSERTQ"], [OPT.XMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag.MODRR_REQUIRED | IFlag._32BITS)
    add("f2, 0f, 2b", ["MOVNTSD"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("f3, 0f, 2b", ["MOVNTSS"], [OPT.MEM32, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
def init_3DNOW(self):
    """Register the AMD 3DNow! instruction set tables.

    3DNow! opcodes carry their real opcode byte as a suffix after the
    operands, hence the _3DNOW_FETCH flag on those entries.
    """
    def add(*entry):
        self.SetCallback(ISetClass._3DNOW, *entry)
    add("0f, 0d /00", ["PREFETCH"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 0d /01", ["PREFETCHW"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 0e", ["FEMMS"], [], IFlag.INST_FLAGS_NONE)
    add("0f, 0f, 0d", ["PI2FD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 1d", ["PF2ID"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 90", ["PFCMPGE"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 94", ["PFMIN"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 96", ["PFRCP"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 97", ["PFRSQRT"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 9a", ["PFSUB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 9e", ["PFADD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, a0", ["PFCMPGT"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, a4", ["PFMAX"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, a6", ["PFRCPIT1"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, a7", ["PFRSQIT1"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, aa", ["PFSUBR"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, ae", ["PFACC"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, b0", ["PFCMPEQ"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, b4", ["PFMUL"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, b6", ["PFRCPIT2"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, b7", ["PMULHRW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, bf", ["PAVGUSB"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
def init_3DNOWEXT(self):
    """Register the extended (enhanced) 3DNow! instruction set tables."""
    def add(*entry):
        self.SetCallback(ISetClass._3DNOWEXT, *entry)
    add("0f, 0f, 0c", ["PI2FW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 1c", ["PF2IW"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 8a", ["PFNACC"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, 8e", ["PFPNACC"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
    add("0f, 0f, bb", ["PSWAPD"], [OPT.MM, OPT.MM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._3DNOW_FETCH)
def init_VMX(self):
    """Register the VMX instruction set tables (plus a few later Intel/AMD
    additions grouped here: INVPCID, TZCNT, FS/GS-base access and the AMD
    SSE5 half-precision conversions).
    """
    Set = lambda *args: self.SetCallback(ISetClass.VMX, *args)
    Set("0f, 01 //c1", ["VMCALL"], [], IFlag._32BITS)
    Set("0f, 01 //c2", ["VMLAUNCH"], [], IFlag._32BITS)
    Set("0f, 01 //c3", ["VMRESUME"], [], IFlag._32BITS)
    Set("0f, 01 //c4", ["VMXOFF"], [], IFlag._32BITS)
    # In 64bits the operands are promoted to 64bits automatically.
    Set("0f, 78", ["VMREAD"], [OPT.RM32_64, OPT.REG32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    Set("0f, 79", ["VMWRITE"], [OPT.REG32_64, OPT.RM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    # VMPTRLD collides with RDRAND (see exported instructions).
    Set("0f, c7 /06", ["VMPTRLD"], [OPT.MEM], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("0f, c7 /07", ["VMPTRST"], [OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("66, 0f, c7 /06", ["VMCLEAR"], [OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    Set("f3, 0f, c7 /06", ["VMXON"], [OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    # New VMX instructions from Intel September 2009:
    Set("66, 0f, 38, 80", ["INVEPT"], [OPT.REG32_64, OPT.MEM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    Set("66, 0f, 38, 81", ["INVVPID"], [OPT.REG32_64, OPT.MEM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    # New instructions from Intel December 2011.
    Set("0f, 01 //d4", ["VMFUNC"], [], IFlag._32BITS)
    Set("66, 0f, 38, 82", ["INVPCID"], [OPT.REG32_64, OPT.MEM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    # Can be prefixed with 0x66, see LZCNT.
    Set("f3, 0f, bc", ["TZCNT"], [OPT.REG_FULL, OPT.RM_FULL], IFlag.MODRM_REQUIRED | IFlag.PRE_OP_SIZE)
    Set("f3, 0f, ae /0", ["RDFSBASE"], [OPT.REG32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    Set("f3, 0f, ae /1", ["RDGSBASE"], [OPT.REG32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    Set("f3, 0f, ae /2", ["WRFSBASE"], [OPT.REG32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    Set("f3, 0f, ae /3", ["WRGSBASE"], [OPT.REG32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS)
    # SSE5 (AMD half-precision conversions).
    # Fix: the flags were passed as TWO positional arguments
    # ("IFlag._32BITS, IFlag._64BITS") instead of being OR-ed together
    # as every other entry in this file does.
    # NOTE(review): these reg/mem-operand entries look like they also need
    # IFlag.MODRM_REQUIRED — confirm against the decoder tables.
    Set("0f, 7a, 31", ["CVTPS2PH"], [OPT.XMM64, OPT.XMM], IFlag._32BITS | IFlag._64BITS)
    Set("0f, 7a, 30", ["CVTPH2PS"], [OPT.XMM, OPT.XMM64], IFlag._32BITS | IFlag._64BITS)
def init_SVM(self):
    """Register the AMD SVM (Secure Virtual Machine) instruction set tables."""
    def add(*entry):
        self.SetCallback(ISetClass.SVM, *entry)
    add("0f, 01 //d8", ["VMRUN"], [OPT.REGI_EAX], IFlag.MODRM_REQUIRED | IFlag._32BITS)
    add("0f, 01 //d9", ["VMMCALL"], [], IFlag._32BITS)
    add("0f, 01 //da", ["VMLOAD"], [OPT.REGI_EAX], IFlag._32BITS)
    add("0f, 01 //db", ["VMSAVE"], [OPT.REGI_EAX], IFlag._32BITS)
    add("0f, 01 //dc", ["STGI"], [], IFlag._32BITS)
    add("0f, 01 //dd", ["CLGI"], [], IFlag._32BITS)
    add("0f, 01 //de", ["SKINIT"], [OPT.REGI_EAX], IFlag._32BITS)
    add("0f, 01 //df", ["INVLPGA"], [OPT.REGI_EAX, OPT.REGECX], IFlag._32BITS)
def init_AVX(self):
# Most SSE/SSE2/SSE3/SSSE3/SSE4 instructions have been promoted, and they are all part of the AVX category.
# It's easier to keep them separated, also because some of the promoted instructions have different number of operands, etc.
Set = lambda *args: self.SetCallback(ISetClass.AVX, *args)
SetAes = lambda *args: self.SetCallback(ISetClass.AES, *args)
SetClmul = lambda *args: self.SetCallback(ISetClass.CLMUL, *args)
Set("66, 0f, 58", ["VADDPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 58", ["VADDPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 58", ["VADDSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 58", ["VADDSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, d0", ["VADDSUBPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, d0", ["VADDSUBPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
SetAes("66, 0f, 38, dc", ["AESENC"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
SetAes("66, 0f, 38, dd", ["AESENCLAST"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
SetAes("66, 0f, 38, dc", ["VAESENC"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
SetAes("66, 0f, 38, dd", ["VAESENCLAST"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
SetAes("66, 0f, 38, de", ["AESDEC"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
SetAes("66, 0f, 38, df", ["AESDECLAST"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
SetAes("66, 0f, 38, de", ["VAESDEC"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
SetAes("66, 0f, 38, df", ["VAESDECLAST"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
SetAes("66, 0f, 38, db", ["AESIMC"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS)
SetAes("66, 0f, 38, db", ["VAESIMC"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
SetAes("66, 0f, 3a, df", ["AESKEYGENASSIST"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
SetAes("66, 0f, 3a, df", ["VAESKEYGENASSIST"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 54", ["VANDPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 54", ["VANDPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 55", ["VANDNPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 55", ["VANDNPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 0d", ["VBLENDPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 0c", ["VBLENDPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 4b", ["VBLENDVPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256, OPT.YXMM_IMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 4a", ["VBLENDVPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256, OPT.YXMM_IMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 18", ["VBROADCASTSS"], [OPT.YXMM, OPT.MEM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 19", ["VBROADCASTSD"], [OPT.YMM, OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.FORCE_VEXL)
Set("66, 0f, 38, 1a", ["VBROADCASTF128"], [OPT.YMM, OPT.MEM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.FORCE_VEXL)
Set("66, 0f, c2", ["VCMP", "PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L | IFlag.PSEUDO_OPCODE)
Set("0f, c2", ["VCMP", "PS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L | IFlag.PSEUDO_OPCODE)
Set("f2, 0f, c2", ["VCMP", "SD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.PSEUDO_OPCODE)
Set("f3, 0f, c2", ["VCMP", "SS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.PSEUDO_OPCODE)
Set("66, 0f, 2f", ["VCOMISD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, 2f", ["VCOMISS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, e6", ["VCVTDQ2PD"], [OPT.YXMM, OPT.LXMM64_128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 5b", ["VCVTDQ2PS"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, e6", ["VCVTPD2DQ"], [OPT.XMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 5a", ["VCVTPD2PS"], [OPT.XMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 5b", ["VCVTPS2DQ"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 5a", ["VCVTPS2PD"], [OPT.YXMM, OPT.LXMM64_128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 2d", ["VCVTSD2SI"], [OPT.WREG32_64, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W)
Set("f2, 0f, 5a", ["VCVTSD2SS"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f2, 0f, 2a", ["VCVTSI2SD"], [OPT.XMM, OPT.VXMM, OPT.WRM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W)
Set("f3, 0f, 2a", ["VCVTSI2SS"], [OPT.XMM, OPT.VXMM, OPT.WRM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W)
Set("f3, 0f, 5a", ["VCVTSS2SD"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 2d", ["VCVTSS2SI"], [OPT.WREG32_64, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W)
Set("66, 0f, e6", ["VCVTTPD2DQ"], [OPT.XMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f3, 0f, 5b", ["VCVTTPS2DQ"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 2c", ["VCVTTSD2SI"], [OPT.WREG32_64, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W)
Set("f3, 0f, 2c", ["VCVTTSS2SI"], [OPT.WREG32_64, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W)
Set("66, 0f, 5e", ["VDIVPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 5e", ["VDIVPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 5e", ["VDIVSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 5e", ["VDIVSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 41", ["VDPPD"], [OPT.XMM, OPT.VXMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 40", ["VDPPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 19", ["VEXTRACTF128"], [OPT.XMM128, OPT.YMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.FORCE_VEXL)
Set("66, 0f, 3a, 17", ["VEXTRACTPS"], [OPT.RM32, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 7c", ["VHADDPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 7c", ["VHADDPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 7d", ["VHSUBPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 7d", ["VHSUBPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 18", ["VINSERTF128"], [OPT.YMM, OPT.VYMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.FORCE_VEXL)
Set("66, 0f, 3a, 21", ["VINSERTPS"], [OPT.XMM, OPT.VXMM, OPT.XMM32, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f2, 0f, f0", ["VLDDQU"], [OPT.YXMM, OPT.LMEM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, ae /02", ["VLDMXCSR"], [OPT.MEM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, f7", ["VMASKMOVDQU"], [OPT.XMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag.MODRR_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 2c", ["VMASKMOVPS"], [OPT.YXMM, OPT.VYXMM, OPT.LMEM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 2d", ["VMASKMOVPD"], [OPT.YXMM, OPT.VYXMM, OPT.LMEM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 2e", ["VMASKMOVPS"], [OPT.LMEM128_256, OPT.VYXMM, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 2f", ["VMASKMOVPD"], [OPT.LMEM128_256, OPT.VYXMM, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 5f", ["VMAXPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 5f", ["VMAXPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 5f", ["VMAXSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 5f", ["VMAXSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 5d", ["VMINPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 5d", ["VMINPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 5d", ["VMINSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 5d", ["VMINSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 28", ["VMOVAPD"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 29", ["VMOVAPD"], [OPT.YXMM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 28", ["VMOVAPS"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 29", ["VMOVAPS"], [OPT.YXMM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 6e", ["VMOVD", "VMOVQ"], [OPT.XMM, OPT.WRM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
Set("66, 0f, 7e", ["VMOVD", "VMOVQ"], [OPT.WRM32_64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
Set("f3, 0f, 7e", ["VMOVQ"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, d6", ["VMOVQ"], [OPT.XMM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f2, 0f, 12", ["VMOVDDUP"], [OPT.YXMM, OPT.YXMM64_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 6f", ["VMOVDQA"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 7f", ["VMOVDQA"], [OPT.YXMM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f3, 0f, 6f", ["VMOVDQU"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f3, 0f, 7f", ["VMOVDQU"], [OPT.YXMM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 12", ["VMOVHLPS", "VMOVLPS"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MNEMONIC_MODRM_BASED | IFlag.USE_EXMNEMONIC)
Set("66, 0f, 12", ["VMOVLPD"], [OPT.XMM, OPT.VXMM, OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, 13", ["VMOVLPS"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 13", ["VMOVLPD"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, 16", ["VMOVLHPS", "VMOVHPS"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MNEMONIC_MODRM_BASED | IFlag.USE_EXMNEMONIC)
Set("66, 0f, 16", ["VMOVHPD"], [OPT.XMM, OPT.VXMM, OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, 17", ["VMOVHPS"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 17", ["VMOVHPD"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 50", ["VMOVMSKPD"], [OPT.REG32_64, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag.MODRR_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 50", ["VMOVMSKPS"], [OPT.REG32_64, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag.MODRR_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, e7", ["VMOVNTDQ"], [OPT.LMEM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 2a", ["VMOVNTDQA"], [OPT.XMM, OPT.MEM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 2b", ["VMOVNTPD"], [OPT.LMEM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 2b", ["VMOVNTPS"], [OPT.LMEM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
# Next two instructions are based on vvvv field.
Set("f2, 0f, 10", ["VMOVSD"], [OPT.XMM, OPT.VXMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
Set("f2, 0f, 10", ["VMOVSD"], [OPT.XMM, OPT.MEM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
# Next two instructions are based on vvvv field.
Set("f2, 0f, 11", ["VMOVSD"], [OPT.XMM, OPT.VXMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
Set("f2, 0f, 11", ["VMOVSD"], [OPT.MEM64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
Set("f3, 0f, 16", ["VMOVSHDUP"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f3, 0f, 12", ["VMOVSLDUP"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
# Next two instructions are based on vvvv field.
Set("f3, 0f, 10", ["VMOVSS"], [OPT.XMM, OPT.VXMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
Set("f3, 0f, 10", ["VMOVSS"], [OPT.XMM, OPT.MEM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
# Next two instructions are based on vvvv field.
Set("f3, 0f, 11", ["VMOVSS"], [OPT.XMM, OPT.VXMM, OPT.XMM_RM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
Set("f3, 0f, 11", ["VMOVSS"], [OPT.MEM32, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRM_INCLUDED | IFlag.MODRR_BASED)
Set("66, 0f, 10", ["VMOVUPD"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 11", ["VMOVUPD"], [OPT.YXMM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 10", ["VMOVUPS"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 11", ["VMOVUPS"], [OPT.YXMM128_256, OPT.YXMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 42", ["VMPSADBW"], [OPT.XMM, OPT.VXMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 59", ["VMULPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 59", ["VMULPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 59", ["VMULSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 59", ["VMULSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 56", ["VORPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 56", ["VORPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 1c", ["VPABSB"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 1d", ["VPABSW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 1e", ["VPABSD"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 63", ["VPACKSSWB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 6b", ["VPACKSSDW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 67", ["VPACKUSWB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 2b", ["VPACKUSDW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, fc", ["VPADDB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, fd", ["VPADDW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, fe", ["VPADDD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, d4", ["VPADDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, ec", ["VPADDSB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, ed", ["VPADDSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, dc", ["VPADDUSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, dd", ["VPADDUSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 0f", ["VPALIGNR"], [OPT.XMM, OPT.VXMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, db", ["VPAND"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, df", ["VPANDN"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, e0", ["VPAVGB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, e3", ["VPAVGW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 4c", ["VPBLENDVB"], [OPT.XMM, OPT.VXMM, OPT.XMM128, OPT.XMM_IMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 0e", ["VPBLENDW"], [OPT.XMM, OPT.VXMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
# This instruction is not prefixed with VEX.
SetClmul("66, 0f, 3a, 44", ["PCLMULQDQ"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS)
# Next instruction is prefixed with VEX.
SetClmul("66, 0f, 3a, 44", ["VPCLMULQDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 61", ["VPCMPESTRI"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 60", ["VPCMPESTRM"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 63", ["VPCMPISTRI"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 62", ["VPCMPISTRM"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 74", ["VPCMPEQB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 75", ["VPCMPEQW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 76", ["VPCMPEQD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 29", ["VPCMPEQQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 64", ["VPCMPGTB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 65", ["VPCMPGTW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 66", ["VPCMPGTD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 37", ["VPCMPGTQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 0d", ["VPERMILPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 05", ["VPERMILPD"], [OPT.YXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 04", ["VPERMILPS"], [OPT.YXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 0c", ["VPERMILPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 06", ["VPERM2F128"], [OPT.YMM, OPT.VYMM, OPT.YMM256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.FORCE_VEXL)
Set("66, 0f, 3a, 14", ["VPEXTRB"], [OPT.REG32_64_M8, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_VEX)
Set("66, 0f, c5", ["VPEXTRW"], [OPT.REG32_64, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 15", ["VPEXTRW"], [OPT.REG32_64_M16, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 16", ["VPEXTRD", "VPEXTRQ"], [OPT.WRM32_64, OPT.XMM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
Set("66, 0f, 38, 01", ["VPHADDW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 02", ["VPHADDD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 03", ["VPHADDSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 41", ["VPHMINPOSUW"], [OPT.XMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 05", ["VPHSUBW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 06", ["VPHSUBD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 07", ["VPHSUBSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 20", ["VPINSRB"], [OPT.XMM, OPT.VXMM, OPT.REG32_64_M8, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, c4", ["VPINSRW"], [OPT.XMM, OPT.VXMM, OPT.R32_M16, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 22", ["VPINSRD", "VPINSRQ"], [OPT.XMM, OPT.VXMM, OPT.WRM32_64, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
Set("66, 0f, f5", ["VPMADDWD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 04", ["VPMADDUBSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 3c", ["VPMAXSB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, ee", ["VPMAXSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 3d", ["VPMAXSD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, de", ["VPMAXUB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 3e", ["VPMAXUW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 3f", ["VPMAXUD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 38", ["VPMINSB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, ea", ["VPMINSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 39", ["VPMINSD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, da", ["VPMINUB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 3a", ["VPMINUW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 3b", ["VPMINUD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, d7", ["VPMOVMSKB"], [OPT.REG32_64, OPT.XMM], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag._64BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, 38, 20", ["VPMOVSXBW"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 21", ["VPMOVSXBD"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 22", ["VPMOVSXBQ"], [OPT.XMM, OPT.XMM16], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 23", ["VPMOVSXWD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 24", ["VPMOVSXWQ"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 25", ["VPMOVSXDQ"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 30", ["VPMOVZXBW"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 31", ["VPMOVZXBD"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 32", ["VPMOVZXBQ"], [OPT.XMM, OPT.XMM16], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 33", ["VPMOVZXWD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 34", ["VPMOVZXWQ"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 35", ["VPMOVZXDQ"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, e4", ["VPMULHUW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 0b", ["VPMULHRSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, e5", ["VPMULHW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, d5", ["VPMULLW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 40", ["VPMULLD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, f4", ["VPMULUDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 28", ["VPMULDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, eb", ["VPOR"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, f6", ["VPSADBW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 00", ["VPSHUFB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 70", ["VPSHUFD"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 70", ["VPSHUFHW"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f2, 0f, 70", ["VPSHUFLW"], [OPT.XMM, OPT.XMM128, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 08", ["VPSIGNB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 09", ["VPSIGNW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 38, 0a", ["VPSIGND"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 73 /07", ["VPSLLDQ"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, 73 /03", ["VPSRLDQ"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, f1", ["VPSLLW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 71 /06", ["VPSLLW"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, f2", ["VPSLLD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 72 /06", ["VPSLLD"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, f3", ["VPSLLQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 73 /06", ["VPSLLQ"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, e1", ["VPSRAW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 71 /04", ["VPSRAW"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, e2", ["VPSRAD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 72 /04", ["VPSRAD"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, d1", ["VPSRLW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 71 /02", ["VPSRLW"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, d2", ["VPSRLD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 72 /02", ["VPSRLD"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, d3", ["VPSRLQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 73 /02", ["VPSRLQ"], [OPT.VXMM, OPT.XMM_RM, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.MODRR_REQUIRED)
Set("66, 0f, 38, 17", ["VPTEST"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 0e", ["VTESTPS"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 38, 0f", ["VTESTPD"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, f8", ["VPSUBB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, f9", ["VPSUBW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, fa", ["VPSUBD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, fb", ["VPSUBQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, e8", ["VPSUBSB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, e9", ["VPSUBSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, d8", ["VPSUBUSB"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, d9", ["VPSUBUSW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 68", ["VPUNPCKHBW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 69", ["VPUNPCKHWD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 6a", ["VPUNPCKHDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 6d", ["VPUNPCKHQDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 60", ["VPUNPCKLBW"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 61", ["VPUNPCKLWD"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 62", ["VPUNPCKLDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 6c", ["VPUNPCKLQDQ"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, ef", ["VPXOR"], [OPT.XMM, OPT.VXMM, OPT.XMM128], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, 53", ["VRCPPS"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f3, 0f, 53", ["VRCPSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, 52", ["VRSQRTPS"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f3, 0f, 52", ["VRSQRTSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 09", ["VROUNDPD"], [OPT.YXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 08", ["VROUNDPS"], [OPT.YXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 3a, 0b", ["VROUNDSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 3a, 0a", ["VROUNDSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, c6", ["VSHUFPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, c6", ["VSHUFPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256, OPT.IMM8], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 51", ["VSQRTPD"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 51", ["VSQRTPS"], [OPT.YXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 51", ["VSQRTSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 51", ["VSQRTSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, ae /03", ["VSTMXCSR"], [OPT.MEM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 5c", ["VSUBPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 5c", ["VSUBPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("f2, 0f, 5c", ["VSUBSD"], [OPT.XMM, OPT.VXMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("f3, 0f, 5c", ["VSUBSS"], [OPT.XMM, OPT.VXMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 2e", ["VUCOMISD"], [OPT.XMM, OPT.XMM64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("0f, 2e", ["VUCOMISS"], [OPT.XMM, OPT.XMM32], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX)
Set("66, 0f, 15", ["VUNPCKHPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 15", ["VUNPCKHPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 14", ["VUNPCKLPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 14", ["VUNPCKLPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("66, 0f, 57", ["VXORPD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 57", ["VXORPS"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L)
Set("0f, 77", ["VZEROUPPER", "VZEROALL"], [], IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_L | IFlag.MNEMONIC_VEXL_BASED)
def init_FMA(self):
    """Builds the instruction table for the FMA (fused multiply-add) set.

    Every entry is a VEX-encoded 0f38-map opcode.  Each one carries a pair of
    mnemonics (the PS/PD or SS/SD variant) together with
    MNEMONIC_VEXW_BASED, i.e. the decoder picks the mnemonic according to
    the VEX.W bit.  Packed forms additionally set VEX_L (128/256-bit
    operation selected by VEX.L); scalar forms (WXMM32_64 operand) do not.
    """
    # Bind a shorthand that registers each entry under the FMA class.
    Set = lambda *args: self.SetCallback(ISetClass.FMA, *args)
    Set("66, 0f, 38, 98", ["VFMADD132PS", "VFMADD132PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, a8", ["VFMADD213PS", "VFMADD213PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, b8", ["VFMADD231PS", "VFMADD231PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 99", ["VFMADD132SS", "VFMADD132SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, a9", ["VFMADD213SS", "VFMADD213SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, b9", ["VFMADD231SS", "VFMADD231SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 96", ["VFMADDSUB132PS", "VFMADDSUB132PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, a6", ["VFMADDSUB213PS", "VFMADDSUB213PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, b6", ["VFMADDSUB231PS", "VFMADDSUB231PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 97", ["VFMSUBADD132PS", "VFMSUBADD132PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, a7", ["VFMSUBADD213PS", "VFMSUBADD213PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, b7", ["VFMSUBADD231PS", "VFMSUBADD231PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 9a", ["VFMSUB132PS", "VFMSUB132PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, aa", ["VFMSUB213PS", "VFMSUB213PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, ba", ["VFMSUB231PS", "VFMSUB231PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 9b", ["VFMSUB132SS", "VFMSUB132SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, ab", ["VFMSUB213SS", "VFMSUB213SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, bb", ["VFMSUB231SS", "VFMSUB231SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 9c", ["VFNMADD132PS", "VFNMADD132PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, ac", ["VFNMADD213PS", "VFNMADD213PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, bc", ["VFNMADD231PS", "VFNMADD231PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 9d", ["VFNMADD132SS", "VFNMADD132SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, ad", ["VFNMADD213SS", "VFNMADD213SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, bd", ["VFNMADD231SS", "VFNMADD231SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 9e", ["VFNMSUB132PS", "VFNMSUB132PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, ae", ["VFNMSUB213PS", "VFNMSUB213PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, be", ["VFNMSUB231PS", "VFNMSUB231PD"], [OPT.YXMM, OPT.VYXMM, OPT.YXMM128_256], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.VEX_L | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, 9f", ["VFNMSUB132SS", "VFNMSUB132SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, af", ["VFNMSUB213SS", "VFNMSUB213SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
    Set("66, 0f, 38, bf", ["VFNMSUB231SS", "VFNMSUB231SD"], [OPT.XMM, OPT.VXMM, OPT.WXMM32_64], IFlag.MODRM_REQUIRED | IFlag._32BITS | IFlag.PRE_VEX | IFlag.VEX_W | IFlag.MNEMONIC_VEXW_BASED)
def __init__(self, SetCallback):
    """ Initializes all instructions-sets using the given callback.
    The arguments of the callback are as follows:
    (iset-class, opcode-length, list of bytes of opcode, list of string of mnemonics, list of operands, flags) """
    self.SetCallback = SetCallback
    # Each init_* table builder binds its own ISetClass-specific wrapper
    # around self.SetCallback, so no wrapper is needed here (a previously
    # unused local 'Set' lambda was removed).
    self.init_INTEGER()
    self.init_Exported()
    self.init_FPU()
    self.init_P6()
    self.init_MMX()
    self.init_SSE()
    self.init_SSE2()
    self.init_SSE3()
    self.init_SSSE3()
    self.init_SSE4_1()
    self.init_SSE4_2()
    self.init_SSE4_A()
    self.init_3DNOW()
    self.init_3DNOWEXT()
    self.init_VMX()
    self.init_SVM()
    self.init_AVX()
    self.init_FMA()
|
elloray/thrift | refs/heads/master | test/py/TestEof.py | 99 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys, glob
from optparse import OptionParser
# Parse the --genpydir option before unittest sees the argv, then strip
# all arguments so unittest.main() does not try to interpret them.
parser = OptionParser()
parser.add_option('--genpydir', type='string', dest='genpydir', default='gen-py')
options, args = parser.parse_args()
del sys.argv[1:]  # clean up hack so unittest doesn't complain
# Make the thrift-generated test code and the locally built thrift
# library importable ahead of any installed versions.
sys.path.insert(0, options.genpydir)
sys.path.insert(0, glob.glob('../../lib/py/build/lib.*')[0])
from ThriftTest import ThriftTest
from ThriftTest.ttypes import *
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TCompactProtocol
import unittest
import time
class TestEof(unittest.TestCase):
    """Verifies that thrift transports and protocols raise EOFError
    (rather than returning garbage) when the input stream is truncated."""

    def make_data(self, pfactory=None):
        # Serialize two Xtruct records into an in-memory buffer and return
        # the raw bytes; used as well-formed input for the EOF tests below.
        trans = TTransport.TMemoryBuffer()
        if pfactory:
            prot = pfactory.getProtocol(trans)
        else:
            prot = TBinaryProtocol.TBinaryProtocol(trans)
        x = Xtruct()
        x.string_thing = "Zero"
        x.byte_thing = 0
        x.write(prot)
        x = Xtruct()
        x.string_thing = "One"
        x.byte_thing = 1
        x.write(prot)
        return trans.getvalue()

    def testTransportReadAll(self):
        """Test that readAll on any type of transport throws an EOFError"""
        trans = TTransport.TMemoryBuffer(self.make_data())
        trans.readAll(1)
        try:
            # Deliberately request far more bytes than the buffer holds.
            trans.readAll(10000)
        except EOFError:
            return
        self.fail("Should have gotten EOFError")

    def eofTestHelper(self, pfactory):
        # Read both serialized records back and check their contents, then
        # confirm a third read (past the end of the buffer) raises EOFError.
        trans = TTransport.TMemoryBuffer(self.make_data(pfactory))
        prot = pfactory.getProtocol(trans)
        x = Xtruct()
        x.read(prot)
        self.assertEqual(x.string_thing, "Zero")
        self.assertEqual(x.byte_thing, 0)
        x = Xtruct()
        x.read(prot)
        self.assertEqual(x.string_thing, "One")
        self.assertEqual(x.byte_thing, 1)
        try:
            x = Xtruct()
            x.read(prot)
        except EOFError:
            return
        self.fail("Should have gotten EOFError")

    def eofTestHelperStress(self, pfactory):
        """Test the ability of TBinaryProtocol to deal with the removal of every byte in the file"""
        # TODO: we should make sure this covers more of the code paths
        data = self.make_data(pfactory)
        # Replay the stream truncated at every possible length; every
        # truncation must surface as EOFError, never as corrupted data.
        for i in xrange(0, len(data) + 1):
            trans = TTransport.TMemoryBuffer(data[0:i])
            prot = pfactory.getProtocol(trans)
            try:
                x = Xtruct()
                x.read(prot)
                x.read(prot)
                x.read(prot)
            except EOFError:
                continue
            self.fail("Should have gotten an EOFError")

    def testBinaryProtocolEof(self):
        """Test that TBinaryProtocol throws an EOFError when it reaches the end of the stream"""
        self.eofTestHelper(TBinaryProtocol.TBinaryProtocolFactory())
        self.eofTestHelperStress(TBinaryProtocol.TBinaryProtocolFactory())

    def testBinaryProtocolAcceleratedEof(self):
        """Test that TBinaryProtocolAccelerated throws an EOFError when it reaches the end of the stream"""
        self.eofTestHelper(TBinaryProtocol.TBinaryProtocolAcceleratedFactory())
        self.eofTestHelperStress(TBinaryProtocol.TBinaryProtocolAcceleratedFactory())

    def testCompactProtocolEof(self):
        """Test that TCompactProtocol throws an EOFError when it reaches the end of the stream"""
        self.eofTestHelper(TCompactProtocol.TCompactProtocolFactory())
        self.eofTestHelperStress(TCompactProtocol.TCompactProtocolFactory())
def suite():
    """Collect the TestEof cases into a test suite for unittest.main."""
    test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestEof))
    return test_suite


if __name__ == "__main__":
    unittest.main(defaultTest="suite", testRunner=unittest.TextTestRunner(verbosity=2))
|
ephes/scikit-learn | refs/heads/master | examples/classification/plot_classifier_comparison.py | 181 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=====================
Classifier comparison
=====================
A comparison of a several classifiers in scikit-learn on synthetic datasets.
The point of this example is to illustrate the nature of decision boundaries
of different classifiers.
This should be taken with a grain of salt, as the intuition conveyed by
these examples does not necessarily carry over to real datasets.
Particularly in high-dimensional spaces, data can more easily be separated
linearly and the simplicity of classifiers such as naive Bayes and linear SVMs
might lead to better generalization than is achieved by other classifiers.
The plots show training points in solid colors and testing points
semi-transparent. The lower right shows the classification accuracy on the test
set.
"""
print(__doc__)
# Code source: Gaël Varoquaux
# Andreas Müller
# Modified for documentation by Jaques Grobler
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import make_moons, make_circles, make_classification
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.lda import LDA
from sklearn.qda import QDA
h = .02  # step size in the mesh

# Classifier display names; kept in the same order as `classifiers`.
names = ["Nearest Neighbors", "Linear SVM", "RBF SVM", "Decision Tree",
         "Random Forest", "AdaBoost", "Naive Bayes", "LDA", "QDA"]
classifiers = [
    KNeighborsClassifier(3),
    SVC(kernel="linear", C=0.025),
    SVC(gamma=2, C=1),
    DecisionTreeClassifier(max_depth=5),
    RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
    AdaBoostClassifier(),
    GaussianNB(),
    LDA(),
    QDA()]

# Build a linearly separable dataset, then add uniform noise to it.
X, y = make_classification(n_features=2, n_redundant=0, n_informative=2,
                           random_state=1, n_clusters_per_class=1)
rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)
linearly_separable = (X, y)

# Three 2-D toy datasets: interleaved moons, concentric circles, and the
# noisy linearly separable set built above.
datasets = [make_moons(noise=0.3, random_state=0),
            make_circles(noise=0.2, factor=0.5, random_state=1),
            linearly_separable
            ]

figure = plt.figure(figsize=(27, 9))
i = 1  # running subplot index: datasets are rows, classifiers are columns
# iterate over datasets
for ds in datasets:
    # preprocess dataset, split into training and test part
    X, y = ds
    X = StandardScaler().fit_transform(X)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.4)

    # Mesh bounds padded by 0.5 around the data.
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    # just plot the dataset first
    cm = plt.cm.RdBu
    cm_bright = ListedColormap(['#FF0000', '#0000FF'])
    ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
    # Plot the training points
    ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)
    # and testing points (semi-transparent)
    ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)
    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    ax.set_xticks(())
    ax.set_yticks(())
    i += 1

    # iterate over classifiers
    for name, clf in zip(names, classifiers):
        ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
        clf.fit(X_train, y_train)
        score = clf.score(X_test, y_test)

        # Plot the decision boundary. For that, we will assign a color to each
        # point in the mesh [x_min, x_max]x[y_min, y_max].
        if hasattr(clf, "decision_function"):
            Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
        else:
            # Fall back to the class-1 probability for classifiers without
            # a decision function.
            Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]

        # Put the result into a color plot
        Z = Z.reshape(xx.shape)
        ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)

        # Plot also the training points
        ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)
        # and testing points
        ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright,
                   alpha=0.6)

        ax.set_xlim(xx.min(), xx.max())
        ax.set_ylim(yy.min(), yy.max())
        ax.set_xticks(())
        ax.set_yticks(())
        ax.set_title(name)
        # Annotate the test accuracy in the lower-right corner.
        ax.text(xx.max() - .3, yy.min() + .3, ('%.2f' % score).lstrip('0'),
                size=15, horizontalalignment='right')
        i += 1

figure.subplots_adjust(left=.02, right=.98)
plt.show()
|
bastianh/swampdragon | refs/heads/master | tests/test_permissions.py | 13 | from swampdragon.testing.dragon_testcase import DragonTestCase
from swampdragon.route_handler import BaseRouter, LOGIN_REQUIRED, SUCCESS, ERROR
from swampdragon.permissions import login_required, LoginRequired, RoutePermission
class TestRouterDecorated(BaseRouter):
    """
    A router where a single verb is guarded by the ``login_required``
    decorator (rather than by router-level permission classes).
    """
    valid_verbs = ['do_something']

    @login_required
    def do_something(self, **kwargs):
        # Only reached when the connection has an authenticated user.
        self.send('all good')
class TestRouter(BaseRouter):
    """
    A router where every verb requires login, enforced via the
    router-wide ``permission_classes`` list.
    """
    valid_verbs = ['do_something']
    permission_classes = [LoginRequired()]

    def do_something(self, **kwargs):
        self.send('all good')
class TestRouterSpecific(BaseRouter):
    """
    A router where only one verb requires a signed in user: LoginRequired
    is scoped to 'need_login' via its ``verbs`` argument, leaving
    'free_for_all' open to anonymous connections.
    """
    valid_verbs = ['free_for_all', 'need_login']
    permission_classes = [LoginRequired(verbs=['need_login'])]

    def free_for_all(self, **kwargs):
        self.send('all good')

    def need_login(self, **kwargs):
        self.send('all good')
class CustomPermission(RoutePermission):
    """
    A custom permission that grants access only when the key 'ok' is
    present in the call arguments.
    """

    def test_permission(self, handler, verb, **kwargs):
        # Grant access when the caller supplied 'ok'; otherwise report
        # the failure through permission_failed.
        if 'ok' in kwargs:
            return True
        return self.permission_failed(handler)

    def permission_failed(self, handler):
        # Send a field-style error back to the client and deny access.
        handler.send_error({'ok': ['ok missing']})
        return False
class CustomPermissionRouter(BaseRouter):
    """
    A router guarded by the CustomPermission defined above.
    """
    valid_verbs = ['permission_required']
    permission_classes = [CustomPermission()]

    def permission_required(self, **kwargs):
        # Only reached when CustomPermission granted access.
        self.send('everything is okay')
class BrokenPermission(RoutePermission):
    """
    This permission is missing both required functions of a permission
    * test_permission
    * permission_failed
    The tests below use it to verify that an incomplete permission
    raises NotImplementedError when exercised.
    """
    pass
class BrokenPermissionRouter(BaseRouter):
    """A router wired to the incomplete BrokenPermission above."""
    valid_verbs = ['do_something']
    permission_classes = [BrokenPermission()]

    def do_something(self, **kwargs):
        pass
class HalfAPermission(RoutePermission):
    """A permission that implements test_permission but not
    permission_failed, so denying access hits the missing method."""
    def test_permission(self, handler, verb, **kwargs):
        # Always delegates to permission_failed, which is not implemented.
        return self.permission_failed(handler)
class HalfAPermissionRouter(BaseRouter):
    """A router wired to the half-implemented HalfAPermission above."""
    valid_verbs = ['do_something']
    permission_classes = [HalfAPermission()]

    def do_something(self, **kwargs):
        pass
class TestPermissions(DragonTestCase):
    """Exercises the permission machinery through the routers defined above,
    asserting on the 'state' field of the last message sent on the connection."""

    def test_login_required_decorator(self):
        # Anonymous connection: the decorated verb must demand a login.
        router = TestRouterDecorated(self.connection)
        router.handle({'verb': 'do_something'})
        self.assertEqual(self.connection.last_message['context']['state'], LOGIN_REQUIRED)

    def test_login_required_decorator_with_user(self):
        # With a user attached, the decorated verb succeeds.
        self.connection.user = {'user': 'test user'}
        router = TestRouterDecorated(self.connection)
        router.handle({'verb': 'do_something'})
        self.assertEqual(self.connection.last_message['context']['state'], SUCCESS)

    def test_login_required(self):
        # Router-wide LoginRequired blocks anonymous access.
        router = TestRouter(self.connection)
        router.handle({'verb': 'do_something'})
        self.assertEqual(self.connection.last_message['context']['state'], LOGIN_REQUIRED)

    def test_login_required_on_one_verb(self):
        # Only the verb listed in LoginRequired(verbs=...) is protected.
        router = TestRouterSpecific(self.connection)
        router.handle({'verb': 'free_for_all'})
        self.assertEqual(self.connection.last_message['context']['state'], SUCCESS)
        router.handle({'verb': 'need_login'})
        self.assertEqual(self.connection.last_message['context']['state'], LOGIN_REQUIRED)

    def test_fail_custom_permission(self):
        # Missing the required 'ok' argument triggers the custom error path.
        router = CustomPermissionRouter(self.connection)
        router.handle({'verb': 'permission_required'})
        self.assertEqual(self.connection.last_message['context']['state'], ERROR)

    def test_pass_custom_permission(self):
        # Supplying 'ok' satisfies CustomPermission.
        router = CustomPermissionRouter(self.connection)
        router.handle({'verb': 'permission_required', 'args': {'ok': True}})
        self.assertEqual(self.connection.last_message['context']['state'], SUCCESS)

    def test_broken_permission(self):
        # A permission implementing neither required method must raise.
        router = BrokenPermissionRouter(self.connection)
        with self.assertRaises(NotImplementedError):
            router.handle({'verb': 'do_something'})

    def test_permission_missing_permission_failed(self):
        # A permission missing only permission_failed must also raise.
        router = HalfAPermissionRouter(self.connection)
        with self.assertRaises(NotImplementedError):
            router.handle({'verb': 'do_something'})
|
toshywoshy/ansible | refs/heads/devel | lib/ansible/modules/cloud/azure/azure_rm_devtestlabvirtualnetwork_info.py | 20 | #!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_devtestlabvirtualnetwork_info
version_added: "2.9"
short_description: Get Azure DevTest Lab Virtual Network facts
description:
- Get facts of Azure DevTest Lab Virtual Network.
options:
resource_group:
description:
- The name of the resource group.
required: True
type: str
lab_name:
description:
- The name of DevTest Lab.
required: True
type: str
name:
description:
- The name of DevTest Lab Virtual Network.
type: str
extends_documentation_fragment:
- azure
author:
- Zim Kalinowski (@zikalino)
'''
# Usage examples rendered by ansible-doc. Note the "list" example omits
# 'name' on purpose: supplying a name switches the module to single-item mode.
EXAMPLES = '''
  - name: Get instance of DevTest Lab Virtual Network
    azure_rm_devtestlabvirtualnetwork_info:
      resource_group: myResourceGroup
      lab_name: myLab
      name: myVirtualNetwork

  - name: List all Virtual Networks in DevTest Lab
    azure_rm_devtestlabvirtualnetwork_info:
      resource_group: myResourceGroup
      lab_name: myLab
'''
RETURN = '''
virtualnetworks:
description:
- A list of dictionaries containing facts for DevTest Lab Virtual Network.
returned: always
type: complex
contains:
id:
description:
- The identifier of the virtual network.
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourcegroups/myResourceGroup/providers/microsoft.devtestlab/labs/myLab/virt
ualnetworks/myVirtualNetwork"
resource_group:
description:
- Name of the resource group.
returned: always
type: str
sample: myResourceGroup
lab_name:
description:
- Name of the lab.
returned: always
type: str
sample: myLab
name:
description:
- Name of the virtual network.
returned: always
type: str
sample: myVirtualNetwork
description:
description:
- Description of the virtual network.
returned: always
type: str
sample: My Virtual Network
external_provider_resource_id:
description:
- Resource id of an external virtual network.
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/my
VirtualNetwork"
provisioning_state:
description:
- Provisioning state of the virtual network.
returned: always
type: str
sample: Succeeded
'''
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.devtestlabs import DevTestLabsClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMDevTestLabVirtualNetworkInfo(AzureRMModuleBase):
    """Info module implementation: returns facts for one named virtual
    network of a DevTest Lab, or for all virtual networks in the lab."""

    def __init__(self):
        # define user inputs into argument
        self.module_arg_spec = dict(
            resource_group=dict(
                type='str',
                required=True
            ),
            lab_name=dict(
                type='str',
                required=True
            ),
            name=dict(
                type='str'
            )
        )
        # store the results of the module operation
        self.results = dict(
            changed=False
        )
        self.mgmt_client = None
        self.resource_group = None
        self.lab_name = None
        self.name = None
        super(AzureRMDevTestLabVirtualNetworkInfo, self).__init__(self.module_arg_spec, supports_tags=False)

    def exec_module(self, **kwargs):
        """Module entry point: dispatches to get() when a name was given,
        otherwise to list(), and returns the results dict."""
        is_old_facts = self.module._name == 'azure_rm_devtestlabvirtualnetwork_facts'
        if is_old_facts:
            self.module.deprecate("The 'azure_rm_devtestlabvirtualnetwork_facts' module has been renamed to 'azure_rm_devtestlabvirtualnetwork_info'",
                                  version='2.13')

        # Copy the validated module parameters onto the instance.
        for key in self.module_arg_spec:
            setattr(self, key, kwargs[key])
        self.mgmt_client = self.get_mgmt_svc_client(DevTestLabsClient,
                                                    base_url=self._cloud_environment.endpoints.resource_manager)

        if self.name:
            self.results['virtualnetworks'] = self.get()
        else:
            self.results['virtualnetworks'] = self.list()
        return self.results

    def list(self):
        """Return fact dicts for every virtual network in the lab."""
        response = None
        results = []
        try:
            response = self.mgmt_client.virtual_networks.list(resource_group_name=self.resource_group,
                                                              lab_name=self.lab_name)
            self.log("Response : {0}".format(response))
        except CloudError as e:
            # Surface the service error detail instead of discarding it.
            self.fail('Could not list Virtual Networks for DevTest Lab: {0}'.format(str(e)))

        if response is not None:
            for item in response:
                results.append(self.format_response(item))
        return results

    def get(self):
        """Return a single-element list of facts for the virtual network
        named by self.name (empty on no response)."""
        response = None
        results = []
        try:
            response = self.mgmt_client.virtual_networks.get(resource_group_name=self.resource_group,
                                                             lab_name=self.lab_name,
                                                             name=self.name)
            self.log("Response : {0}".format(response))
        except CloudError as e:
            # Surface the service error detail instead of discarding it.
            self.fail('Could not get facts for Virtual Network: {0}'.format(str(e)))

        if response:
            results.append(self.format_response(response))
        return results

    def format_response(self, item):
        """Flatten an SDK virtual-network object into the documented fact dict."""
        d = item.as_dict()
        d = {
            'resource_group': self.resource_group,
            'lab_name': self.lab_name,
            'name': d.get('name', None),
            'id': d.get('id', None),
            'external_provider_resource_id': d.get('external_provider_resource_id', None),
            'provisioning_state': d.get('provisioning_state', None),
            'description': d.get('description', None)
        }
        return d
def main():
    """Module entry point; instantiating the class runs the module
    (the AzureRMModuleBase constructor drives exec_module)."""
    AzureRMDevTestLabVirtualNetworkInfo()


if __name__ == '__main__':
    main()
|
BosnaZmaj/PeachIt | refs/heads/master | kb/apps.py | 3 | from django.apps import AppConfig
class KbConfig(AppConfig):
    """Django application configuration for the 'kb' app."""
    name = 'kb'
|
Nowheresly/odoo | refs/heads/8.0 | openerp/addons/base/ir/ir_rule.py | 24 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.osv import fields, osv, expression
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.misc import unquote as unquote
class ir_rule(osv.osv):
_name = 'ir.rule'
_order = 'name'
_MODES = ['read', 'write', 'create', 'unlink']
def _eval_context_for_combinations(self):
    """Returns a dictionary to use as evaluation context for
    ir.rule domains, when the goal is to obtain python lists
    that are easier to parse and combine, but not to
    actually execute them."""
    # unquote() keeps 'user' and 'time' as symbolic identifiers in the
    # resulting domain instead of substituting real values.
    return {'user': unquote('user'),
            'time': unquote('time')}
def _eval_context(self, cr, uid):
"""Returns a dictionary to use as evaluation context for
ir.rule domains."""
return {'user': self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid),
'time':time}
def _domain_force_get(self, cr, uid, ids, field_name, arg, context=None):
res = {}
eval_context = self._eval_context(cr, uid)
for rule in self.browse(cr, uid, ids, context):
if rule.domain_force:
res[rule.id] = expression.normalize_domain(eval(rule.domain_force, eval_context))
else:
res[rule.id] = []
return res
def _get_value(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for rule in self.browse(cr, uid, ids, context):
if not rule.groups:
res[rule.id] = True
else:
res[rule.id] = False
return res
def _check_model_obj(self, cr, uid, ids, context=None):
return not any(self.pool[rule.model_id.model].is_transient() for rule in self.browse(cr, uid, ids, context))
def _check_model_name(self, cr, uid, ids, context=None):
# Don't allow rules on rules records (this model).
return not any(rule.model_id.model == self._name for rule in self.browse(cr, uid, ids, context))
_columns = {
'name': fields.char('Name', select=1),
'active': fields.boolean('Active', help="If you uncheck the active field, it will disable the record rule without deleting it (if you delete a native record rule, it may be re-created when you reload the module."),
'model_id': fields.many2one('ir.model', 'Object',select=1, required=True, ondelete="cascade"),
'global': fields.function(_get_value, string='Global', type='boolean', store=True, help="If no group is specified the rule is global and applied to everyone"),
'groups': fields.many2many('res.groups', 'rule_group_rel', 'rule_group_id', 'group_id', 'Groups'),
'domain_force': fields.text('Domain'),
'domain': fields.function(_domain_force_get, string='Domain', type='binary'),
'perm_read': fields.boolean('Apply for Read'),
'perm_write': fields.boolean('Apply for Write'),
'perm_create': fields.boolean('Apply for Create'),
'perm_unlink': fields.boolean('Apply for Delete')
}
_order = 'model_id DESC'
_defaults = {
'active': True,
'perm_read': True,
'perm_write': True,
'perm_create': True,
'perm_unlink': True,
'global': True,
}
_sql_constraints = [
('no_access_rights', 'CHECK (perm_read!=False or perm_write!=False or perm_create!=False or perm_unlink!=False)', 'Rule must have at least one checked access right !'),
]
_constraints = [
(_check_model_obj, 'Rules can not be applied on Transient models.', ['model_id']),
(_check_model_name, 'Rules can not be applied on the Record Rules model.', ['model_id']),
]
@tools.ormcache()
def _compute_domain(self, cr, uid, model_name, mode="read"):
if mode not in self._MODES:
raise ValueError('Invalid mode: %r' % (mode,))
if uid == SUPERUSER_ID:
return None
cr.execute("""SELECT r.id
FROM ir_rule r
JOIN ir_model m ON (r.model_id = m.id)
WHERE m.model = %s
AND r.active is True
AND r.perm_""" + mode + """
AND (r.id IN (SELECT rule_group_id FROM rule_group_rel g_rel
JOIN res_groups_users_rel u_rel ON (g_rel.group_id = u_rel.gid)
WHERE u_rel.uid = %s) OR r.global)""", (model_name, uid))
rule_ids = [x[0] for x in cr.fetchall()]
if rule_ids:
# browse user as super-admin root to avoid access errors!
user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid)
global_domains = [] # list of domains
group_domains = {} # map: group -> list of domains
for rule in self.browse(cr, SUPERUSER_ID, rule_ids):
# read 'domain' as UID to have the correct eval context for the rule.
rule_domain = self.read(cr, uid, [rule.id], ['domain'])[0]['domain']
dom = expression.normalize_domain(rule_domain)
if rule.groups & user.groups_id:
group_domains.setdefault(rule.groups[0], []).append(dom)
if not rule.groups:
global_domains.append(dom)
# combine global domains and group domains
if group_domains:
group_domain = expression.OR(map(expression.OR, group_domains.values()))
else:
group_domain = []
domain = expression.AND(global_domains + [group_domain])
return domain
return []
def clear_cache(self, cr, uid):
self._compute_domain.clear_cache(self)
def domain_get(self, cr, uid, model_name, mode='read', context=None):
dom = self._compute_domain(cr, uid, model_name, mode)
if dom:
# _where_calc is called as superuser. This means that rules can
# involve objects on which the real uid has no acces rights.
# This means also there is no implicit restriction (e.g. an object
# references another object the user can't see).
query = self.pool[model_name]._where_calc(cr, SUPERUSER_ID, dom, active_test=False)
return query.where_clause, query.where_clause_params, query.tables
return [], [], ['"' + self.pool[model_name]._table + '"']
def unlink(self, cr, uid, ids, context=None):
res = super(ir_rule, self).unlink(cr, uid, ids, context=context)
self.clear_cache(cr, uid)
return res
def create(self, cr, uid, vals, context=None):
res = super(ir_rule, self).create(cr, uid, vals, context=context)
self.clear_cache(cr, uid)
return res
def write(self, cr, uid, ids, vals, context=None):
res = super(ir_rule, self).write(cr, uid, ids, vals, context=context)
self.clear_cache(cr,uid)
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Insolita/creepy | refs/heads/master | creepy/models/PluginConfigurationListModel.py | 5 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from PyQt4.QtCore import QVariant, QAbstractListModel, Qt
from PyQt4.Qt import QPixmap, QIcon
import os
class PluginConfigurationListModel(QAbstractListModel):
    """List model exposing the discovered plugins (name + logo icon) to a view."""

    def __init__(self, plugins, parent=None):
        super(PluginConfigurationListModel, self).__init__(parent)
        # (plugin, isConfigured) tuples, filled by checkPluginConfiguration().
        self.plugins = []
        self.pluginList = plugins

    def checkPluginConfiguration(self):
        """Populate self.plugins with (plugin, configured) tuples.

        Currently every plugin is marked configured unconditionally; the
        real isConfigured() check is kept below, disabled, for reference.
        """
        for plugin in self.pluginList:
            self.plugins.append((plugin, True))
        '''
        if plugin.plugin_object.isConfigured()[0]:
            self.plugins.append((plugin,True))
        else:
            self.plugins.append((plugin,False))
        '''

    def rowCount(self, index):
        # One row per discovered plugin.
        return len(self.plugins)

    def data(self, index, role):
        """Return the display name or decoration icon for the given index."""
        # BUGFIX: validate the index *before* using its row; the original
        # indexed self.plugins first, raising IndexError for invalid indices.
        if not index.isValid():
            return QVariant()
        pluginListItem = self.plugins[index.row()]
        if role == Qt.DisplayRole:
            return QVariant(pluginListItem[0].name)
        if role == Qt.DecorationRole:
            picturePath = os.path.join(os.getcwdu(), 'plugins', pluginListItem[0].plugin_object.name, 'logo.png')
            if picturePath and os.path.exists(picturePath):
                pixmap = QPixmap(picturePath)
                return QIcon(pixmap)
            else:
                # Fall back to the generic folder icon bundled in resources.
                pixmap = QPixmap(':/creepy/folder')
                return QIcon(pixmap)
        # Unhandled roles: return an empty QVariant instead of falling
        # through and implicitly returning None.
        return QVariant()
wd5/jangr | refs/heads/master | archive/migrations/0003_auto__del_field_artist_city.py | 1 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: drop the ``city`` column from ``archive.Artist``."""

    def forwards(self, orm):
        # Deleting field 'Artist.city'
        db.delete_column('archive_artist', 'city')

    def backwards(self, orm):
        # Adding field 'Artist.city'
        # Default is the UTF-8 byte string for "София", so existing rows get
        # a non-null value when the column is restored.
        db.add_column('archive_artist', 'city', self.gf('django.db.models.fields.CharField')(default='\xd0\xa1\xd0\xbe\xd1\x84\xd0\xb8\xd1\x8f', max_length=20), keep_default=False)

    # Frozen ORM state South uses to reconstruct the models at migration time.
    models = {
        'archive.album': {
            'Meta': {'object_name': 'Album'},
            'artists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['archive.Artist']", 'symmetrical': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
            'released': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': 'None', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'tracks': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['archive.Song']", 'symmetrical': 'False', 'through': "orm['archive.AlbumTrack']", 'blank': 'True'})
        },
        'archive.albumtrack': {
            'Meta': {'object_name': 'AlbumTrack'},
            'album': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['archive.Album']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'number': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'side': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
            'song': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['archive.Song']"})
        },
        'archive.artist': {
            'Meta': {'object_name': 'Artist'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['archive.Person']", 'null': 'True', 'through': "orm['archive.Membership']", 'symmetrical': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': 'None', 'db_index': 'True'}),
            'years_active': ('django.db.models.fields.CharField', [], {'max_length': '48', 'blank': 'True'})
        },
        'archive.membership': {
            'Meta': {'object_name': 'Membership'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['archive.Artist']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instrument': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'now': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['archive.Person']"}),
            'years': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        'archive.person': {
            'Meta': {'object_name': 'Person'},
            'alive': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'born': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'died': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': 'None', 'db_index': 'True'})
        },
        'archive.song': {
            'Meta': {'object_name': 'Song'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'original_artists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['archive.Artist']", 'symmetrical': 'False'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': 'None', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
        }
    }

    complete_apps = ['archive']
|
rhyolight/nupic | refs/heads/master | src/nupic/algorithms/monitor_mixin/trace.py | 50 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Trace classes used in monitor mixin framework.
"""
import abc
import numpy
class Trace(object):
    """
    A record of the past data the algorithm has seen, with an entry for each
    iteration.
    """

    __metaclass__ = abc.ABCMeta

    def __init__(self, monitor, title):
        """
        @param monitor (MonitorMixinBase) Monitor Mixin instance that generated
                                          this trace
        @param title   (string)           Title of the trace
        """
        self.monitor = monitor
        self.title = title
        self.data = []

    def prettyPrintTitle(self):
        # Prefix the title with the monitor's name when one is set.
        if self.monitor.mmName is None:
            return self.title
        return "[{0}] {1}".format(self.monitor.mmName, self.title)

    @staticmethod
    def prettyPrintDatum(datum):
        """
        @param datum (object) Datum from `self.data` to pretty-print

        @return (string) Pretty-printed datum
        """
        if datum is None:
            return ""
        return str(datum)
class IndicesTrace(Trace):
    """
    Each entry contains indices (for example of predicted => active cells).
    """

    def makeCountsTrace(self):
        """
        @return (CountsTrace) A new Trace made up of counts of this trace's indices.
        """
        countsTrace = CountsTrace(self.monitor, "# {0}".format(self.title))
        countsTrace.data = [len(entry) for entry in self.data]
        return countsTrace

    def makeCumCountsTrace(self):
        """
        @return (CountsTrace) A new Trace made up of cumulative counts of this
        trace's indices.
        """
        cumTrace = CountsTrace(self.monitor, "# (cumulative) {0}".format(self.title))
        # Running-total accumulation over the per-iteration counts.
        runningTotal = 0
        cumulative = []
        for count in self.makeCountsTrace().data:
            runningTotal += count
            cumulative.append(runningTotal)
        cumTrace.data = cumulative
        return cumTrace

    @staticmethod
    def prettyPrintDatum(datum):
        return str(sorted(list(datum)))
class BoolsTrace(Trace):
    """
    Each entry contains bools (for example resets).
    """
    # Marker subclass: behavior is identical to Trace; the type conveys
    # what kind of datum each entry holds.
    pass
class CountsTrace(Trace):
    """
    Each entry contains counts (for example # of predicted => active cells).
    """
    # Marker subclass: behavior is identical to Trace; the type conveys
    # what kind of datum each entry holds.
    pass
class StringsTrace(Trace):
    """
    Each entry contains strings (for example sequence labels).
    """
    # Marker subclass: behavior is identical to Trace; the type conveys
    # what kind of datum each entry holds.
    pass
class MetricsTrace(Trace):
    """
    Each entry contains Metrics (for example metric for # of predicted => active
    cells).
    """
    @staticmethod
    def prettyPrintDatum(datum):
        # Summarize the metric's statistics in a fixed two-decimal format.
        # Assumes datum exposes min/max/sum/mean/standardDeviation attributes.
        return ("min: {0:.2f}, max: {1:.2f}, sum: {2:.2f}, "
                "mean: {3:.2f}, std dev: {4:.2f}").format(
                datum.min, datum.max, datum.sum, datum.mean, datum.standardDeviation)
|
ashaarunkumar/spark-tk | refs/heads/master | regression-tests/sparktkregtests/testcases/frames/column_method_add_test.py | 3 | # vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests methods that access or alter columns"""
import unittest
from sparktkregtests.lib import sparktk_test
udf_int_val = -77  # placeholder data value for added column
udf_col_count = 1000  # length of list for column add


def global_udf(row):
    """This method is to test different sources of functions with udf"""
    # Constant-valued row extension: udf_col_count copies of the placeholder.
    return [udf_int_val] * udf_col_count
class ColumnMethodTest(sparktk_test.SparkTKTestCase):
    """Tests for frame column add/rename behavior and its error handling."""
    # Test class bound methods
    @staticmethod
    def static_udf(row):
        """This method is to test different sources of functions with udf"""
        return [udf_int_val for _ in range(0, udf_col_count)]

    def setUp(self):
        """Build test_frame"""
        super(ColumnMethodTest, self).setUp()
        dataset = self.get_file("int_str_float.csv")
        schema = [("int", int), ("str", str), ("float", float)]
        self.frame = self.context.frame.import_csv(dataset, schema=schema)
        old_header = self.frame.column_names
        # Schema for udf_col_count new int columns named col_0 .. col_N-1.
        self.new_col_schema = [("col_" + str(n), int)
                               for n in range(0, udf_col_count)]
        # Expected header after the add: original columns + new column names.
        self.expected_header = old_header + [col_schema[0]
                                             for col_schema in
                                             self.new_col_schema]

    def test_static_add_col_names(self):
        """Tests adding a column name with a static method"""
        self.frame.add_columns(
            ColumnMethodTest.static_udf, self.new_col_schema)
        self.assertEqual(self.frame.column_names, self.expected_header)
        # 3 original columns + the newly added ones.
        self.assertEqual(
            len(self.new_col_schema)+3, len((self.frame.take(1))[0]))
        columns = self.frame.take(self.frame.count())
        for i in columns:
            self.assertEqual(i[-1], udf_int_val)

    @unittest.skip("DPNG-11909")
    def test_add_col_names(self):
        """Tests adding a column name with a global method"""
        self.frame.add_columns(global_udf, self.new_col_schema)
        self.assertEqual(self.frame.column_names, self.expected_header)
        self.assertEqual(
            len(self.new_col_schema)+3, len((self.frame.take(1))[0]))
        self.frame.inspect()
        columns = self.frame.take(self.frame.count())
        for i in columns:
            self.assertEqual(i[-1], udf_int_val)

    def test_add_columns_lambda_single(self):
        """Test adding individual columns from a lambda"""
        col_count = len((self.frame.take(1))[0])
        self.frame.add_columns(
            lambda row: row.int*row.float, ('a_times_b', int))
        self.assertIn('a_times_b', self.frame.column_names)
        self.assertEqual(col_count+1, len((self.frame.take(1))[0]))

    def test_add_columns_lambda_multiple(self):
        """Test adding multiple columns from a lambda"""
        col_count = len((self.frame.take(1))[0])
        self.frame.add_columns(
            lambda row: [row.int * row.float, row.int + row.float],
            [("a_times_b", float), ("a_plus_b", float)])
        self.assertIn('a_times_b', self.frame.column_names)
        self.assertIn('a_plus_b', self.frame.column_names)
        self.assertEqual(col_count+2, len((self.frame.take(1))[0]))

    def test_add_columns_abort(self):
        """Test divide by zero errors"""
        # Divide by 0 exception will abort column add;
        # Schema should be unchanged.
        schema_before = self.frame.schema
        def bad_divide(row):
            return float(row.float) / 0
        with self.assertRaisesRegexp(ValueError, "schema expected to contain tuples, encountered type <type 'str'>"):
            self.frame.add_columns(
                bad_divide, schema=["result", float])
        self.assertEqual(schema_before, self.frame.schema)
        self.frame.inspect()
        self.assertEqual(schema_before, self.frame.schema)

    def test_add_columns_add_existing_name(self):
        """Test adding columns with existing names errors"""
        with self.assertRaisesRegexp(
                Exception, "requirement failed: Schemas have conflicting column names. Please rename before merging. Left Schema: int, str, float Right Schema: str"):
            self.frame.add_columns(lambda row: udf_int_val, ('str', int))
            self.frame.inspect()

    def test_add_column_with_empty_name(self):
        """Test adding a column with an empty name errors"""
        with self.assertRaisesRegexp(Exception, "requirement failed: column name can't be empty"):
            self.frame.add_columns(lambda row: udf_int_val, ('', int))
            self.frame.inspect()

    def test_add_column_null_schema_no_force(self):
        """Test adding a column with a null schema errors, don't force eval"""
        with self.assertRaisesRegexp(ValueError, "schema expected to contain tuples, encountered type <type 'NoneType'>"):
            self.frame.add_columns(lambda row: udf_int_val, None)

    def test_add_column_empty_schema_no_force(self):
        """Test adding a column with empty schema errors, don't force eval"""
        with self.assertRaisesRegexp(IndexError, "tuple index out of range"):
            self.frame.add_columns(lambda row: udf_int_val, ())

    def test_add_column_null_schema(self):
        """Test adding a column with a null schema errors"""
        with self.assertRaisesRegexp(
                ValueError, "schema expected to contain tuples, encountered type <type 'NoneType'>"):
            self.frame.add_columns(lambda row: udf_int_val, None)
            self.frame.inspect()

    def test_add_column_empty_schema(self):
        """Test adding a column with an empty schema errors"""
        with self.assertRaisesRegexp(IndexError, "tuple index out of range"):
            self.frame.add_columns(lambda row: udf_int_val, ())
            self.frame.inspect()

    def test_add_column_schema_list(self):
        """Test adding a column with a schema containing a list"""
        with self.assertRaisesRegexp(Exception, 'concatenate list'):
            self.frame.add_columns(
                lambda row: udf_int_val, schema=[('new_col', int)])
            self.frame.inspect()

    def test_unicode_conversion(self):
        """Test renaming with unicode names"""
        self.frame.add_columns(
            lambda row: udf_int_val, ('product', int))
        col_count = len(self.frame.take(1)[0])
        self.frame.rename_columns({'product': u'unicode'})
        self.assertEqual(col_count, len(self.frame.take(1)[0]))
        self.assertNotIn('product', self.frame.column_names)
        self.assertIn(u'unicode', self.frame.column_names)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
AubrCool/rt-thread | refs/heads/master | bsp/gd32303e-eval/rtconfig.py | 12 | import os
# toolchains options
ARCH='arm'
CPU='cortex-m4'
CROSS_TOOL='keil'
# Environment variables RTT_CC / RTT_EXEC_PATH override the defaults below.
if os.getenv('RTT_CC'):
    CROSS_TOOL = os.getenv('RTT_CC')
# cross_tool provides the cross compiler
# EXEC_PATH is the compiler execute path, for example, CodeSourcery, Keil MDK, IAR
if CROSS_TOOL == 'gcc':
    PLATFORM = 'gcc'
    EXEC_PATH = r'D:/toolchain/gnu_tools_arm_embedded/5.4_2016q3/bin'
elif CROSS_TOOL == 'keil':
    PLATFORM = 'armcc'
    EXEC_PATH = r'C:/Keil_v5'
elif CROSS_TOOL == 'iar':
    PLATFORM = 'iar'
    EXEC_PATH = r'C:/Program Files (x86)/IAR Systems/Embedded Workbench 8.0'
if os.getenv('RTT_EXEC_PATH'):
    EXEC_PATH = os.getenv('RTT_EXEC_PATH')
# Build profile: 'debug' adds debug info / disables optimization per platform.
BUILD = 'debug'
if PLATFORM == 'gcc':
    # tool-chains
    PREFIX = 'arm-none-eabi-'
    CC = PREFIX + 'gcc'
    AS = PREFIX + 'gcc'
    AR = PREFIX + 'ar'
    LINK = PREFIX + 'gcc'
    TARGET_EXT = 'elf'
    SIZE = PREFIX + 'size'
    OBJDUMP = PREFIX + 'objdump'
    OBJCPY = PREFIX + 'objcopy'
    DEVICE = ' -mcpu=cortex-m4 -mthumb -ffunction-sections -fdata-sections'
    CFLAGS = DEVICE + ' -Dgcc' # -D' + PART_TYPE
    AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp -Wa,-mimplicit-it=thumb '
    LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rtthread-gd32.map,-cref,-u,Reset_Handler -T gd32_rom.ld'
    CPATH = ''
    LPATH = ''
    if BUILD == 'debug':
        CFLAGS += ' -O0 -gdwarf-2 -g'
        AFLAGS += ' -gdwarf-2'
    else:
        CFLAGS += ' -O2'
    # Post-link: produce rtthread.bin and print section sizes.
    POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
elif PLATFORM == 'armcc':
    # toolchains
    CC = 'armcc'
    AS = 'armasm'
    AR = 'armar'
    LINK = 'armlink'
    TARGET_EXT = 'axf'
    DEVICE = ' --cpu Cortex-M4'
    CFLAGS = DEVICE + ' --apcs=interwork'
    AFLAGS = DEVICE
    LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers --list rtthread-gd32.map --scatter gd32_rom.sct'
    # Keep init-call/symbol-table sections the linker would otherwise discard.
    LFLAGS += ' --keep *.o(.rti_fn.*) --keep *.o(FSymTab) --keep *.o(VSymTab)'
    EXEC_PATH += '/ARM/ARMCC/bin'
    # NOTE(review): debug print left in place to preserve script output.
    print(EXEC_PATH)
    CFLAGS += ' --c99'
    if BUILD == 'debug':
        CFLAGS += ' -g -O0'
        AFLAGS += ' -g'
    else:
        CFLAGS += ' -O2'
    POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'
elif PLATFORM == 'iar':
    # toolchains
    CC = 'iccarm'
    AS = 'iasmarm'
    AR = 'iarchive'
    LINK = 'ilinkarm'
    TARGET_EXT = 'out'
    DEVICE = ' -D USE_STDPERIPH_DRIVER' + ' -D GD32F30X_HD'
    CFLAGS = DEVICE
    CFLAGS += ' --diag_suppress Pa050'
    CFLAGS += ' --no_cse'
    CFLAGS += ' --no_unroll'
    CFLAGS += ' --no_inline'
    CFLAGS += ' --no_code_motion'
    CFLAGS += ' --no_tbaa'
    CFLAGS += ' --no_clustering'
    CFLAGS += ' --no_scheduling'
    CFLAGS += ' --debug'
    CFLAGS += ' --endian=little'
    CFLAGS += ' --cpu=Cortex-M4'
    CFLAGS += ' -e'
    CFLAGS += ' --fpu=None'
    CFLAGS += ' --dlib_config "' + EXEC_PATH + '/arm/INC/c/DLib_Config_Normal.h"'
    CFLAGS += ' -Ol'
    CFLAGS += ' --use_c++_inline'
    AFLAGS = ''
    AFLAGS += ' -s+'
    AFLAGS += ' -w+'
    AFLAGS += ' -r'
    AFLAGS += ' --cpu Cortex-M4'
    AFLAGS += ' --fpu None'
    LFLAGS = ' --config gd32_rom.icf'
    # Use the small printf/scanf variants to reduce code size.
    LFLAGS += ' --redirect _Printf=_PrintfTiny'
    LFLAGS += ' --redirect _Scanf=_ScanfSmall'
    LFLAGS += ' --entry __iar_program_start'
    EXEC_PATH += '/arm/bin/'
    POST_ACTION = ''
|
rresol/coala | refs/heads/master | tests/results/HiddenResultTest.py | 28 | import unittest
from coalib.results.HiddenResult import HiddenResult
class HiddenResultTest(unittest.TestCase):
    """Verifies the basic contract of HiddenResult."""

    def test_hidden_result(self):
        # The second constructor argument must land unchanged in `contents`.
        hidden_result = HiddenResult("any", "anything")
        self.assertEqual(hidden_result.contents, "anything")
|
jesramirez/odoo | refs/heads/8.0 | addons/web_view_editor/__init__.py | 14224 | # -*- coding: utf-8 -*-
|
mlvdv/truffle | refs/heads/master | mx.truffle/mx_truffle.py | 2 | #
# commands.py - the GraalVM specific commands
#
# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
import os
import re
import subprocess
import mx
from mx_unittest import unittest
from mx_sigtest import sigtest
from mx_gate import Task
import mx_gate
# Handle to the enclosing 'truffle' mx suite; used for versions and commands.
_suite = mx.suite('truffle')
def javadoc(args, vm=None):
    """build the Javadoc for all API packages"""
    javadocArgs = ['--unified']
    javadocArgs.extend(args)
    mx.javadoc(javadocArgs)
def build(args, vm=None):
    """build the Java sources"""
    buildArgs = ['--source', '1.7'] + args
    opts2 = mx.build(buildArgs)
    # All arguments must have been consumed by mx.build.
    assert len(opts2.remainder) == 0
def sl(args):
    """run an SL program"""
    vmArgs, slArgs = mx.extract_VM_args(args)
    classpath = mx.classpath(["TRUFFLE_API", "com.oracle.truffle.sl"])
    command = vmArgs + ['-cp', classpath, "com.oracle.truffle.sl.SLLanguage"] + slArgs
    mx.run_java(command)
def repl(args):
    """run a simple command line debugger for Truffle-implemented languages on the class path"""
    vmArgs, slArgs = mx.extract_VM_args(args, useDoubleDash=True)
    replClient = "com.oracle.truffle.tools.debug.shell.client.SimpleREPLClient"
    mx.run_java(vmArgs + ['-cp', mx.classpath(), replClient] + slArgs)
def testdownstream(args):
    """test downstream users of the Truffle API"""
    # Check out (or update) JRuby's truffle-head branch, point it at the
    # current Truffle snapshot version, then build it and run its fast tests.
    jruby_dir = 'jruby'
    jruby_repo = 'https://github.com/jruby/jruby.git'
    jruby_branch = 'truffle-head'
    git = mx.GitConfig()
    if os.path.exists('jruby'):
        # Existing clone: discard local changes and pull the latest revision.
        git.run(['git', 'reset', 'HEAD', '--hard'], nonZeroIsFatal=True, cwd=jruby_dir)
        git.pull('jruby')
    else:
        git.clone(jruby_repo, jruby_dir)
    git.run(['git', 'checkout', jruby_branch], nonZeroIsFatal=True, cwd=jruby_dir)
    dev_version = _suite.release_version(snapshotSuffix='SNAPSHOT')
    subprocess.check_call(['tool/truffle/set_truffle_version.sh', dev_version], cwd=jruby_dir)
    # Build Truffle and install it locally so JRuby's Maven build resolves it.
    mx.build([])
    mx.maven_install([])
    subprocess.check_call(['./mvnw', 'clean'], cwd=jruby_dir)
    subprocess.check_call(['./mvnw'], cwd=jruby_dir)
    subprocess.check_call(['bin/jruby', 'tool/jt.rb', 'test', 'fast'], cwd=jruby_dir)
def _truffle_gate_runner(args, tasks):
    # Gate tasks run by `mx gate`: javadoc build, unit tests, and the
    # binary API-signature compatibility check.
    with Task('Truffle Javadoc', tasks) as t:
        if t: mx.javadoc(['--unified'])
    with Task('Truffle UnitTests', tasks) as t:
        if t: unittest(['--suite', 'truffle', '--enable-timing', '--verbose', '--fail-fast'])
    with Task('Truffle Signature Tests', tasks) as t:
        if t: sigtest(['--check', 'binary'])
# Register the gate runner and the suite-specific mx commands.
mx_gate.add_gate_runner(_suite, _truffle_gate_runner)
mx.update_commands(_suite, {
    'javadoc' : [javadoc, '[SL args|@VM options]'],
    'sl' : [sl, '[SL args|@VM options]'],
    'repl' : [repl, '[REPL Debugger args|@VM options]'],
    'testdownstream' : [testdownstream, ''],
})
"""
Merges META-INF/truffle/language and META-INF/truffle/instrument files.
This code is tightly coupled with the file format generated by
LanguageRegistrationProcessor and InstrumentRegistrationProcessor.
"""
class TruffleArchiveParticipant:
    """Archive hook that merges Truffle registration files from multiple jars.

    Note: __add__/__addsrc__ here are mx archiving callbacks, not operator
    overloads. Uses Python 2 dict APIs (iteritems/viewkeys).
    """
    # Matches entries like 'language1.name' -> groups ('language1', '.name').
    PROPERTY_RE = re.compile(r'(language\d+|instrument\d+)(\..+)')

    def _truffle_metainf_file(self, arcname):
        # Map an archive entry name to its Truffle registration kind, or None.
        if arcname == 'META-INF/truffle/language':
            return 'language'
        if arcname == 'META-INF/truffle/instrument':
            return 'instrument'
        return None

    def __opened__(self, arc, srcArc, services):
        # Called when archiving starts; buffers per-kind property sets.
        self.settings = {}
        self.arc = arc

    def __add__(self, arcname, contents):
        # Intercept Truffle META-INF entries: parse their numbered properties
        # and buffer them so __closing__ can renumber and merge them.
        metainfFile = self._truffle_metainf_file(arcname)
        if metainfFile:
            propertyRe = TruffleArchiveParticipant.PROPERTY_RE
            properties = {}
            for line in contents.strip().split('\n'):
                if not line.startswith('#'):
                    m = propertyRe.match(line)
                    assert m, 'line in ' + arcname + ' does not match ' + propertyRe.pattern + ': ' + line
                    enum = m.group(1)
                    prop = m.group(2)
                    properties.setdefault(enum, []).append(prop)
            self.settings.setdefault(metainfFile, []).append(properties)
            # True = entry consumed; mx must not write it through unchanged.
            return True
        return False

    def __addsrc__(self, arcname, contents):
        # Source entries are never intercepted.
        return False

    def __closing__(self):
        # Emit one merged META-INF file per kind, renumbering the enum
        # prefixes (language1, language2, ...) so merged entries stay unique.
        for metainfFile, propertiesList in self.settings.iteritems():
            arcname = 'META-INF/truffle/' + metainfFile
            lines = []
            counter = 1
            for properties in propertiesList:
                for enum in sorted(properties.viewkeys()):
                    assert enum.startswith(metainfFile)
                    newEnum = metainfFile + str(counter)
                    counter += 1
                    for prop in properties[enum]:
                        lines.append(newEnum + prop)
            content = os.linesep.join(lines)
            self.arc.zf.writestr(arcname, content + os.linesep)
def mx_post_parse_cmd_line(opts):
    # Install the META-INF merging participant on the TRUFFLE_TEST distribution.
    dist = mx.distribution('TRUFFLE_TEST')
    dist.set_archiveparticipant(TruffleArchiveParticipant())
|
Jobava/zamboni | refs/heads/master | mkt/features/tests/test_serializers.py | 13 | import mkt.site.tests
from mkt.features.serializers import AppFeaturesSerializer
class TestAppFeaturesSerializer(mkt.site.tests.TestCase):
    """AppFeaturesSerializer should emit exactly the truthy feature flags."""

    def setUp(self):
        self.app = mkt.site.tests.app_factory()
        self.serializer = AppFeaturesSerializer()

    def _test_features(self, true_features):
        # Serialize the current version's feature flags and check the output
        # names exactly the expected truthy features. The serializer keys
        # lack the 'has_' prefix, hence the re-prefixing before comparison.
        features = self.app.current_version.features
        data = self.serializer.to_native(features)
        self.assertSetEqual(['has_' + i for i in data], true_features)

    def test_all_false(self):
        self._test_features([])

    def test_one_true(self):
        features = {'has_apps': True}
        self.app.current_version.features.update(**features)
        self._test_features(features.keys())

    def test_several_true(self):
        features = {'has_apps': True, 'has_video_webm': True}
        self.app.current_version.features.update(**features)
        self._test_features(features.keys())
|
kashif/chainer | refs/heads/master | examples/dcgan/train_dcgan.py | 4 | #!/usr/bin/env python
from __future__ import print_function
import argparse
import os
import chainer
from chainer import training
from chainer.training import extensions
from net import Discriminator
from net import Generator
from updater import DCGANUpdater
from visualize import out_generated_image
def main():
    """Parse CLI options, build the DCGAN generator/discriminator, and train."""
    parser = argparse.ArgumentParser(description='Chainer example: DCGAN')
    parser.add_argument('--batchsize', '-b', type=int, default=50,
                        help='Number of images in each mini-batch')
    parser.add_argument('--epoch', '-e', type=int, default=1000,
                        help='Number of sweeps over the dataset to train')
    parser.add_argument('--gpu', '-g', type=int, default=-1,
                        help='GPU ID (negative value indicates CPU)')
    parser.add_argument('--dataset', '-i', default='',
                        help='Directory of image files. Default is cifar-10.')
    parser.add_argument('--out', '-o', default='result',
                        help='Directory to output the result')
    parser.add_argument('--resume', '-r', default='',
                        help='Resume the training from snapshot')
    parser.add_argument('--n_hidden', '-n', type=int, default=100,
                        help='Number of hidden units (z)')
    parser.add_argument('--seed', type=int, default=0,
                        help='Random seed of z at visualization stage')
    parser.add_argument('--snapshot_interval', type=int, default=1000,
                        help='Interval of snapshot')
    parser.add_argument('--display_interval', type=int, default=100,
                        help='Interval of displaying log to console')
    args = parser.parse_args()
    print('GPU: {}'.format(args.gpu))
    print('# Minibatch-size: {}'.format(args.batchsize))
    print('# n_hidden: {}'.format(args.n_hidden))
    print('# epoch: {}'.format(args.epoch))
    print('')
    # Set up a neural network to train
    gen = Generator(n_hidden=args.n_hidden)
    dis = Discriminator()
    if args.gpu >= 0:
        # Make a specified GPU current
        chainer.cuda.get_device_from_id(args.gpu).use()
        gen.to_gpu()  # Copy the model to the GPU
        dis.to_gpu()
    # Setup an optimizer: Adam with weight decay, same hyperparameters for
    # both networks.
    def make_optimizer(model, alpha=0.0002, beta1=0.5):
        optimizer = chainer.optimizers.Adam(alpha=alpha, beta1=beta1)
        optimizer.setup(model)
        optimizer.add_hook(chainer.optimizer.WeightDecay(0.0001), 'hook_dec')
        return optimizer
    opt_gen = make_optimizer(gen)
    opt_dis = make_optimizer(dis)
    if args.dataset == '':
        # Load the CIFAR10 dataset if args.dataset is not specified
        train, _ = chainer.datasets.get_cifar10(withlabel=False, scale=255.)
    else:
        # Otherwise train on the png/jpg files found in the given directory.
        all_files = os.listdir(args.dataset)
        image_files = [f for f in all_files if ('png' in f or 'jpg' in f)]
        print('{} contains {} image files'
              .format(args.dataset, len(image_files)))
        train = chainer.datasets\
            .ImageDataset(paths=image_files, root=args.dataset)
    train_iter = chainer.iterators.SerialIterator(train, args.batchsize)
    # Set up a trainer
    updater = DCGANUpdater(
        models=(gen, dis),
        iterator=train_iter,
        optimizer={
            'gen': opt_gen, 'dis': opt_dis},
        device=args.gpu)
    trainer = training.Trainer(updater, (args.epoch, 'epoch'), out=args.out)
    snapshot_interval = (args.snapshot_interval, 'iteration')
    display_interval = (args.display_interval, 'iteration')
    # Periodic snapshots of the trainer state and of each model separately.
    trainer.extend(
        extensions.snapshot(filename='snapshot_iter_{.updater.iteration}.npz'),
        trigger=snapshot_interval)
    trainer.extend(extensions.snapshot_object(
        gen, 'gen_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
    trainer.extend(extensions.snapshot_object(
        dis, 'dis_iter_{.updater.iteration}.npz'), trigger=snapshot_interval)
    trainer.extend(extensions.LogReport(trigger=display_interval))
    trainer.extend(extensions.PrintReport([
        'epoch', 'iteration', 'gen/loss', 'dis/loss',
    ]), trigger=display_interval)
    trainer.extend(extensions.ProgressBar(update_interval=10))
    # Dump a 10x10 grid of generated samples at every snapshot interval.
    trainer.extend(
        out_generated_image(
            gen, dis,
            10, 10, args.seed, args.out),
        trigger=snapshot_interval)
    if args.resume:
        # Resume from a snapshot
        chainer.serializers.load_npz(args.resume, trainer)
    # Run the training
    trainer.run()


if __name__ == '__main__':
    main()
scop/bash-completion | refs/heads/master | test/t/unit/test_unit_quote.py | 2 | import pytest
from conftest import TestUnitBase, assert_bash_exec
@pytest.mark.bashcomp(cmd=None)
class TestUnitQuote(TestUnitBase):
    """Tests for the `quote` shell helper function."""

    def _assert_quoted(self, bash, command, expected):
        # Run `command` in bash and compare its stripped output against the
        # expected single-quoted form.
        output = assert_bash_exec(
            bash, command, want_output=True, want_newline=False
        )
        assert output.strip() == expected

    def test_1(self, bash):
        self._assert_quoted(bash, 'quote "a b"', "'a b'")

    def test_2(self, bash):
        self._assert_quoted(bash, 'quote "a b"', "'a b'")

    def test_3(self, bash):
        self._assert_quoted(bash, 'quote " a "', "' a '")

    def test_4(self, bash):
        self._assert_quoted(bash, "quote \"a'b'c\"", r"'a'\''b'\''c'")

    def test_5(self, bash):
        self._assert_quoted(bash, 'quote "a\'"', r"'a'\'''")
|
ivan-fedorov/intellij-community | refs/heads/master | python/testData/inspections/PyArgumentListInspection/nestedClass.py | 83 | class Py2460A:
class Py2460B:
def __init__(self):
pass
def __init__(self):
self.b = Py2460A.Py2460B()
|
santisiri/popego | refs/heads/master | envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/web2/test/test_compat.py | 2 | from twisted.web2.test.test_server import BaseCase
import sys
try:
    from twisted.web import resource
    # Minimal old-style twisted.web resource used as a fixture by the
    # compatibility tests below; render() simply echoes the stored message.
    class OldWebResource(resource.Resource):
        def __init__(self, message, *args, **kwargs):
            self.message = message
            resource.Resource.__init__(self, *args, **kwargs)
        # Leaf by default; individual tests flip this to exercise children.
        isLeaf = True
        def render(self, req):
            return self.message
except ImportError:
    # twisted.web not installed; the test class below is skipped.
    resource = None
class OldWebCompat(BaseCase):
    """Verify that old-style twisted.web resources are served correctly
    through the twisted.web2 compatibility layer, both as leaves and with
    children.
    """
    try:
        import twisted.web
    except ImportError:
        skip = "can't run w/o twisted.web"

    def testOldWebResource(self):
        res = OldWebResource('I am an OldWebResource')
        self.assertResponse((res, "http://localhost/"),
                            (200, {}, 'I am an OldWebResource'))

    def testOldWebResourceNotLeaf(self):
        res = OldWebResource('I am not a leaf')
        res.isLeaf = False
        self.assertResponse((res, "http://localhost/"),
                            (200, {}, 'I am not a leaf'))

    def testOldWebResourceWithChildren(self):
        parent = OldWebResource('I am an OldWebResource with a child')
        parent.isLeaf = False
        parent.putChild('child',
                        OldWebResource('I am a child of an OldWebResource'))
        self.assertResponse((parent, "http://localhost/"),
                            (200, {},
                             'I am an OldWebResource with a child'))
        self.assertResponse((parent, "http://localhost/child"),
                            (200, {},
                             'I am a child of an OldWebResource'))
if not resource:
    # Belt-and-braces: in addition to the in-class import guard above, mark
    # the whole class skipped when the fixture import at the top failed.
    OldWebCompat.skip = "can't run w/o twisted.web"
|
csuttles/utils | refs/heads/master | python/todo-api/flask/lib/python2.7/site-packages/flask/_compat.py | 783 | # -*- coding: utf-8 -*-
"""
flask._compat
~~~~~~~~~~~~~
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
:copyright: (c) 2013 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
PY2 = sys.version_info[0] == 2
# Identity function, used on Python 3 where no wrapper is needed (see below).
_identity = lambda x: x
if not PY2:
    # Python 3: text is str; iteration helpers wrap the dict view objects.
    text_type = str
    string_types = (str,)
    integer_types = (int, )
    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())
    from io import StringIO
    def reraise(tp, value, tb=None):
        # Re-raise `value` with traceback `tb` using Python 3 syntax.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    # On Python 3 __str__ already returns text, so no class decoration needed.
    implements_to_string = _identity
else:
    # Python 2: text is unicode; dicts have native iter* methods.
    text_type = unicode
    string_types = (str, unicode)
    integer_types = (int, long)
    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()
    from cStringIO import StringIO
    # `raise tp, value, tb` is a syntax error on Python 3, so the Python 2
    # form is compiled via exec() to keep this module importable everywhere.
    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
    def implements_to_string(cls):
        # Make __str__ return UTF-8 bytes derived from the class's __unicode__.
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
        return cls
def with_metaclass(meta, *bases):
    """Create a temporary base class that applies ``meta`` as the metaclass
    of the eventual subclass, without leaving a dummy class in its MRO.

    The returned throwaway class carries a one-shot metaclass: when the
    first *real* subclass is created, ``__new__`` intercepts the call and
    builds that subclass directly via ``meta(name, bases, d)``.  ``__call__``
    and ``__init__`` are pinned back to ``type``'s versions so the shim
    behaves like a plain class in the meantime.  (This is the advantage over
    six.with_metaclass: no dummy class ends up in the final MRO.)
    """
    class _MetaShim(meta):
        __call__ = type.__call__
        __init__ = type.__init__

        def __new__(mcls, name, this_bases, d):
            if this_bases is None:
                # Building the temporary placeholder class itself.
                return type.__new__(mcls, name, (), d)
            # Building the real subclass: defer to the actual metaclass,
            # using the originally requested bases.
            return meta(name, bases, d)
    return _MetaShim('temporary_class', None, {})
|
deaspo/micro_bank_system | refs/heads/FromMyPc | micro_bank_system/urls.py | 3 | """micro_bank_system URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from bank import views as bView
from loan import views as lView
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^$', bView.selfCheck, name='check'),
    url(r'^home/?$', bView.home, name='home'),
    # BUG FIX: these patterns previously lacked the '^' anchor, so e.g.
    # r'login/?$' matched *any* path ending in "login" (such as /xlogin).
    # Anchoring makes them consistent with the routes above.
    url(r'^login/?$', bView.signin, name='login'),
    url(r'^logout/?$', bView.sign_out, name='logout'),
    # URLs for the loan app
    url(r'^loan/?$', lView.loan_home, name='loan'),
    # Define account urls
    url(r'^accounts/', include('registration.backends.simple.urls')),
]
# Serve uploaded media through Django only while DEBUG is on; in production
# the front-end server (nginx/gunicorn setup) serves MEDIA_ROOT itself.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
shapiromatron/amy | refs/heads/master | workshops/migrations/0002_auto_20150219_1305.py | 4 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Allow Site.notes to be blank in forms and default it to ''."""
    dependencies = [
        ('workshops', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='site',
            name='notes',
            field=models.TextField(blank=True, default=''),
            preserve_default=True,
        ),
    ]
|
altsen/diandiyun-platform | refs/heads/master | common/djangoapps/external_auth/tests/test_helper.py | 63 | """
Tests for utility functions in external_auth module
"""
from django.test import TestCase
from external_auth.views import _safe_postlogin_redirect
class ExternalAuthHelperFnTest(TestCase):
    """
    Unit tests for the external_auth.views helper function
    """
    def test__safe_postlogin_redirect(self):
        """
        Tests the _safe_postlogin_redirect function with different values of next
        """
        host = 'testserver'
        onsite_targets = [
            '/dashboard',
            '/courses/org/num/name/courseware',
            'http://{}/my/custom/url'.format(host),
        ]
        # On-site targets must be redirected to verbatim.
        for target in onsite_targets:
            response = _safe_postlogin_redirect(target, host)
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['location'], target)
        # An off-site target must be replaced by the site root.
        offsite_target = 'http://www.attacker.com'
        response = _safe_postlogin_redirect(offsite_target, host)
        self.assertEqual(response.status_code, 302)
        self.assertEqual("/", response['location'])
|
argonemyth/sentry | refs/heads/master | src/sentry/models/releasefile.py | 3 | """
sentry.models.releasefile
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from django.db import models
from hashlib import sha1
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
class ReleaseFile(Model):
    """
    A ReleaseFile is an association between a Release and a File.
    The ident of the file should be sha1(name) and must be unique per release.
    """
    project = FlexibleForeignKey('sentry.Project')
    release = FlexibleForeignKey('sentry.Release')
    file = FlexibleForeignKey('sentry.File')
    # sha1 hex digest of `name`; filled in automatically by save() below.
    ident = models.CharField(max_length=40)
    name = models.TextField()
    __repr__ = sane_repr('release', 'ident')
    class Meta:
        unique_together = (('release', 'ident'),)
        app_label = 'sentry'
        db_table = 'sentry_releasefile'
    def save(self, *args, **kwargs):
        # Derive the ident lazily so callers only need to supply a name.
        if not self.ident and self.name:
            self.ident = type(self).get_ident(self.name)
        return super(ReleaseFile, self).save(*args, **kwargs)
    @classmethod
    def get_ident(cls, name):
        # Stable, fixed-length key for a (possibly unicode) file name.
        return sha1(name.encode('utf-8')).hexdigest()
|
GageGaskins/osf.io | refs/heads/develop | website/files/models/figshare.py | 13 | from website.util.sanitize import escape_html
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('FigshareFile', 'FigshareFolder', 'FigshareFileNode')
class FigshareFileNode(FileNode):
    # Common base for figshare-backed file nodes; tags subclasses with the
    # 'figshare' storage provider.
    provider = 'figshare'
class FigshareFolder(FigshareFileNode, Folder):
    # Folder variant; no figshare-specific behavior beyond the provider tag.
    pass
class FigshareFile(FigshareFileNode, File):
    def touch(self, bearer, revision=None, **kwargs):
        # figshare does not support versioning, so any requested revision is
        # deliberately discarded and None is forwarded instead.
        return super(FigshareFile, self).touch(bearer, revision=None, **kwargs)
    def update(self, revision, data, user=None):
        """Figshare does not support versioning.
        Always pass revision as None to avoid conflict.
        """
        self.name = data['name']
        self.materialized_path = data['materialized']
        self.save()
        # identifier=None because there is only ever one (unversioned) copy.
        version = FileVersion(identifier=None)
        version.update_metadata(data, save=False)
        # Draft files are not renderable
        if data['extra']['status'] == 'drafts':
            return (version, u'''
            <style>
            .file-download{{display: none;}}
            .file-share{{display: none;}}
            </style>
            <div class="alert alert-info" role="alert">
            The file "{name}" is still a draft on figshare. <br>
            To view it on the OSF <a href="http://figshare.com/faqs">publish</a> it on figshare.
            </div>
            '''.format(name=escape_html(self.name)))
        return version
|
anoushkaalavilli/empty-app | refs/heads/master | ggame/__init__.py | 227 | from ggame.ggame import *
|
DreadPirateRobert/stock_visualiser | refs/heads/master | stock_visualiser_virtualenv/lib/python3.5/site-packages/pip/_vendor/distlib/scripts.py | 333 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
get_executable, in_venv)
logger = logging.getLogger(__name__)
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>'''.strip()
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
def _resolve(module, func):
__import__(module)
mod = sys.modules[module]
parts = func.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
try:
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
sys.stderr.write('%%s\\n' %% e)
rc = 1
sys.exit(rc)
'''
def _enquote_executable(executable):
if ' ' in executable:
# make sure we quote only the executable in case of env
# for example /usr/bin/env "/dir with spaces/bin/jython"
# instead of "/usr/bin/env /dir with spaces/bin/jython"
# otherwise whole
if executable.startswith('/usr/bin/env '):
env, _executable = executable.split(' ', 1)
if ' ' in _executable and not _executable.startswith('"'):
executable = '%s "%s"' % (env, _executable)
else:
if not executable.startswith('"'):
executable = '"%s"' % executable
return executable
class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    script_template = SCRIPT_TEMPLATE
    executable = None  # for shebangs
    def __init__(self, source_dir, target_dir, add_launchers=True,
                 dry_run=False, fileop=None):
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
                                                 os._name == 'posix')
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)
        self._is_nt = os.name == 'nt' or (
            os.name == 'java' and os._name == 'nt')
    def _get_alternate_executable(self, executable, options):
        # For GUI scripts on Windows, prefer pythonw.exe so no console
        # window is opened.
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable
    if sys.platform.startswith('java'):  # pragma: no cover
        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False
        def _fix_jython_executable(self, executable):
            # Shell-script Jython launchers cannot be used directly on a
            # shebang line; route through /usr/bin/env where necessary.
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java
                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable
    def _get_shebang(self, encoding, post_interp=b'', options=None):
        # Build the '#!' line (as bytes) for generated scripts, validating
        # that it is decodable both as UTF-8 and as the script's encoding.
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False  # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'),
                            'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            executable = os.path.join(
                sysconfig.get_config_var('BINDIR'),
               'python%s%s' % (sysconfig.get_config_var('VERSION'),
                               sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)
        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)
        # Normalise case for Windows
        executable = os.path.normcase(executable)
        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = _enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
            and '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = b'#!' + executable + post_interp + b'\n'
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError(
                'The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError(
                    'The shebang (%r) is not decodable '
                    'from the script encoding (%r)' % (shebang, encoding))
        return shebang
    def _get_script_text(self, entry):
        # Render the wrapper that imports entry.prefix and invokes
        # entry.suffix (see SCRIPT_TEMPLATE).
        return self.script_template % dict(module=entry.prefix,
                                           func=entry.suffix)
    manifest = _DEFAULT_MANIFEST
    def get_manifest(self, exename):
        # Windows SxS manifest text for a generated launcher executable.
        base = os.path.basename(exename)
        return self.manifest % base
    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        # Write the script under each of `names`; on Windows with launchers
        # enabled, the payload is a launcher exe + shebang + zipped script.
        # Every path actually written is appended to `filenames`.
        use_launcher = self.add_launchers and self._is_nt
        linesep = os.linesep.encode('utf-8')
        if not use_launcher:
            script_bytes = shebang + linesep + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + linesep + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)  # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass  # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
            filenames.append(outname)
    def _make_script(self, entry, filenames, options=None):
        # Generate the wrapper script (and any name variants) for one
        # export entry.
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        name = entry.name
        scriptnames = set()
        if '' in self.variants:
            scriptnames.add(name)
        if 'X' in self.variants:
            scriptnames.add('%s%s' % (name, sys.version[0]))
        if 'X.Y' in self.variants:
            scriptnames.add('%s-%s' % (name, sys.version[:3]))
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)
    def _copy_script(self, script, filenames):
        # Copy an existing script, rewriting its shebang when its first
        # line matches FIRST_LINE_RE (i.e. it invokes python).
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return
        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                # NOTE(review): ScriptMaker defines no get_command_name();
                # this logging call would raise AttributeError if reached --
                # confirm against upstream distlib.
                logger.warning('%s: %s is an empty file (skipping)',
                               self.get_command_name(),  script)
                return
            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''
        if not adjust:
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()
    @property
    def dry_run(self):
        return self._fileop.dry_run
    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value
    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
        def _get_launcher(self, kind):
            # 't' = console ("terminal") launcher, 'w' = windowed launcher;
            # bitness is chosen to match the running interpreter.
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            result = finder(distlib_package).find(name).bytes
            return result
    # Public API follows
    def make(self, specification, options=None):
        """
        Make a script.
        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames
    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them.
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
|
ds-hwang/chromium-crosswalk | refs/heads/master | testing/scripts/sizes.py | 23 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import sys
import common
def main_run(args):
    # Python 2 script. Runs the legacy sizes.py harness, then gates its
    # results against the absolute limits in tools/perf_expectations.
    with common.temporary_file() as tempfile_path:
        rc = common.run_runtest(args, [
            '--test-type', 'sizes',
            '--run-python-script',
            os.path.join(
                common.SRC_DIR, 'infra', 'scripts', 'legacy', 'scripts', 'slave',
                'chromium', 'sizes.py'),
            '--json', tempfile_path])
        with open(tempfile_path) as f:
            results = json.load(f)
    with open(os.path.join(common.SRC_DIR, 'tools', 'perf_expectations',
                           'perf_expectations.json')) as f:
        perf_expectations = json.load(f)
    # First positional argument is the perf-expectation prefix (e.g. bot name).
    prefix = args.args[0]
    valid = (rc == 0)
    failures = []
    for name, result in results.iteritems():
        fqtn = '%s/%s/%s' % (prefix, name, result['identifier'])
        if fqtn not in perf_expectations:
            # No expectation recorded for this metric; nothing to enforce.
            continue
        if perf_expectations[fqtn]['type'] != 'absolute':
            print 'ERROR: perf expectation %r is not yet supported' % fqtn
            valid = False
            continue
        actual = result['value']
        expected = perf_expectations[fqtn]['regress']
        better = perf_expectations[fqtn]['better']
        # 'better' gives the improvement direction; a value beyond 'regress'
        # in the other direction is a failure.
        check_result = ((actual <= expected) if better == 'lower'
                        else (actual >= expected))
        if not check_result:
            failures.append(fqtn)
            print 'FAILED %s: actual %s, expected %s, better %s' % (
                fqtn, actual, expected, better)
    json.dump({
        'valid': valid,
        'failures': failures,
    }, args.output)
    # sizes.py itself doesn't fail on regressions.
    if failures and rc == 0:
        rc = 1
    return rc
def main_compile_targets(args):
    # Only 'chrome' needs to be built to measure binary sizes.
    json.dump(['chrome'], args.output)
if __name__ == '__main__':
    funcs = {
        'run': main_run,
        'compile_targets': main_compile_targets,
    }
    sys.exit(common.run_script(sys.argv[1:], funcs))
|
kawamon/hue | refs/heads/master | desktop/core/ext-py/Django-1.11.29/django/core/files/__init__.py | 839 | from django.core.files.base import File
__all__ = ['File']
|
AndroidOpenDevelopment/android_external_chromium_org | refs/heads/lp | tools/deep_memory_profiler/lib/policy.py | 46 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
import re
LOGGER = logging.getLogger('dmprof')
# Directory containing this package and the bundled policies.json index.
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
POLICIES_JSON_PATH = os.path.join(BASE_PATH, 'policies.json')
# Heap Profile Policy versions
# POLICY_DEEP_1 DOES NOT include allocation_type columns.
# mmap regions are distincted w/ mmap frames in the pattern column.
POLICY_DEEP_1 = 'POLICY_DEEP_1'
# POLICY_DEEP_2 DOES include allocation_type columns.
# mmap regions are distincted w/ the allocation_type column.
POLICY_DEEP_2 = 'POLICY_DEEP_2'
# POLICY_DEEP_3 is in JSON format.
POLICY_DEEP_3 = 'POLICY_DEEP_3'
# POLICY_DEEP_4 contains typeinfo.
POLICY_DEEP_4 = 'POLICY_DEEP_4'
class Rule(object):
    """One matching rule from a policy file.

    Each *_pattern argument is a regular-expression source string.  It is
    compiled with a trailing \\Z appended, so a rule only matches when the
    whole subject string matches, never a prefix.  Patterns left as None
    remain None and are treated as wildcards by the Policy matcher.
    """
    def __init__(self,
                 name,
                 allocator_type,
                 stackfunction_pattern=None,
                 stacksourcefile_pattern=None,
                 typeinfo_pattern=None,
                 mappedpathname_pattern=None,
                 mappedpermission_pattern=None,
                 sharedwith=None):
        def full_match(pattern):
            # \Z anchors at end-of-string; re.match already anchors the start.
            return re.compile(pattern + r'\Z') if pattern else None
        self._name = name
        self._allocator_type = allocator_type
        self._stackfunction_pattern = full_match(stackfunction_pattern)
        self._stacksourcefile_pattern = full_match(stacksourcefile_pattern)
        self._typeinfo_pattern = full_match(typeinfo_pattern)
        self._mappedpathname_pattern = full_match(mappedpathname_pattern)
        self._mappedpermission_pattern = full_match(mappedpermission_pattern)
        self._sharedwith = sharedwith if sharedwith else []
    @property
    def name(self):
        return self._name
    @property
    def allocator_type(self):
        return self._allocator_type
    @property
    def stackfunction_pattern(self):
        return self._stackfunction_pattern
    @property
    def stacksourcefile_pattern(self):
        return self._stacksourcefile_pattern
    @property
    def typeinfo_pattern(self):
        return self._typeinfo_pattern
    @property
    def mappedpathname_pattern(self):
        return self._mappedpathname_pattern
    @property
    def mappedpermission_pattern(self):
        return self._mappedpermission_pattern
    @property
    def sharedwith(self):
        return self._sharedwith
class Policy(object):
"""Represents a policy, a content of a policy file."""
def __init__(self, rules, version, components):
self._rules = rules
self._version = version
self._components = components
@property
def rules(self):
return self._rules
@property
def version(self):
return self._version
@property
def components(self):
return self._components
def find_rule(self, component_name):
"""Finds a rule whose name is |component_name|. """
for rule in self._rules:
if rule.name == component_name:
return rule
return None
def find_malloc(self, bucket):
"""Finds a matching component name which a given |bucket| belongs to.
Args:
bucket: A Bucket object to be searched for.
Returns:
A string representing a component name.
"""
assert not bucket or bucket.allocator_type == 'malloc'
if not bucket:
return 'no-bucket'
if bucket.component_cache:
return bucket.component_cache
stackfunction = bucket.symbolized_joined_stackfunction
stacksourcefile = bucket.symbolized_joined_stacksourcefile
typeinfo = bucket.symbolized_typeinfo
if typeinfo.startswith('0x'):
typeinfo = bucket.typeinfo_name
for rule in self._rules:
if (rule.allocator_type == 'malloc' and
(not rule.stackfunction_pattern or
rule.stackfunction_pattern.match(stackfunction)) and
(not rule.stacksourcefile_pattern or
rule.stacksourcefile_pattern.match(stacksourcefile)) and
(not rule.typeinfo_pattern or rule.typeinfo_pattern.match(typeinfo))):
bucket.component_cache = rule.name
return rule.name
assert False
def find_mmap(self, region, bucket_set,
pageframe=None, group_pfn_counts=None):
"""Finds a matching component which a given mmap |region| belongs to.
It uses |bucket_set| to match with backtraces. If |pageframe| is given,
it considers memory sharing among processes.
NOTE: Don't use Bucket's |component_cache| for mmap regions because they're
classified not only with bucket information (mappedpathname for example).
Args:
region: A tuple representing a memory region.
bucket_set: A BucketSet object to look up backtraces.
pageframe: A PageFrame object representing a pageframe maybe including
a pagecount.
group_pfn_counts: A dict mapping a PFN to the number of times the
the pageframe is mapped by the known "group (Chrome)" processes.
Returns:
A string representing a component name.
"""
assert region[0] == 'hooked'
bucket = bucket_set.get(region[1]['bucket_id'])
assert not bucket or bucket.allocator_type == 'mmap'
if not bucket:
return 'no-bucket', None
stackfunction = bucket.symbolized_joined_stackfunction
stacksourcefile = bucket.symbolized_joined_stacksourcefile
sharedwith = self._categorize_pageframe(pageframe, group_pfn_counts)
for rule in self._rules:
if (rule.allocator_type == 'mmap' and
(not rule.stackfunction_pattern or
rule.stackfunction_pattern.match(stackfunction)) and
(not rule.stacksourcefile_pattern or
rule.stacksourcefile_pattern.match(stacksourcefile)) and
(not rule.mappedpathname_pattern or
rule.mappedpathname_pattern.match(region[1]['vma']['name'])) and
(not rule.mappedpermission_pattern or
rule.mappedpermission_pattern.match(
region[1]['vma']['readable'] +
region[1]['vma']['writable'] +
region[1]['vma']['executable'] +
region[1]['vma']['private'])) and
(not rule.sharedwith or
not pageframe or sharedwith in rule.sharedwith)):
return rule.name, bucket
assert False
def find_unhooked(self, region, pageframe=None, group_pfn_counts=None):
  """Returns the component a given unhooked |region| belongs to.

  When |pageframe| is given, the sharing status of the page is matched
  against each rule's |sharedwith| list.

  Args:
      region: A tuple representing a memory region.
      pageframe: A PageFrame object representing a pageframe, possibly
          carrying a pagecount.
      group_pfn_counts: A dict mapping a PFN to the number of times the
          pageframe is mapped by the known "group (Chrome)" processes.

  Returns:
      A string representing a component name.
  """
  assert region[0] == 'unhooked'
  sharedwith = self._categorize_pageframe(pageframe, group_pfn_counts)

  for rule in self._rules:
    if rule.allocator_type != 'unhooked':
      continue
    if (rule.mappedpathname_pattern and
        not rule.mappedpathname_pattern.match(region[1]['vma']['name'])):
      continue
    if rule.mappedpermission_pattern:
      # Permission is matched as the concatenated r/w/x/p flag string.
      vma = region[1]['vma']
      permission = (vma['readable'] + vma['writable'] +
                    vma['executable'] + vma['private'])
      if not rule.mappedpermission_pattern.match(permission):
        continue
    if rule.sharedwith and pageframe and sharedwith not in rule.sharedwith:
      continue
    return rule.name

  # Policy files are expected to end with a catch-all rule.
  assert False
@staticmethod
def load(filename, filetype):
  """Reads and parses the policy file |filename| relative to BASE_PATH.

  Args:
      filename: Name of the policy file to load, relative to BASE_PATH.
      filetype: Format of the file; only 'json' is currently supported.

  Returns:
      A loaded Policy object.
  """
  policy_path = os.path.join(BASE_PATH, filename)
  with open(policy_path) as policy_f:
    return Policy.parse(policy_f, filetype)
@staticmethod
def parse(policy_f, filetype):
"""Parses a policy file content in a |format|.
Args:
policy_f: An IO object to be loaded.
filetype: A string to specify a type of the file. Only 'json' is
supported for now.
Returns:
A loaded Policy object.
"""
if filetype == 'json':
return Policy._parse_json(policy_f)
else:
return None
# Matches "//"-style comments so they can be stripped before JSON parsing
# (JSON itself has no comment syntax); used by _parse_json below.
JSON_COMMENT_REGEX = re.compile(r'//.*')
@staticmethod
def _parse_json(policy_f):
  """Parses a policy file in JSON format.

  A policy file contains component names and their stacktrace patterns
  written in regular expressions.  Those patterns are matched against each
  symbol of each stacktrace in the order written in the policy file.

  Args:
      policy_f: A File/IO object to read.

  Returns:
      A loaded Policy object.
  """
  policy_json = policy_f.read()
  # JSON has no comment syntax; strip "//..." comments before decoding.
  policy_json = Policy.JSON_COMMENT_REGEX.sub('', policy_json)
  policy = json.loads(policy_json)

  rules = []
  for rule in policy['rules']:
    # 'stacktrace' is accepted as a legacy alias for 'stackfunction'.
    stackfunction = rule.get('stackfunction') or rule.get('stacktrace')
    stacksourcefile = rule.get('stacksourcefile')
    rules.append(Rule(
        rule['name'],
        rule['allocator'],  # allocator_type
        stackfunction,
        stacksourcefile,
        rule.get('typeinfo'),  # consistent with the other optional keys
        rule.get('mappedpathname'),
        rule.get('mappedpermission'),
        rule.get('sharedwith')))

  return Policy(rules, policy['version'], policy['components'])
@staticmethod
def _categorize_pageframe(pageframe, group_pfn_counts):
"""Categorizes a pageframe based on its sharing status.
Returns:
'private' if |pageframe| is not shared with other processes. 'group'
if |pageframe| is shared only with group (Chrome-related) processes.
'others' if |pageframe| is shared with non-group processes.
"""
if not pageframe:
return 'private'
if pageframe.pagecount:
if pageframe.pagecount == 1:
return 'private'
elif pageframe.pagecount <= group_pfn_counts.get(pageframe.pfn, 0) + 1:
return 'group'
else:
return 'others'
else:
if pageframe.pfn in group_pfn_counts:
return 'group'
else:
return 'private'
class PolicySet(object):
  """A labeled collection of Policy objects."""

  def __init__(self, policy_directory):
    # Maps a label (e.g. 'l0') to its loaded Policy object.
    self._policy_directory = policy_directory

  @staticmethod
  def load(labels=None):
    """Loads a set of policies via the "default policy directory".

    The "default policy directory" contains pairs of policies and their
    labels.  For example, a policy "policy.l0.json" is labeled "l0" in the
    default policy directory "policies.json".  All policies in the
    directory are loaded by default; |labels| restricts the selection.

    Args:
        labels: An array of policy labels to load, or None for all.

    Returns:
        A PolicySet object.
    """
    default_directory = PolicySet._load_default_policy_directory()
    if not labels:
      return PolicySet._load_policies(default_directory)

    selected = {}
    for label in labels:
      if label in default_directory:
        selected[label] = default_directory[label]
    # TODO(dmikurube): Load an un-labeled policy file.
    return PolicySet._load_policies(selected)

  def __len__(self):
    return len(self._policy_directory)

  def __iter__(self):
    return iter(self._policy_directory)

  def __getitem__(self, label):
    return self._policy_directory[label]

  @staticmethod
  def _load_default_policy_directory():
    # policies.json maps labels to entries with 'file' and 'format' keys.
    with open(POLICIES_JSON_PATH, mode='r') as policies_f:
      return json.load(policies_f)

  @staticmethod
  def _load_policies(directory):
    LOGGER.info('Loading policy files.')
    policies = {}
    for label, entry in directory.items():
      LOGGER.info(' %s: %s' % (label, entry['file']))
      loaded = Policy.load(entry['file'], entry['format'])
      if loaded:
        policies[label] = loaded
    return PolicySet(policies)
|
arnif/CouchPotatoServer | refs/heads/master | libs/sqlalchemy/connectors/zxJDBC.py | 17 | # connectors/zxJDBC.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import sys
from sqlalchemy.connectors import Connector
class ZxJDBCConnector(Connector):
    """Connector for the Jython zxJDBC DBAPI."""

    driver = 'zxjdbc'

    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False

    supports_unicode_binds = True
    supports_unicode_statements = sys.version > '2.5.0+'
    description_encoding = None
    default_paramstyle = 'qmark'

    # Subclasses fill these in for the specific backend.
    jdbc_db_name = None
    jdbc_driver_name = None

    @classmethod
    def dbapi(cls):
        from com.ziclix.python.sql import zxJDBC
        return zxJDBC

    def _driver_kwargs(self):
        """Return kw arg dict to be sent to connect()."""
        return {}

    def _create_jdbc_url(self, url):
        """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
        if url.port is not None:
            host_spec = '%s:%s' % (url.host, url.port)
        else:
            host_spec = url.host
        return 'jdbc:%s://%s/%s' % (self.jdbc_db_name, host_spec,
                                    url.database)

    def create_connect_args(self, url):
        opts = self._driver_kwargs()
        opts.update(url.query)
        connect_args = [
            self._create_jdbc_url(url),
            url.username,
            url.password,
            self.jdbc_driver_name,
        ]
        return [connect_args, opts]

    def is_disconnect(self, e, connection, cursor):
        if not isinstance(e, self.dbapi.ProgrammingError):
            return False
        message = str(e)
        return ('connection is closed' in message or
                'cursor is closed' in message)

    def _get_server_version_info(self, connection):
        # use connection.connection.dbversion, and parse appropriately
        # to get a tuple
        raise NotImplementedError()
|
xiandiancloud/edxplaltfom-xusong | refs/heads/master | common/djangoapps/student/migrations/0034_auto__add_courseaccessrole.py | 36 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the CourseAccessRole table and switches
    three course_id columns from CharField to CourseKeyField.

    NOTE(review): auto-generated by South's ``schemamigration``; the bodies
    below should not be hand-edited once the migration has been applied
    anywhere, so this change only adds documentation.
    """

    def forwards(self, orm):
        """Applies the migration: creates 'CourseAccessRole' with a unique
        (user, org, course_id, role) constraint and alters course_id columns
        to CourseKeyField."""
        # Adding model 'CourseAccessRole'
        db.create_table('student_courseaccessrole', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('org', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=64, blank=True)),
            ('course_id', self.gf('xmodule_django.models.CourseKeyField')(db_index=True, max_length=255, blank=True)),
            ('role', self.gf('django.db.models.fields.CharField')(max_length=64, db_index=True)),
        ))
        db.send_create_signal('student', ['CourseAccessRole'])

        # Adding unique constraint on 'CourseAccessRole', fields ['user', 'org', 'course_id', 'role']
        db.create_unique('student_courseaccessrole', ['user_id', 'org', 'course_id', 'role'])

        # Changing field 'AnonymousUserId.course_id'
        db.alter_column('student_anonymoususerid', 'course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255))
        # Changing field 'CourseEnrollment.course_id'
        db.alter_column('student_courseenrollment', 'course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255))
        # Changing field 'CourseEnrollmentAllowed.course_id'
        db.alter_column('student_courseenrollmentallowed', 'course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255))

    def backwards(self, orm):
        """Reverses the migration: drops 'CourseAccessRole' and restores the
        course_id columns to plain CharField."""
        # Removing unique constraint on 'CourseAccessRole', fields ['user', 'org', 'course_id', 'role']
        db.delete_unique('student_courseaccessrole', ['user_id', 'org', 'course_id', 'role'])

        # Deleting model 'CourseAccessRole'
        db.delete_table('student_courseaccessrole')

        # Changing field 'AnonymousUserId.course_id'
        db.alter_column('student_anonymoususerid', 'course_id', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Changing field 'CourseEnrollment.course_id'
        db.alter_column('student_courseenrollment', 'course_id', self.gf('django.db.models.fields.CharField')(max_length=255))
        # Changing field 'CourseEnrollmentAllowed.course_id'
        db.alter_column('student_courseenrollmentallowed', 'course_id', self.gf('django.db.models.fields.CharField')(max_length=255))

    # Frozen ORM state captured when this migration was generated; South uses
    # it to build the fake `orm` object passed to forwards()/backwards().
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'student.anonymoususerid': {
            'Meta': {'object_name': 'AnonymousUserId'},
            'anonymous_user_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'student.courseaccessrole': {
            'Meta': {'unique_together': "(('user', 'org', 'course_id', 'role'),)", 'object_name': 'CourseAccessRole'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'org': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'blank': 'True'}),
            'role': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'student.courseenrollment': {
            'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'student.courseenrollmentallowed': {
            'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'CourseEnrollmentAllowed'},
            'auto_enroll': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'student.loginfailures': {
            'Meta': {'object_name': 'LoginFailures'},
            'failure_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lockout_until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'student.passwordhistory': {
            'Meta': {'object_name': 'PasswordHistory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'time_set': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'student.pendingemailchange': {
            'Meta': {'object_name': 'PendingEmailChange'},
            'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.pendingnamechange': {
            'Meta': {'object_name': 'PendingNameChange'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.registration': {
            'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
            'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.userprofile': {
            'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
            'allow_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'city': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
            'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
            'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
            'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
        },
        'student.userstanding': {
            'Meta': {'object_name': 'UserStanding'},
            'account_status': ('django.db.models.fields.CharField', [], {'max_length': '31', 'blank': 'True'}),
            'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'standing_last_changed_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'standing'", 'unique': 'True', 'to': "orm['auth.User']"})
        },
        'student.usertestgroup': {
            'Meta': {'object_name': 'UserTestGroup'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
        }
    }

    complete_apps = ['student']
tmcone/ycmd | refs/heads/master | cpp/ycm/tests/gmock/gtest/test/gtest_xml_output_unittest.py | 1815 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Command-line flags understood by Google Test binaries.
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
# File name Google Test writes when --gtest_output=xml is given without a path.
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
# Name of the helper binary whose XML output these tests verify.
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

# Hard-coded off: the expected XML below then contains no stack-trace text.
SUPPORTS_STACK_TRACES = False

if SUPPORTS_STACK_TRACES:
  STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
  STACK_TRACE_TEMPLATE = ''
# Expected XML for a full run of gtest_xml_output_unittest_.  In these
# templates '*' is a wildcard understood by the XML comparison helper, and
# %(stack)s is filled in from STACK_TRACE_TEMPLATE above.
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}

# Expected XML when only SuccessfulTest.* is selected via --gtest_filter.
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""

# Expected XML for a binary that defines no tests at all.
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests">
</testsuites>"""

GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)

# Typed/type-parameterized tests are compiled out on some platforms; probe
# the helper binary's test list to decide whether to expect them.
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
    [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
if SUPPORTS_TYPED_TESTS:
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""Verifies XML output for a Google Test binary without actual tests.
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)
def testTimestampValue(self):
"""Checks whether the timestamp attribute in the XML output is valid.
Runs a test program that generates an empty XML output, and checks if
the timestamp attribute in the testsuites tag is valid.
"""
actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
date_time_str = actual.documentElement.getAttributeNode('timestamp').value
# datetime.strptime() is only available in Python 2.5+ so we have to
# parse the expected datetime manually.
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
self.assertTrue(
re.match,
'XML datettime string %s has incorrect format' % date_time_str)
date_time_from_xml = datetime.datetime(
year=int(match.group(1)), month=int(match.group(2)),
day=int(match.group(3)), hour=int(match.group(4)),
minute=int(match.group(5)), second=int(match.group(6)))
time_delta = abs(datetime.datetime.now() - date_time_from_xml)
# timestamp value should be near the current local time
self.assertTrue(time_delta < datetime.timedelta(seconds=600),
'time_delta is %s' % time_delta)
actual.unlink()
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
'gtest_no_test_unittest')
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + 'out.xml')
if os.path.isfile(xml_path):
os.remove(xml_path)
command = [GTEST_PROGRAM_PATH,
'%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
'--shut_down_xml']
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
# p.signal is avalable only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
'%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def testFilteredTestXmlOutput(self):
"""Verifies XML output when a filter is applied.
Runs a test program that executes only some tests and verifies that
non-selected tests do not show up in the XML output.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
def _GetXmlOutput(self, gtest_prog_name, extra_args, expected_exit_code):
"""
Returns the xml output generated by running the program gtest_prog_name.
Furthermore, the program's exit code must be expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + 'out.xml')
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
extra_args)
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
'%s was killed by signal %d' % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, expected_exit_code))
actual = minidom.parse(xml_path)
return actual
  def _TestXmlOutput(self, gtest_prog_name, expected_xml,
                     expected_exit_code, extra_args=None):
    """Asserts that the XML produced by gtest_prog_name matches expected_xml.

    expected_xml is a string containing another XML document; both documents
    are normalized before comparison. The program's exit code must be
    expected_exit_code.
    """
    actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
                                expected_exit_code)
    expected = minidom.parseString(expected_xml)
    self.NormalizeXml(actual.documentElement)
    self.AssertEquivalentNodes(expected.documentElement,
                               actual.documentElement)
    # unlink() breaks the DOM's internal reference cycles so memory is freed.
    expected.unlink()
    actual.unlink()
if __name__ == '__main__':
  # Limit stack trace depth so the XML assertion output stays readable.
  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
  gtest_test_utils.Main()
|
agaffney/ansible | refs/heads/devel | test/lib/ansible_test/_internal/ci/shippable.py | 25 | """Support code for working with Shippable."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import time
from .. import types as t
from ..config import (
CommonConfig,
TestConfig,
)
from ..git import (
Git,
)
from ..http import (
HttpClient,
urlencode,
)
from ..util import (
ApplicationError,
display,
MissingEnvironmentVariable,
SubprocessError,
)
from . import (
AuthContext,
ChangeDetectionNotSupported,
CIProvider,
OpenSSLAuthHelper,
)
CODE = 'shippable'
class Shippable(CIProvider):
    """CI provider implementation for Shippable."""
    def __init__(self):
        self.auth = ShippableAuthHelper()

    @staticmethod
    def is_supported():  # type: () -> bool
        """Return True if this provider is supported in the current running environment."""
        return os.environ.get('SHIPPABLE') == 'true'

    @property
    def code(self):  # type: () -> str
        """Return a unique code representing this provider."""
        return CODE

    @property
    def name(self):  # type: () -> str
        """Return descriptive name for this provider."""
        return 'Shippable'

    def generate_resource_prefix(self):  # type: () -> str
        """Return a resource prefix specific to this CI provider.

        Raises MissingEnvironmentVariable when the expected Shippable build
        environment variables are not set.
        """
        try:
            prefix = 'shippable-%s-%s' % (
                os.environ['SHIPPABLE_BUILD_NUMBER'],
                os.environ['SHIPPABLE_JOB_NUMBER'],
            )
        except KeyError as ex:
            raise MissingEnvironmentVariable(name=ex.args[0])
        return prefix

    def get_base_branch(self):  # type: () -> str
        """Return the base branch (as an origin/ ref) or an empty string."""
        base_branch = os.environ.get('BASE_BRANCH')
        if base_branch:
            base_branch = 'origin/%s' % base_branch
        return base_branch or ''

    def detect_changes(self, args):  # type: (TestConfig) -> t.Optional[t.List[str]]
        """Initialize change detection.

        Returns the list of changed paths, or None when change detection
        could not determine a baseline (all tests should then run).
        """
        result = ShippableChanges(args)
        if result.is_pr:
            job_type = 'pull request'
        elif result.is_tag:
            job_type = 'tag'
        else:
            job_type = 'merge commit'
        display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))
        if not args.metadata.changes:
            args.metadata.populate_changes(result.diff)
        if result.paths is None:
            # There are several likely causes of this:
            # - First run on a new branch.
            # - Too many pull requests passed since the last merge run passed.
            display.warning('No successful commit found. All tests will be executed.')
        return result.paths

    def supports_core_ci_auth(self, context):  # type: (AuthContext) -> bool
        """Return True if Ansible Core CI is supported."""
        return True

    def prepare_core_ci_auth(self, context):  # type: (AuthContext) -> t.Dict[str, t.Any]
        """Return authentication details for Ansible Core CI."""
        try:
            request = dict(
                run_id=os.environ['SHIPPABLE_BUILD_ID'],
                job_number=int(os.environ['SHIPPABLE_JOB_NUMBER']),
            )
        except KeyError as ex:
            raise MissingEnvironmentVariable(name=ex.args[0])
        # Sign the request in-place so the service can verify its origin.
        self.auth.sign_request(request)
        auth = dict(
            shippable=request,
        )
        return auth

    def get_git_details(self, args):  # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
        """Return details about git in the current environment."""
        commit = os.environ.get('COMMIT')
        base_commit = os.environ.get('BASE_COMMIT')
        details = dict(
            base_commit=base_commit,
            commit=commit,
            merged_commit=self._get_merged_commit(args, commit),
        )
        return details

    # noinspection PyUnusedLocal
    def _get_merged_commit(self, args, commit):  # type: (CommonConfig, str) -> t.Optional[str]  # pylint: disable=unused-argument
        """Find the merged commit that should be present.

        Returns the other parent of the HEAD merge commit (the tip of the base
        branch), or None when HEAD is the commit itself (not a PR merge).
        Raises ApplicationError when the git state does not match a PR merge.
        """
        if not commit:
            return None
        git = Git()
        try:
            show_commit = git.run_git(['show', '--no-patch', '--no-abbrev', commit])
        except SubprocessError as ex:
            # This should only fail for pull requests where the commit does not exist.
            # Merge runs would fail much earlier when attempting to checkout the commit.
            raise ApplicationError('Commit %s was not found:\n\n%s\n\n'
                                   'GitHub may not have fully replicated the commit across their infrastructure.\n'
                                   'It is also possible the commit was removed by a force push between job creation and execution.\n'
                                   'Find the latest run for the pull request and restart failed jobs as needed.'
                                   % (commit, ex.stderr.strip()))
        head_commit = git.run_git(['show', '--no-patch', '--no-abbrev', 'HEAD'])
        if show_commit == head_commit:
            # Commit is HEAD, so this is not a pull request or the base branch for the pull request is up-to-date.
            return None
        match_merge = re.search(r'^Merge: (?P<parents>[0-9a-f]{40} [0-9a-f]{40})$', head_commit, flags=re.MULTILINE)
        if not match_merge:
            # The most likely scenarios resulting in a failure here are:
            # A new run should or does supersede this job, but it wasn't cancelled in time.
            # A job was superseded and then later restarted.
            raise ApplicationError('HEAD is not commit %s or a merge commit:\n\n%s\n\n'
                                   'This job has likely been superseded by another run due to additional commits being pushed.\n'
                                   'Find the latest run for the pull request and restart failed jobs as needed.'
                                   % (commit, head_commit.strip()))
        parents = set(match_merge.group('parents').split(' '))
        if len(parents) != 2:
            raise ApplicationError('HEAD is a %d-way octopus merge.' % len(parents))
        if commit not in parents:
            raise ApplicationError('Commit %s is not a parent of HEAD.' % commit)
        parents.remove(commit)
        last_commit = parents.pop()
        return last_commit
class ShippableAuthHelper(OpenSSLAuthHelper):
    """
    Authentication helper for Shippable.
    Based on OpenSSL since cryptography is not provided by the default Shippable environment.
    """
    def publish_public_key(self, public_key_pem):  # type: (str) -> None
        """Publish the given public key by writing it to the job log."""
        # display the public key as a single line to avoid mangling such as when prefixing each line with a timestamp
        display.info(public_key_pem.replace('\n', ' '))
        # allow time for logs to become available to reduce repeated API calls
        time.sleep(3)
class ShippableChanges:
    """Change information for Shippable build."""
    def __init__(self, args):  # type: (TestConfig) -> None
        self.args = args
        self.git = Git()
        try:
            self.branch = os.environ['BRANCH']
            self.is_pr = os.environ['IS_PULL_REQUEST'] == 'true'
            self.is_tag = os.environ['IS_GIT_TAG'] == 'true'
            self.commit = os.environ['COMMIT']
            self.project_id = os.environ['PROJECT_ID']
            self.commit_range = os.environ['SHIPPABLE_COMMIT_RANGE']
        except KeyError as ex:
            raise MissingEnvironmentVariable(name=ex.args[0])
        if self.is_tag:
            raise ChangeDetectionNotSupported('Change detection is not supported for tags.')
        if self.is_pr:
            # For pull requests the commit range provided by Shippable is authoritative.
            self.paths = sorted(self.git.get_diff_names([self.commit_range]))
            self.diff = self.git.get_diff([self.commit_range])
        else:
            # For merge runs, diff against the last commit with a successful merge run.
            commits = self.get_successful_merge_run_commits(self.project_id, self.branch)
            last_successful_commit = self.get_last_successful_commit(commits)
            if last_successful_commit:
                self.paths = sorted(self.git.get_diff_names([last_successful_commit, self.commit]))
                self.diff = self.git.get_diff([last_successful_commit, self.commit])
            else:
                # first run for branch
                self.paths = None  # act as though change detection not enabled, do not filter targets
                self.diff = []

    def get_successful_merge_run_commits(self, project_id, branch):  # type: (str, str) -> t.Set[str]
        """Return a set of recent successful merge commits from Shippable for the given project and branch."""
        parameters = dict(
            isPullRequest='false',
            projectIds=project_id,
            branch=branch,
        )
        url = 'https://api.shippable.com/runs?%s' % urlencode(parameters)
        http = HttpClient(self.args, always=True)
        response = http.get(url)
        result = response.json()
        if 'id' in result and result['id'] == 4004:
            # most likely due to a private project, which returns an HTTP 200 response with JSON
            display.warning('Unable to find project. Cannot determine changes. All tests will be executed.')
            return set()
        # statusCode 30 is Shippable's "success" status.
        commits = set(run['commitSha'] for run in result if run['statusCode'] == 30)
        return commits

    def get_last_successful_commit(self, successful_commits):  # type: (t.Set[str]) -> t.Optional[str]
        """Return the last successful commit from git history that is found in the given commit list, or None."""
        commit_history = self.git.get_rev_list(max_count=100)
        ordered_successful_commits = [commit for commit in commit_history if commit in successful_commits]
        last_successful_commit = ordered_successful_commits[0] if ordered_successful_commits else None
        return last_successful_commit
|
jiachenning/odoo | refs/heads/8.0 | addons/sale_service/models/__init__.py | 354 | import sale_service |
sld/computer_vision_workshop | refs/heads/master | Seminars/Fourier/headers.py | 1 | import matplotlib.pyplot as plt
import cv2
import numpy as np
from fourier_transform import FourierTransform
import sys
sys.path.insert(0, '../support')
from gray_img_processing_viewer import GrayImgProcessingViewer
|
cloudcache/namebench | refs/heads/master | libnamebench/nameserver_list.py | 173 | # Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes to work with bunches of nameservers."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import operator
import Queue
import random
import sys
import threading
import time
# 3rd party libraries
import dns.resolver
import conn_quality
import addr_util
import nameserver
import util
# Multiplier applied to max_servers when pre-trimming the candidate pool.
NS_CACHE_SLACK = 2
# Bump to invalidate previously cached nameserver data.
CACHE_VER = 4
PREFERRED_HEALTH_TIMEOUT_MULTIPLIER = 1.5
SYSTEM_HEALTH_TIMEOUT_MULTIPLIER = 2
# Servers slower than best_10_average * this are dropped as "too distant".
TOO_DISTANT_MULTIPLIER = 4.75
DEFAULT_MAX_SERVERS_TO_CHECK = 350
# If we can't ping more than this, go into slowmode.
MIN_PINGABLE_PERCENT = 5
MIN_HEALTHY_PERCENT = 10
SLOW_MODE_THREAD_COUNT = 6
# Windows behaves in unfortunate ways if too many threads are specified
DEFAULT_THREAD_COUNT = 35
MAX_INITIAL_HEALTH_THREAD_COUNT = 35
class OutgoingUdpInterception(Exception):
  """Raised when outgoing UDP DNS requests appear to be intercepted."""

  def __init__(self, value):
    # Keep the human-readable description available to callers.
    self.value = value

  def __str__(self):
    return '%r' % (self.value,)
class TooFewNameservers(Exception):
  """Raised when no healthy nameservers remain to test or benchmark."""

  def __init__(self, value):
    # Keep the human-readable description available to callers.
    self.value = value

  def __str__(self):
    return '%r' % (self.value,)
class ThreadFailure(Exception):
  """Raised when worker query threads could not be started.

  The original defined a no-op __init__ that carried no information; the
  default Exception initializer is sufficient and remains call-compatible
  with the no-argument ThreadFailure() raises used in this module.
  """
class QueryThreads(threading.Thread):
  """Worker thread: pulls nameservers from a queue and runs one check type."""

  def __init__(self, input_queue, results_queue, action_type, checks=None):
    """Args:
      input_queue: Queue of nameservers (or tuples for wildcard_check).
      results_queue: Queue to receive one result per input item.
      action_type: string naming which check to run (see run()).
      checks: optional sanity/censorship check data passed to the nameserver.
    """
    threading.Thread.__init__(self)
    self.input = input_queue
    self.action_type = action_type
    self.results = results_queue
    self.checks = checks
    self.halt = False

  def stop(self):
    # Cooperative shutdown: run() checks this flag between queue items.
    self.halt = True

  def run(self):
    """Iterate over the queue, processing each item."""
    while not self.halt and not self.input.empty():
      # check_wildcards is special: it has a tuple of two nameservers
      if self.action_type == 'wildcard_check':
        try:
          (ns, other_ns) = self.input.get_nowait()
        except Queue.Empty:
          return
        if ns.is_disabled or other_ns.is_disabled:
          # Still put a placeholder so the caller's result count adds up.
          self.results.put(None)
          continue
        else:
          self.results.put((ns, ns.TestSharedCache(other_ns)))
      # everything else only has a single nameserver.
      else:
        try:
          ns = self.input.get_nowait()
        except Queue.Empty:
          return
        if ns.is_disabled:
          self.results.put(None)
          continue
        if self.action_type == 'ping':
          self.results.put(ns.CheckHealth(fast_check=True))
        elif self.action_type == 'health':
          self.results.put(ns.CheckHealth(sanity_checks=self.checks))
        elif self.action_type == 'final':
          self.results.put(ns.CheckHealth(sanity_checks=self.checks, final_check=True))
        elif self.action_type == 'port_behavior':
          self.results.put(ns.CheckHealth(sanity_checks=self.checks, port_check=True))
        elif self.action_type == 'censorship':
          self.results.put(ns.CheckCensorship(self.checks))
        elif self.action_type == 'store_wildcards':
          self.results.put(ns.StoreWildcardCache())
        elif self.action_type == 'node_id':
          self.results.put(ns.UpdateNodeIds())
        elif self.action_type == 'update_hostname':
          self.results.put(ns.UpdateHostname())
        else:
          raise ValueError('Invalid action type: %s' % self.action_type)
class NameServers(list):
  """A list of nameserver objects with health-check and filtering logic."""

  def __init__(self, thread_count=DEFAULT_THREAD_COUNT, max_servers_to_check=DEFAULT_MAX_SERVERS_TO_CHECK):
    self._ips = set()  # IPs already present, used for de-duplication in append()
    self.thread_count = thread_count
    super(NameServers, self).__init__()
    self.client_latitude = None
    self.client_longitude = None
    self.client_country = None
    self.client_domain = None
    self.client_asn = None
    self.max_servers_to_check = max_servers_to_check
    # NOTE(review): msg() reads self.status_callback, which is never set
    # here - presumably assigned externally after construction. Verify.

  @property
  def visible_servers(self):
    """Servers not marked hidden."""
    return [x for x in self if not x.is_hidden]

  @property
  def enabled_servers(self):
    """Visible servers that have not been disabled."""
    return [x for x in self.visible_servers if not x.is_disabled]

  @property
  def disabled_servers(self):
    """Visible servers that have been disabled."""
    return [x for x in self.visible_servers if x.is_disabled]

  @property
  def enabled_keepers(self):
    return [x for x in self.enabled_servers if x.is_keeper]

  @property
  def enabled_supplemental(self):
    return [x for x in self.enabled_servers if not x.is_keeper]

  @property
  def supplemental_servers(self):
    return [x for x in self if not x.is_keeper]

  @property
  def country_servers(self):
    return [x for x in self if x.country_code == self.client_country]

  # Return a list of servers that match a particular tag
  def HasTag(self, tag):
    return [x for x in self if x.HasTag(tag)]

  # Return a list of visible servers that match a particular tag
  def HasVisibleTag(self, tag):
    return [x for x in self.visible_servers if x.HasTag(tag)]

  def SortEnabledByFastest(self):
    """Return a list of healthy servers in fastest-first order."""
    return sorted(self.enabled_servers, key=operator.attrgetter('check_average'))

  def SortEnabledByNearest(self):
    """Return a list of healthy servers in nearest-first order."""
    return sorted(self.enabled_servers, key=operator.attrgetter('fastest_check_duration'))

  def msg(self, msg, count=None, total=None, **kwargs):
    """Send a status message to the registered callback, or stdout."""
    if self.status_callback:
      self.status_callback(msg, count=count, total=total, **kwargs)
    else:
      print '%s [%s/%s]' % (msg, count, total)

  def _GetObjectForIP(self, ip):
    # Assumes the IP is already known (raises IndexError otherwise).
    return [x for x in self if x.ip == ip][0]

  def _MergeNameServerData(self, ns):
    """Merge tags/positions from a duplicate nameserver into the stored one."""
    existing = self._GetObjectForIP(ns.ip)
    existing.tags.update(ns.tags)
    if ns.system_position is not None:
      existing.system_position = ns.system_position
    elif ns.dhcp_position is not None:
      existing.dhcp_position = ns.dhcp_position

  def append(self, ns):
    """Add a nameserver to the list, guaranteeing uniqueness."""
    if ns.ip in self._ips:
      self._MergeNameServerData(ns)
    else:
      super(NameServers, self).append(ns)
      self._ips.add(ns.ip)

  def SetTimeouts(self, timeout, ping_timeout, health_timeout):
    """Apply timeouts to all servers, scaling up if the connection is slow.

    Raises OutgoingUdpInterception when the connection quality check
    detects DNS interception.
    """
    if len(self.enabled_servers) > 1:
      cq = conn_quality.ConnectionQuality(status_callback=self.status_callback)
      (intercepted, avg_latency, max_latency) = cq.CheckConnectionQuality()[0:3]
      if intercepted:
        raise OutgoingUdpInterception(
            'Your router or Internet Service Provider appears to be intercepting '
            'and redirecting all outgoing DNS requests. This means you cannot '
            'benchmark or utilize alternate DNS servers. Please adjust your '
            'router configuration or file a support request with your ISP.'
        )
      if (max_latency * 2) > health_timeout:
        health_timeout = max_latency * 2
        self.msg('Set health timeout to %.2fs' % health_timeout)
      # NOTE(review): the condition uses max_latency but the new value uses
      # avg_latency - looks intentional (cap vs. typical) but verify.
      if (max_latency * 1.1) > ping_timeout:
        ping_timeout = avg_latency * 1.4
        self.msg('Set ping timeout to %.2fs' % ping_timeout)
    for ns in self:
      ns.timeout = timeout
      ns.ping_timeout = ping_timeout
      ns.health_timeout = health_timeout

  def SetClientLocation(self, latitude, longitude, client_country):
    """Record the client's geographic location for distance calculations."""
    self.client_latitude = latitude
    self.client_longitude = longitude
    self.client_country = client_country

  def SetNetworkLocation(self, domain, asn):
    """Record the client's network identity (domain and ASN)."""
    self.client_domain = domain
    self.client_asn = asn

  def FilterByTag(self, include_tags=None, require_tags=None):
    """Hide servers not matching the given tag filters.

    Raises TooFewNameservers when nothing matches.
    """
    for ns in self:
      if include_tags:
        if not ns.MatchesTags(include_tags):
          ns.tags.add('hidden')
      if require_tags:
        for tag in require_tags:
          if not ns.HasTag(tag):
            ns.tags.add('hidden')
    if not self.enabled_servers:
      raise TooFewNameservers('No nameservers specified matched tags %s %s' % (include_tags, require_tags))
    if require_tags:
      self.msg("%s of %s nameservers have tags: %s (%s required)" %
               (len(self.visible_servers), len(self), ', '.join(include_tags),
                ', '.join(require_tags)))
    else:
      self.msg("%s of %s nameservers have tags: %s" %
               (len(self.visible_servers), len(self), ', '.join(include_tags)))

  # NOTE(review): missing 'self' parameter - calling this as a method would
  # raise TypeError. Appears unused in this module; confirm before fixing.
  def HasEnoughInCountryServers():
    return len(self.country_servers) > self.max_servers_to_check

  def NearbyServers(self, max_distance):
    """Yield regional servers within max_distance, nearest first."""
    srv_by_dist = sorted([(x.DistanceFromCoordinates(self.client_latitude, self.client_longitude), x)
                          for x in self.HasVisibleTag('regional')], key=operator.itemgetter(0))
    for distance, ns in srv_by_dist:
      if distance < float(max_distance):
        yield ns

  def AddNetworkTags(self):
    """Add network tags for each nameserver."""
    if self.client_domain:
      provider = self.client_domain.split('.')[0]
    else:
      provider = None
    for ns in self:
      ns.AddNetworkTags(self.client_domain, provider, self.client_asn, self.client_country)

  def AddLocalityTags(self, max_distance):
    """Tag up to max_servers_to_check nearby servers with 'nearby'."""
    if self.client_latitude:
      count = 0
      for ns in self.NearbyServers(max_distance):
        count += 1
        if count > self.max_servers_to_check:
          break
        ns.tags.add('nearby')

  def DisableSlowestSupplementalServers(self, multiplier=TOO_DISTANT_MULTIPLIER, max_servers=None,
                                        prefer_asn=None):
    """Disable servers who's fastest duration is multiplier * average of best 10 servers.

    NOTE(review): prefer_asn is accepted but unused here.
    """
    if not max_servers:
      max_servers = self.max_servers_to_check
    supplemental_servers = self.enabled_supplemental
    fastest = [x for x in self.SortEnabledByFastest()][:10]
    best_10 = util.CalculateListAverage([x.fastest_check_duration for x in fastest])
    cutoff = best_10 * multiplier
    self.msg("Removing secondary nameservers slower than %0.2fms (max=%s)" % (cutoff, max_servers))
    for (idx, ns) in enumerate(self.SortEnabledByFastest()):
      hide = False
      if ns not in supplemental_servers:
        continue
      if ns.fastest_check_duration > cutoff:
        hide = True
      if idx > max_servers:
        hide = True
      if hide:
        matches = ns.MatchesTags(nameserver.PROVIDER_TAGS)
        if matches:
          # Known providers are spared even when slow.
          self.msg("%s seems slow, but has tag: %s" % (ns, matches))
        else:
          ns.tags.add('hidden')

  def _FastestByLocalProvider(self):
    """Find the fastest DNS server by the client provider."""
    fastest = self.SortEnabledByFastest()
    # Give preference in tag order
    for tag in nameserver.PROVIDER_TAGS:
      for ns in fastest:
        if ns.HasTag(tag):
          return ns

  def HideBrokenIPV6Servers(self):
    """Most people don't care about these."""
    for ns in self.disabled_servers:
      if ns.HasTag('ipv6') and not ns.is_hidden:
        ns.tags.add('hidden')

  def HideSlowSupplementalServers(self, target_count):
    """Given a target count, delete nameservers that we do not plan to test."""
    # Magic secondary mixing algorithm:
    # - Half of them should be the "nearest" nameservers
    # - Half of them should be the "fastest average" nameservers
    self.msg("Hiding all but %s servers" % target_count)
    keepers = self.enabled_keepers
    isp_keeper = self._FastestByLocalProvider()
    if isp_keeper:
      self.msg("%s is the fastest DNS server provided by your ISP." % isp_keeper)
      keepers.append(isp_keeper)
    supplemental_servers_needed = target_count - len(keepers)
    if supplemental_servers_needed < 1 or not self.enabled_supplemental:
      return
    nearest_needed = int(supplemental_servers_needed / 2.0)
    if supplemental_servers_needed < 50:
      self.msg("Picking %s secondary servers to use (%s nearest, %s fastest)" %
               (supplemental_servers_needed, nearest_needed, supplemental_servers_needed - nearest_needed))
    # Phase two is picking the nearest secondary server
    supplemental_servers_to_keep = []
    for ns in self.SortEnabledByNearest():
      if ns not in keepers:
        if not supplemental_servers_to_keep and supplemental_servers_needed < 15:
          self.msg('%s appears to be the nearest regional (%0.2fms)' % (ns, ns.fastest_check_duration))
        supplemental_servers_to_keep.append(ns)
        if len(supplemental_servers_to_keep) >= nearest_needed:
          break
    # Phase three is hiding the slower secondary servers
    for ns in self.SortEnabledByFastest():
      if ns not in keepers and ns not in supplemental_servers_to_keep:
        supplemental_servers_to_keep.append(ns)
        if len(supplemental_servers_to_keep) >= supplemental_servers_needed:
          break
    for ns in self.supplemental_servers:
      if ns not in supplemental_servers_to_keep and ns not in keepers:
        ns.tags.add('hidden')

  def CheckHealth(self, sanity_checks=None, max_servers=11, prefer_asn=None):
    """Filter out unhealthy or slow replica servers.

    Raises TooFewNameservers when nothing survives the checks.
    """
    self.PingNameServers()
    if len(self.enabled_servers) > max_servers:
      self.DisableSlowestSupplementalServers(prefer_asn=prefer_asn)
    self.RunHealthCheckThreads(sanity_checks['primary'])
    if len(self.enabled_servers) > max_servers:
      self._DemoteSecondaryGlobalNameServers()
      self.HideSlowSupplementalServers(int(max_servers * NS_CACHE_SLACK))
    if len(self.enabled_servers) > 1:
      self.RunNodeIdThreads()
      self.CheckCacheCollusion()
      self.RunNodeIdThreads()
      self.HideSlowSupplementalServers(max_servers)
    self.RunFinalHealthCheckThreads(sanity_checks['secondary'])
    self.RunNodeIdThreads()
    self.HideBrokenIPV6Servers()
    # One more time!
    if len(self.enabled_servers) > 1:
      self.RunNodeIdThreads()
      self.CheckCacheCollusion()
    self.RunHostnameThreads()
    if not self.enabled_servers:
      raise TooFewNameservers('None of the nameservers tested are healthy')

  def CheckCensorship(self, sanity_checks):
    # Intentionally a no-op at the collection level.
    pass

  def _RemoveGlobalWarnings(self):
    """If all nameservers have the same warning, remove it. It's likely false."""
    ns_count = len(self.enabled_servers)
    seen_counts = {}
    # No sense in checking for duplicate warnings if we only have one server.
    if len(self.enabled_servers) == 1:
      return
    for ns in self.enabled_servers:
      for warning in ns.warnings:
        seen_counts[warning] = seen_counts.get(warning, 0) + 1
    for warning in seen_counts:
      if seen_counts[warning] == ns_count:
        self.msg('All nameservers have warning: %s (likely a false positive)' % warning)
        for ns in self.enabled_servers:
          ns.warnings.remove(warning)

  def _DemoteSecondaryGlobalNameServers(self):
    """For global nameservers, demote the slower IP to secondary status."""
    seen = {}
    for ns in self.SortEnabledByFastest():
      if ns.MatchesTags(['preferred', 'global']):
        if ns.provider in seen and not ns.MatchesTags(['system', 'specified']):
          faster_ns = seen[ns.provider]
          if ns.HasTag('preferred'):
            self.msg('Making %s the primary anycast - faster than %s by %2.2fms' %
                     (faster_ns.name_and_node, ns.name_and_node, ns.check_average - faster_ns.check_average))
          ns.tags.add('hidden')
        else:
          seen[ns.provider] = ns

  def ResetTestResults(self):
    """Reset the testng status of all disabled hosts."""
    return [ns.ResetTestStatus() for ns in self]

  def CheckCacheCollusion(self):
    """Mark if any nameservers share cache, especially if they are slower."""
    self.RunWildcardStoreThreads()
    sleepy_time = 4
    self.msg("Waiting %ss for TTL's to decrement." % sleepy_time)
    time.sleep(sleepy_time)
    test_combos = []
    good_nameservers = [x for x in self.SortEnabledByFastest()]
    for ns in good_nameservers:
      for compare_ns in good_nameservers:
        if ns != compare_ns:
          test_combos.append((compare_ns, ns))
    results = self.RunCacheCollusionThreads(test_combos)
    while not results.empty():
      (ns, shared_ns) = results.get()
      if shared_ns:
        ns.shared_with.add(shared_ns)
        shared_ns.shared_with.add(ns)
        if ns.is_disabled or shared_ns.is_disabled:
          continue
        # Keep the faster of the two cache-sharing replicas.
        if ns.check_average > shared_ns.check_average:
          slower = ns
          faster = shared_ns
        else:
          slower = shared_ns
          faster = ns
        if slower.system_position == 0:
          # Never disable the system's primary DNS server.
          faster.DisableWithMessage('Shares-cache with current primary DNS server')
          slower.warnings.add('Replica of %s' % faster.ip)
        elif slower.is_keeper and not faster.is_keeper:
          faster.DisableWithMessage('Replica of %s [%s]' % (slower.name, slower.ip))
          slower.warnings.add('Replica of %s [%s]' % (faster.name, faster.ip))
        else:
          diff = slower.check_average - faster.check_average
          self.msg("Disabling %s - slower replica of %s by %0.1fms." % (slower.name_and_node, faster.name_and_node, diff))
          slower.DisableWithMessage('Slower replica of %s [%s]' % (faster.name, faster.ip))
          faster.warnings.add('Replica of %s [%s]' % (slower.name, slower.ip))

  def _LaunchQueryThreads(self, action_type, status_message, items,
                          thread_count=None, **kwargs):
    """Launch query threads for a given action type.

    Args:
      action_type: a string describing an action type to pass
      status_message: Status to show during updates.
      items: A list of items to pass to the queue
      thread_count: How many threads to use (int)
      kwargs: Arguments to pass to QueryThreads()

    Returns:
      results_queue: Results from the query tests.

    Raises:
      TooFewNameservers: If no tested nameservers are healthy.
      ThreadFailure: If threads could not be started.
    """
    threads = []
    input_queue = Queue.Queue()
    results_queue = Queue.Queue()
    # items are usually nameservers
    random.shuffle(items)
    for item in items:
      input_queue.put(item)
    if not thread_count:
      thread_count = self.thread_count
    if thread_count > len(items):
      thread_count = len(items)
    status_message += ' (%s threads)' % thread_count
    self.msg(status_message, count=0, total=len(items))
    for _ in range(0, thread_count):
      thread = QueryThreads(input_queue, results_queue, action_type, **kwargs)
      try:
        thread.start()
      except:
        # Thread creation failed (likely an OS limit): stop what we started
        # and let the caller retry with fewer threads.
        self.msg("ThreadingError with %s threads: waiting for completion before retrying." % thread_count)
        for thread in threads:
          thread.stop()
          thread.join()
        raise ThreadFailure()
      threads.append(thread)
    while results_queue.qsize() != len(items):
      self.msg(status_message, count=results_queue.qsize(), total=len(items))
      time.sleep(0.5)
    self.msg(status_message, count=results_queue.qsize(), total=len(items))
    for thread in threads:
      thread.join()
    if not self.enabled_servers:
      raise TooFewNameservers('None of the %s nameservers tested are healthy' % len(self.visible_servers))
    return results_queue

  def RunCacheCollusionThreads(self, test_combos):
    """Schedule and manage threading for cache collusion checks."""
    return self._LaunchQueryThreads('wildcard_check', 'Running cache-sharing checks on %s servers' % len(self.enabled_servers), test_combos)

  def PingNameServers(self):
    """Quickly ping nameservers to see which are available."""
    start = datetime.datetime.now()
    test_servers = list(self.enabled_servers)
    try:
      results = self._LaunchQueryThreads('ping', 'Checking nameserver availability', test_servers)
    except ThreadFailure:
      self.msg("It looks like you couldn't handle %s threads, trying again with %s (slow)" % (self.thread_count, SLOW_MODE_THREAD_COUNT))
      self.thread_count = SLOW_MODE_THREAD_COUNT
      self.ResetTestResults()
      results = self._LaunchQueryThreads('ping', 'Checking nameserver availability', test_servers)
    success_rate = self.GetHealthyPercentage(compare_to=test_servers)
    if success_rate < MIN_PINGABLE_PERCENT:
      self.msg('How odd! Only %0.1f percent of name servers were pingable. Trying again with %s threads (slow)'
               % (success_rate, SLOW_MODE_THREAD_COUNT))
      self.ResetTestResults()
      self.thread_count = SLOW_MODE_THREAD_COUNT
      results = self._LaunchQueryThreads('ping', 'Checking nameserver availability', test_servers)
    if self.enabled_servers:
      self.msg('%s of %s servers are available (duration: %s)' %
               (len(self.enabled_servers), len(test_servers), datetime.datetime.now() - start))
    return results

  def GetHealthyPercentage(self, compare_to=None):
    """Return the percentage of enabled servers relative to compare_to."""
    if not compare_to:
      compare_to = self.visible_servers
    return (float(len(self.enabled_servers)) / float(len(compare_to))) * 100

  def RunHealthCheckThreads(self, checks, min_healthy_percent=MIN_HEALTHY_PERCENT):
    """Quickly ping nameservers to see which are healthy."""
    test_servers = self.enabled_servers
    status_msg = 'Running initial health checks on %s servers' % len(test_servers)
    if self.thread_count > MAX_INITIAL_HEALTH_THREAD_COUNT:
      thread_count = MAX_INITIAL_HEALTH_THREAD_COUNT
    else:
      thread_count = self.thread_count
    try:
      results = self._LaunchQueryThreads('health', status_msg, test_servers,
                                         checks=checks, thread_count=thread_count)
    except ThreadFailure:
      # NOTE(review): this retry re-runs the 'ping' action rather than
      # 'health' - possibly intentional as a degraded fallback, but verify.
      self.msg("It looks like you couldn't handle %s threads, trying again with %s (slow)" % (thread_count, SLOW_MODE_THREAD_COUNT))
      self.thread_count = SLOW_MODE_THREAD_COUNT
      self.ResetTestResults()
      results = self._LaunchQueryThreads('ping', 'Checking nameserver availability', list(self.visible_servers))
    success_rate = self.GetHealthyPercentage(compare_to=test_servers)
    if success_rate < min_healthy_percent:
      self.msg('How odd! Only %0.1f percent of name servers are healthy. Trying again with %s threads (slow)'
               % (success_rate, SLOW_MODE_THREAD_COUNT))
      self.ResetTestResults()
      self.thread_count = SLOW_MODE_THREAD_COUNT
      time.sleep(5)
      results = self._LaunchQueryThreads('health', status_msg, test_servers,
                                         checks=checks, thread_count=thread_count)
    self.msg('%s of %s tested name servers are healthy' %
             (len(self.enabled_servers), len(test_servers)))
    return results

  def RunNodeIdThreads(self):
    """Update node id status on all servers."""
    status_msg = 'Checking node ids on %s servers' % len(self.enabled_servers)
    return self._LaunchQueryThreads('node_id', status_msg, list(self.enabled_servers))

  def RunHostnameThreads(self):
    """Update hostnames on all servers."""
    status_msg = 'Updating hostnames on %s servers' % len(self.enabled_servers)
    return self._LaunchQueryThreads('update_hostname', status_msg, list(self.enabled_servers))

  def RunFinalHealthCheckThreads(self, checks):
    """Run the final (secondary) health checks on all enabled servers."""
    status_msg = 'Running final health checks on %s servers' % len(self.enabled_servers)
    return self._LaunchQueryThreads('final', status_msg, list(self.enabled_servers), checks=checks)

  def RunCensorshipCheckThreads(self, checks):
    """Run censorship checks on all enabled servers."""
    status_msg = 'Running censorship checks on %s servers' % len(self.enabled_servers)
    return self._LaunchQueryThreads('censorship', status_msg, list(self.enabled_servers), checks=checks)

  def RunPortBehaviorThreads(self):
    """Get port behavior data."""
    status_msg = 'Running port behavior checks on %s servers' % len(self.enabled_servers)
    return self._LaunchQueryThreads('port_behavior', status_msg, list(self.enabled_servers))

  def RunWildcardStoreThreads(self):
    """Store a wildcard cache value for all nameservers (using threads)."""
    status_msg = 'Waiting for wildcard cache queries from %s servers' % len(self.enabled_servers)
    return self._LaunchQueryThreads('store_wildcards', status_msg, list(self.enabled_servers))
|
pbatzing/AliPhysics | refs/heads/master | PWGJE/EMCALJetTasks/Tracks/analysis/base/__init__.py | 369 | #**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#************************************************************************** |
supertanglang/shadowsocks | refs/heads/master | setup.py | 47 | import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name="shadowsocks",
version="2.6.12",
license='http://www.apache.org/licenses/LICENSE-2.0',
description="A fast tunnel proxy that help you get through firewalls",
author='clowwindy',
author_email='clowwindy42@gmail.com',
url='https://github.com/shadowsocks/shadowsocks',
packages=['shadowsocks', 'shadowsocks.crypto'],
package_data={
'shadowsocks': ['README.rst', 'LICENSE']
},
install_requires=[],
entry_points="""
[console_scripts]
sslocal = shadowsocks.local:main
ssserver = shadowsocks.server:main
""",
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: Proxy Servers',
],
long_description=long_description,
)
|
toconn/Python3-Core | refs/heads/master | Source/ua/core/utils/sysutils.py | 1 | import os
import sys
# Name of the environment variable holding the module search path entries.
PYTHON_PATH_ENVIRONMENT_VARIABLE = "PYTHONPATH"
def python_paths():
    """Return the entries of the PYTHONPATH environment variable as a list.

    Returns an empty list when the variable is unset or empty (the original
    raised KeyError when PYTHONPATH was not defined).
    """
    python_path = os.environ.get(PYTHON_PATH_ENVIRONMENT_VARIABLE, "")
    if python_path:
        return python_path.split(os.path.pathsep)
    return []
def python_version_string():
    """Return the interpreter version as a ``major.minor.micro`` string."""
    info = sys.version_info
    return ".".join(str(part) for part in (info.major, info.minor, info.micro))
def python_version_full_strings():
    """Return the lines of ``sys.version`` as a list of strings."""
    full_version = sys.version
    return full_version.split("\n")
|
CyBHFal/plugin.video.freplay | refs/heads/cyb2 | resources/lib/channels/rtbf.py | 2 | # -*- coding: utf-8 -*-
from resources.lib import utils
import re
title = ['RTBF Auvio']
img = ['rtbf']
readyForUse = True
# Base URL of the RTBF Auvio site; the category paths below are appended to it.
url_root = 'http://www.rtbf.be/auvio'
# Category path (with numeric site id) -> display label.
categories = {
    '/categorie/series?id=35': 'Séries',
    '/categorie/sport?id=9': 'Sport',
    '/categorie/divertissement?id=29': 'Divertissement',
    '/categorie/culture?id=18': 'Culture',
    '/categorie/films?id=36': 'Films',
    '/categorie/sport/football?id=11': 'Football',
    '/categorie/vie-quotidienne?id=44': 'Vie quotidienne',
    '/categorie/musique?id=23': 'Musique',
    '/categorie/info?id=1': 'Info',
    '/categorie/humour?id=40': 'Humour',
    '/categorie/documentaires?id=31': 'Documentaires',
    '/categorie/enfants?id=32': 'Enfants'
}
def list_shows(channel, param):
    """Return the category folder items for the channel.

    Only the root level (param == 'none') has entries; each category becomes
    a 'shows' item of the form [channel, url, title, icon, mode].
    """
    shows = []
    if param == 'none':
        # .items() (instead of .iteritems()) works on both Python 2 and 3;
        # loop variables are renamed so they no longer shadow the module
        # globals `url`/`title`.
        for cat_url, cat_title in categories.items():
            shows.append([channel, cat_url, cat_title, '', 'shows'])
    return shows
def list_videos(channel, cat_url):
    """Download one category page and return its playable video entries.

    Each entry has the form [channel, url, title, icon, infoLabels, 'play'].
    """
    videos = []
    cat = cat_url[2:]
    filePath = utils.downloadCatalog(url_root + cat_url, 'rtbf' + cat + '.html', False, {})
    # Read and close the cached page instead of leaking the file handle.
    with open(filePath) as page:
        html = page.read()
    # Crude accent folding + newline stripping before the regex scan.
    html = html.replace('\xe9', 'e').replace('\xe0', 'a').replace('\n', ' ').replace('\r', '')
    match = re.compile(r'<h3 class="rtbf-media-item__title "><a href="(.*?)" title="(.*?)">', re.DOTALL).findall(html)
    # Loop variables renamed so they do not shadow the module globals
    # `url`/`title`.
    for video_url, video_title in match:
        video_title = utils.formatName(video_title)
        infoLabels = {"Title": video_title}
        videos.append([channel, video_url, video_title, '', infoLabels, 'play'])
    return videos
def getVideoURL(channel, url_video):
    """Resolve the final MP4 stream URL for one video page."""
    page = utils.get_webcontent(url_video)
    page = page.replace('\xe9', 'e').replace('\xe0', 'a').replace('\n', ' ').replace('\r', '')
    # The og:video meta tag carries the stream URL without its extension.
    base_url = re.findall(r'<meta property="og:video" content="(.*?).mp4"', page)[0]
    return base_url + '.mp4'
|
kennethd/moto | refs/heads/master | tests/helpers.py | 21 | from __future__ import unicode_literals
import boto
from nose.plugins.skip import SkipTest
import six
def version_tuple(v):
    """Convert a dotted version string like ``"2.9.6"`` into an int tuple."""
    return tuple(int(part) for part in v.split("."))
# Note: See https://github.com/spulec/moto/issues/201 for why this is a separate method.
def skip_test():
    """Unconditionally skip the current test by raising nose's SkipTest."""
    raise SkipTest
class requires_boto_gte(object):
    """Decorator for requiring boto version greater than or equal to 'version'"""

    def __init__(self, version):
        self.version = version

    def __call__(self, test):
        # Compare versions numerically (not lexically) component by component.
        if version_tuple(boto.__version__) >= version_tuple(self.version):
            return test
        return skip_test
class disable_on_py3(object):
    """Decorator that skips the wrapped test when running under Python 3."""

    def __call__(self, test):
        return skip_test if six.PY3 else test
|
Communities-Communications/cc-odoo | refs/heads/master | addons/website_sale_delivery/__openerp__.py | 321 | {
'name': 'eCommerce Delivery',
'category': 'Website',
'summary': 'Add Delivery Costs to Online Sales',
'website': 'https://www.odoo.com/page/e-commerce',
'version': '1.0',
'description': """
Delivery Costs
==============
""",
'author': 'OpenERP SA',
'depends': ['website_sale', 'delivery'],
'data': [
'views/website_sale_delivery.xml',
'views/website_sale_delivery_view.xml',
'security/ir.model.access.csv',
],
'demo': [],
'qweb': [],
'installable': True,
}
|
AndroidOpenDevelopment/android_external_chromium_org | refs/heads/lp | tools/site_compare/scrapers/firefox/__init__.py | 179 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Selects the appropriate scraper for Firefox."""
def GetScraper(version):
  """Returns the scraper module for the given version.

  Args:
    version: version string of IE, or None for most recent

  Returns:
    scrape module for given version
  """
  # The 'version' argument is deliberately unused: only one Firefox scraper
  # (firefox2) exists today, so every requested version maps to it.
  scraper_module = __import__("firefox2", globals(), locals(), [''])
  return scraper_module
# Smoke test: when run directly (Python 2 print statement), print the
# version attribute of the selected scraper module.
if __name__ == "__main__":
  print GetScraper("2.0.0.6").version
|
songgc/tensormate | refs/heads/master | tensormate/graph/__init__.py | 1 | from tensormate.graph.base import *
from tensormate.graph.data_pipeline import *
from tensormate.graph.image_graph import *
from tensormate.graph.decorators import *
from tensormate.graph.op_wrappers import *
|
ocadotechnology/boto | refs/heads/develop | boto/kms/__init__.py | 113 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
    """
    Get all available regions for the AWS Key Management Service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    # Imported locally, presumably to avoid a circular import between this
    # package and boto.kms.layer1 -- verify before hoisting to module level.
    from boto.kms.layer1 import KMSConnection
    return get_regions('kms', connection_cls=KMSConnection)
def connect_to_region(region_name, **kw_params):
    """Return a connection to the KMS endpoint named ``region_name``.

    Returns ``None`` when no region with that name exists.
    """
    matches = (region.connect(**kw_params)
               for region in regions()
               if region.name == region_name)
    # connect() is only invoked for the first matching region, lazily.
    return next(matches, None)
|
jaharkes/home-assistant | refs/heads/dev | homeassistant/components/device_tracker/locative.py | 3 | """
Support for the Locative platform.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.locative/
"""
import asyncio
from functools import partial
import logging
from homeassistant.const import (ATTR_LATITUDE, ATTR_LONGITUDE,
STATE_NOT_HOME,
HTTP_UNPROCESSABLE_ENTITY)
from homeassistant.components.http import HomeAssistantView
# pylint: disable=unused-import
from homeassistant.components.device_tracker import ( # NOQA
DOMAIN, PLATFORM_SCHEMA)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['http']
def setup_scanner(hass, config, see):
    """Setup an endpoint for the Locative application."""
    locative_view = LocativeView(hass, see)
    hass.http.register_view(locative_view)
    return True
class LocativeView(HomeAssistantView):
    """View to handle locative requests."""
    url = '/api/locative'
    name = 'api:locative'
    def __init__(self, hass, see):
        """Initialize Locative url endpoints."""
        super().__init__(hass)
        # Callback used to push device location updates into the tracker.
        self.see = see
    @asyncio.coroutine
    def get(self, request):
        """Locative message received as GET."""
        # NOTE(review): relies on aiohttp's request.GET mapping -- confirm it
        # exists in the aiohttp version pinned by this release.
        res = yield from self._handle(request.GET)
        return res
    @asyncio.coroutine
    def post(self, request):
        """Locative message received."""
        data = yield from request.post()
        res = yield from self._handle(data)
        return res
    @asyncio.coroutine
    # pylint: disable=too-many-return-statements
    def _handle(self, data):
        """Handle locative request.

        Validates the payload, then dispatches on the 'trigger' field
        ('enter' / 'exit' / 'test'); anything else is a 422 error.
        """
        # Each missing mandatory field short-circuits with a 422 response.
        if 'latitude' not in data or 'longitude' not in data:
            return ('Latitude and longitude not specified.',
                    HTTP_UNPROCESSABLE_ENTITY)
        if 'device' not in data:
            _LOGGER.error('Device id not specified.')
            return ('Device id not specified.',
                    HTTP_UNPROCESSABLE_ENTITY)
        if 'id' not in data:
            _LOGGER.error('Location id not specified.')
            return ('Location id not specified.',
                    HTTP_UNPROCESSABLE_ENTITY)
        if 'trigger' not in data:
            _LOGGER.error('Trigger is not specified.')
            return ('Trigger is not specified.',
                    HTTP_UNPROCESSABLE_ENTITY)
        # Dashes are stripped so the id forms a valid entity id suffix.
        device = data['device'].replace('-', '')
        location_name = data['id'].lower()
        direction = data['trigger']
        gps_location = (data[ATTR_LATITUDE], data[ATTR_LONGITUDE])
        if direction == 'enter':
            # see() is synchronous; run it in the executor so the event loop
            # is not blocked.
            yield from self.hass.loop.run_in_executor(
                None, partial(self.see, dev_id=device,
                              location_name=location_name,
                              gps=gps_location))
            return 'Setting location to {}'.format(location_name)
        elif direction == 'exit':
            current_state = self.hass.states.get(
                '{}.{}'.format(DOMAIN, device))
            # Only mark the tracker not_home when we are still inside the
            # zone being exited (or have no recorded state yet).
            if current_state is None or current_state.state == location_name:
                location_name = STATE_NOT_HOME
                yield from self.hass.loop.run_in_executor(
                    None, partial(self.see, dev_id=device,
                                  location_name=location_name,
                                  gps=gps_location))
                return 'Setting location to not home'
            else:
                # Ignore the message if it is telling us to exit a zone that we
                # aren't currently in. This occurs when a zone is entered
                # before the previous zone was exited. The enter message will
                # be sent first, then the exit message will be sent second.
                return 'Ignoring exit from {} (already in {})'.format(
                    location_name, current_state)
        elif direction == 'test':
            # In the app, a test message can be sent. Just return something to
            # the user to let them know that it works.
            return 'Received test message.'
        else:
            _LOGGER.error('Received unidentified message from Locative: %s',
                          direction)
            return ('Received unidentified message: {}'.format(direction),
                    HTTP_UNPROCESSABLE_ENTITY)
|
litchfield/django | refs/heads/master | tests/model_meta/models.py | 192 | from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Relation(models.Model):
    """Bare target model for the FK/M2M/GFK fields declared below."""
    pass
class AbstractPerson(models.Model):
    """Abstract base with one field of every kind (data/M2M/virtual/GFK/GR)."""
    # DATA fields
    data_abstract = models.CharField(max_length=10)
    fk_abstract = models.ForeignKey(Relation, models.CASCADE, related_name='fk_abstract_rel')
    # M2M fields
    m2m_abstract = models.ManyToManyField(Relation, related_name='m2m_abstract_rel')
    friends_abstract = models.ManyToManyField('self', related_name='friends_abstract', symmetrical=True)
    following_abstract = models.ManyToManyField('self', related_name='followers_abstract', symmetrical=False)
    # VIRTUAL fields
    data_not_concrete_abstract = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['abstract_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_abstract_rel',
    )
    # GFK fields
    content_type_abstract = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_abstract = models.PositiveIntegerField()
    content_object_abstract = GenericForeignKey('content_type_abstract', 'object_id_abstract')
    # GR fields
    generic_relation_abstract = GenericRelation(Relation)
    class Meta:
        abstract = True
class BasePerson(AbstractPerson):
    """Concrete base repeating every field kind with a ``_base`` suffix."""
    # DATA fields
    data_base = models.CharField(max_length=10)
    fk_base = models.ForeignKey(Relation, models.CASCADE, related_name='fk_base_rel')
    # M2M fields
    m2m_base = models.ManyToManyField(Relation, related_name='m2m_base_rel')
    friends_base = models.ManyToManyField('self', related_name='friends_base', symmetrical=True)
    following_base = models.ManyToManyField('self', related_name='followers_base', symmetrical=False)
    # VIRTUAL fields
    data_not_concrete_base = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['base_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_base_rel',
    )
    # GFK fields
    content_type_base = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_base = models.PositiveIntegerField()
    content_object_base = GenericForeignKey('content_type_base', 'object_id_base')
    # GR fields
    generic_relation_base = GenericRelation(Relation)
class Person(BasePerson):
    """Multi-table child of BasePerson adding ``_inherited``/``_concrete`` fields."""
    # DATA fields
    data_inherited = models.CharField(max_length=10)
    fk_inherited = models.ForeignKey(Relation, models.CASCADE, related_name='fk_concrete_rel')
    # M2M Fields
    m2m_inherited = models.ManyToManyField(Relation, related_name='m2m_concrete_rel')
    friends_inherited = models.ManyToManyField('self', related_name='friends_concrete', symmetrical=True)
    following_inherited = models.ManyToManyField('self', related_name='followers_concrete', symmetrical=False)
    # VIRTUAL fields
    data_not_concrete_inherited = models.ForeignObject(
        Relation,
        on_delete=models.CASCADE,
        from_fields=['model_non_concrete_id'],
        to_fields=['id'],
        related_name='fo_concrete_rel',
    )
    # GFK fields
    content_type_concrete = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    object_id_concrete = models.PositiveIntegerField()
    content_object_concrete = GenericForeignKey('content_type_concrete', 'object_id_concrete')
    # GR fields
    generic_relation_concrete = GenericRelation(Relation)
class ProxyPerson(Person):
    """Proxy of Person used to exercise proxy-model field introspection."""
    class Meta:
        proxy = True
class Relating(models.Model):
    """Relates to each Person model twice: once named, once hidden ('+')."""
    # ForeignKey to BasePerson
    baseperson = models.ForeignKey(BasePerson, models.CASCADE, related_name='relating_baseperson')
    baseperson_hidden = models.ForeignKey(BasePerson, models.CASCADE, related_name='+')
    # ForeignKey to Person
    person = models.ForeignKey(Person, models.CASCADE, related_name='relating_person')
    person_hidden = models.ForeignKey(Person, models.CASCADE, related_name='+')
    # ForeignKey to ProxyPerson
    proxyperson = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='relating_proxyperson')
    proxyperson_hidden = models.ForeignKey(ProxyPerson, models.CASCADE, related_name='+')
    # ManyToManyField to BasePerson
    basepeople = models.ManyToManyField(BasePerson, related_name='relating_basepeople')
    basepeople_hidden = models.ManyToManyField(BasePerson, related_name='+')
    # ManyToManyField to Person
    people = models.ManyToManyField(Person, related_name='relating_people')
    people_hidden = models.ManyToManyField(Person, related_name='+')
# ParentListTests models
class CommonAncestor(models.Model):
    """Root model reached twice by Child, once through each parent link."""
    pass
class FirstParent(CommonAncestor):
    # Explicit parent link with a custom name instead of the implicit *_ptr.
    first_ancestor = models.OneToOneField(CommonAncestor, models.SET_NULL, primary_key=True, parent_link=True)
class SecondParent(CommonAncestor):
    # Second, independent parent link to the same ancestor.
    second_ancestor = models.OneToOneField(CommonAncestor, models.SET_NULL, primary_key=True, parent_link=True)
class Child(FirstParent, SecondParent):
    """Diamond-inheritance child used by the parent-list tests."""
    pass
|
joakim-hove/django | refs/heads/master | tests/view_tests/tests/test_specials.py | 330 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import SimpleTestCase, override_settings
@override_settings(ROOT_URLCONF='view_tests.generic_urls')
class URLHandling(SimpleTestCase):
    """
    Tests for URL handling in views and responses.
    """
    # "%E4%B8%AD%E6%96%87" is the percent-encoded UTF-8 form of the
    # non-ASCII path segment the redirecting views point at.
    redirect_target = "/%E4%B8%AD%E6%96%87/target/"
    def test_nonascii_redirect(self):
        """
        Tests that a non-ASCII argument to HttpRedirect is handled properly.
        """
        response = self.client.get('/nonascii_redirect/')
        self.assertRedirects(response, self.redirect_target)
    def test_permanent_nonascii_redirect(self):
        """
        Tests that a non-ASCII argument to HttpPermanentRedirect is handled
        properly.
        """
        response = self.client.get('/permanent_nonascii_redirect/')
        self.assertRedirects(response, self.redirect_target, status_code=301)
|
loco-odoo/localizacion_co | refs/heads/master | openerp/addons/mrp_byproduct/mrp_byproduct.py | 108 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.osv import osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class mrp_subproduct(osv.osv):
    """Secondary (by-)product produced alongside a BoM's main product."""
    _name = 'mrp.subproduct'
    _description = 'Byproduct'
    _columns={
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'product_qty': fields.float('Product Qty', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
        'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
        'subproduct_type': fields.selection([('fixed','Fixed'),('variable','Variable')], 'Quantity Type', required=True, help="Define how the quantity of byproducts will be set on the production orders using this BoM.\
  'Fixed' depicts a situation where the quantity of created byproduct is always equal to the quantity set on the BoM, regardless of how many are created in the production order.\
  By opposition, 'Variable' means that the quantity will be computed as\
    '(quantity of byproduct set on the BoM / quantity of manufactured product set on the BoM * quantity of manufactured product in the production order.)'"),
        'bom_id': fields.many2one('mrp.bom', 'BoM', ondelete='cascade'),
    }
    _defaults={
        'subproduct_type': 'variable',
        'product_qty': lambda *a: 1.0,
    }
    def onchange_product_id(self, cr, uid, ids, product_id, context=None):
        """ Changes UoM if product_id changes.
        @param product_id: Changed product_id
        @return: Dictionary of changed values
        """
        if product_id:
            prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
            v = {'product_uom': prod.uom_id.id}
            return {'value': v}
        return {}
    def onchange_uom(self, cr, uid, ids, product_id, product_uom, context=None):
        """Warn when the chosen UoM's category differs from the product's own
        UoM category, and reset the field to the product default."""
        res = {'value':{}}
        if not product_uom or not product_id:
            return res
        product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
        uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
        if uom.category_id.id != product.uom_id.category_id.id:
            res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
            res['value'].update({'product_uom': product.uom_id.id})
        return res
class mrp_bom(osv.osv):
    """Extend mrp.bom with the list of by-products it yields."""
    _name = 'mrp.bom'
    _description = 'Bill of Material'
    _inherit='mrp.bom'
    _columns={
        'sub_products':fields.one2many('mrp.subproduct', 'bom_id', 'Byproducts', copy=True),
    }
class mrp_production(osv.osv):
    """Extend production orders to also create stock moves for by-products."""
    _description = 'Production'
    _inherit= 'mrp.production'
    def action_confirm(self, cr, uid, ids, context=None):
        """ Confirms production order and calculates quantity based on subproduct_type.
        @return: Newly generated picking Id.
        """
        move_obj = self.pool.get('stock.move')
        picking_id = super(mrp_production,self).action_confirm(cr, uid, ids, context=context)
        product_uom_obj = self.pool.get('product.uom')
        for production in self.browse(cr, uid, ids):
            source = production.product_id.property_stock_production.id
            if not production.bom_id:
                continue
            for sub_product in production.bom_id.sub_products:
                # Convert the ordered quantity into the BoM's UoM so the
                # variable-type ratio below is computed in consistent units.
                product_uom_factor = product_uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, production.bom_id.product_uom.id)
                qty1 = sub_product.product_qty
                qty2 = production.product_uos and production.product_uos_qty or False
                product_uos_factor = 0.0
                if qty2 and production.bom_id.product_uos.id:
                    product_uos_factor = product_uom_obj._compute_qty(cr, uid, production.product_uos.id, production.product_uos_qty, production.bom_id.product_uos.id)
                if sub_product.subproduct_type == 'variable':
                    # 'variable' by-products scale proportionally to the
                    # ordered quantity; 'fixed' ones keep the BoM quantity.
                    if production.product_qty:
                        qty1 *= product_uom_factor / (production.bom_id.product_qty or 1.0)
                    if production.product_uos_qty:
                        qty2 *= product_uos_factor / (production.bom_id.product_uos_qty or 1.0)
                data = {
                    'name': 'PROD:'+production.name,
                    'date': production.date_planned,
                    'product_id': sub_product.product_id.id,
                    'product_uom_qty': qty1,
                    'product_uom': sub_product.product_uom.id,
                    'product_uos_qty': qty2,
                    'product_uos': production.product_uos and production.product_uos.id or False,
                    'location_id': source,
                    'location_dest_id': production.location_dest_id.id,
                    'move_dest_id': production.move_prod_id.id,
                    'production_id': production.id
                }
                move_id = move_obj.create(cr, uid, data, context=context)
                move_obj.action_confirm(cr, uid, [move_id], context=context)
        return picking_id
    def _get_subproduct_factor(self, cr, uid, production_id, move_id=None, context=None):
        """Compute the factor to compute the qty of procucts to produce for the given production_id. By default,
        it's always equal to the quantity encoded in the production order or the production wizard, but with
        the module mrp_byproduct installed it can differ for byproducts having type 'variable'.
        :param production_id: ID of the mrp.order
        :param move_id: ID of the stock move that needs to be produced. Identify the product to produce.
        :return: The factor to apply to the quantity that we should produce for the given production order and stock move.
        """
        sub_obj = self.pool.get('mrp.subproduct')
        move_obj = self.pool.get('stock.move')
        production_obj = self.pool.get('mrp.production')
        production_browse = production_obj.browse(cr, uid, production_id, context=context)
        move_browse = move_obj.browse(cr, uid, move_id, context=context)
        subproduct_factor = 1
        # Only 'variable' by-product lines alter the factor; 'fixed' lines
        # fall through to the default behaviour of the super class.
        sub_id = sub_obj.search(cr, uid,[('product_id', '=', move_browse.product_id.id),('bom_id', '=', production_browse.bom_id.id), ('subproduct_type', '=', 'variable')], context=context)
        if sub_id:
            subproduct_record = sub_obj.browse(cr ,uid, sub_id[0], context=context)
            if subproduct_record.bom_id.product_qty:
                subproduct_factor = subproduct_record.product_qty / subproduct_record.bom_id.product_qty
                return subproduct_factor
        return super(mrp_production, self)._get_subproduct_factor(cr, uid, production_id, move_id, context=context)
class change_production_qty(osv.osv_memory):
    """Extend the quantity-change wizard to rescale by-product moves too."""
    _inherit = 'change.production.qty'
    def _update_product_to_produce(self, cr, uid, prod, qty, context=None):
        """Rewrite the finished-product move quantity, then every by-product
        move quantity, for production order ``prod`` set to ``qty``."""
        bom_obj = self.pool.get('mrp.bom')
        move_lines_obj = self.pool.get('stock.move')
        prod_obj = self.pool.get('mrp.production')
        for m in prod.move_created_ids:
            if m.product_id.id == prod.product_id.id:
                move_lines_obj.write(cr, uid, [m.id], {'product_uom_qty': qty})
            else:
                for sub_product_line in prod.bom_id.sub_products:
                    if sub_product_line.product_id.id == m.product_id.id:
                        factor = prod_obj._get_subproduct_factor(cr, uid, prod.id, m.id, context=context)
                        # 'variable' by-products scale with the new quantity;
                        # 'fixed' ones keep the BoM quantity unchanged.
                        subproduct_qty = sub_product_line.subproduct_type == 'variable' and qty * factor or sub_product_line.product_qty
                        move_lines_obj.write(cr, uid, [m.id], {'product_uom_qty': subproduct_qty})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
albertgafic/l10n-spain | refs/heads/8.0 | __unported__/l10n_es_payment_order/wizard/csb_32.py | 6 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2006 ACYSOS S.L. (http://acysos.com) All Rights Reserved.
# Pedro Tarrafeta <pedro@acysos.com>
# Copyright (c) 2008 Pablo Rocandio. All Rights Reserved.
# Copyright (c) 2009 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Jordi Esteve <jesteve@zikzakmedia.com>
# $Id$
#
# Corregido para instalación TinyERP estándar 4.2.0: Zikzakmedia S.L. 2008
# Jordi Esteve <jesteve@zikzakmedia.com>
#
# Añadidas cuentas de remesas y tipos de pago. 2008
# Pablo Rocandio <salbet@gmail.com>
#
# Rehecho de nuevo para instalación OpenERP 5.0.0 sobre account_payment_extension: Zikzakmedia S.L. 2009
# Jordi Esteve <jesteve@zikzakmedia.com>
#
# Refactorización. Acysos S.L. (http://www.acysos.com) 2012
# Ignacio Ibeas <ignacio@acysos.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from datetime import datetime
from openerp.tools.translate import _
from log import *
class csb_32(orm.Model):
    """Generator for Spanish CSB 32 remittance files (discounted bills).

    Each ``_registro*``/``_cabecera*`` method builds one fixed-width record of
    exactly 150 characters plus CRLF (152 bytes total) and raises ``Log`` when
    the layout does not add up.
    """
    _name = 'csb.32'
    _auto = False
    def _cabecera_fichero_32(self, cr, uid):
        """Build the file header record (type 02)."""
        converter = self.pool.get('payment.converter.spain')
        texto = '0265'
        texto += ' '
        texto += datetime.today().strftime('%d%m%y')
        texto += converter.digits_only(cr, uid, self.order.reference )[-4:]
        texto += ' '*35
        texto += converter.digits_only(cr, uid, self.order.mode.bank_id.acc_number )[:8]
        texto += ' '*6
        texto += ' '*61
        texto += ' '*24
        texto += '\r\n'
        if len(texto) != 152:
            raise Log(_('Configuration error:\n\nThe line "%s" is not 150 characters long:\n%s') % ('Cabecera fichero 32', texto), True)
        return texto
    def _cabecera_remesa_32(self, cr, uid, context):
        """Build the remittance header record (type 11)."""
        converter = self.pool.get('payment.converter.spain')
        # A: operation identification
        texto = '1165'
        texto += ' '
        # B: duplicate control (date + sequence number)
        texto += datetime.today().strftime('%d%m%y')
        texto += '0001'
        texto += ' '*12
        # C: presenter data
        texto += converter.convert(cr, uid, self.order.mode.cedente, 15, context) # TODO: assignor ("cedente") identifier -- exact meaning unclear
        texto += '1' # truncated-bills ("efectos truncados") indicator
        texto += ' '*21
        # D: account number, repeated three times per the record layout
        texto += converter.digits_only(cr, uid, self.order.mode.bank_id.acc_number )
        texto += converter.digits_only(cr, uid, self.order.mode.bank_id.acc_number )
        texto += converter.digits_only(cr, uid, self.order.mode.bank_id.acc_number )
        texto += ' ' + ' '*24
        texto += '\r\n'
        if len(texto) != 152:
            raise Log(_('Configuration error:\n\nThe line "%s" is not 150 characters long:\n%s') % ('Cabecera remesa 32', texto), True)
        return texto
    def _registro_individual_i_32(self, cr, uid, recibo, context):
        """Build the first individual record (type 25) for one receipt."""
        converter = self.pool.get('payment.converter.spain')
        # A: operation identification
        texto = '2565'
        texto += ' '
        # B: bill reference (sequence number + date)
        texto += converter.convert(cr, uid, self.num_recibos+1, 15, context)
        texto += datetime.today().strftime('%d%m%y')
        texto += '0001'
        # C: province code of the presenting bank
        state = self.order.mode.bank_id.state_id and self.order.mode.bank_id.state_id.code or False
        texto += converter.convert(cr, uid, state, 2, context)
        texto += ' '*7
        texto += ' '
        # D: city of the presenting bank
        texto += converter.convert(cr, uid, self.order.mode.bank_id.city, 20, context)
        texto += ' '
        # E: amount and maturity date of the receipt
        texto += ' '*24
        texto += converter.convert(cr, uid, abs(recibo['amount']), 9, context)
        texto += ' '*15
        texto += datetime.strptime( recibo['ml_maturity_date'], '%Y-%m-%d').strftime('%d%m%y')
        texto += ' '*(6+6+1+4+16)
        texto += '\r\n'
        if len(texto) != 152:
            raise Log(_('Configuration error:\n\nThe line "%s" is not 150 characters long:\n%s') % ('Registro individual I 32', texto), True)
        return texto
    def _registro_individual_ii_32(self, cr, uid, recibo, context):
        """Build the second individual record (type 26): document and parties."""
        converter = self.pool.get('payment.converter.spain')
        # A: operation identification
        texto = '2665'
        texto += ' '
        # B: bill data
        texto += converter.convert(cr, uid, self.num_recibos+1, 15, context)
        texto += ' '
        texto += '2' # document class: 2 = receipt ("recibo")
        texto += '000000'
        texto += '1'
        # 0 = no charges, 1 = with charges, 9 = express order of notarial protest
        texto += '0'
        # C: debtor bank account (zero-padded to 20 positions)
        ccc = recibo['bank_id'] and recibo['bank_id'].acc_number or ''
        if ccc:
            texto += ccc[:20].zfill(20)
        else:
            texto += ' '*20
        # D: assignor and debtor names
        texto += converter.convert(cr, uid, self.order.mode.partner_id.name, 34, context)
        texto += converter.convert(cr, uid, recibo['partner_id'].name, 34, context)
        texto += ' '*30
        texto += '\r\n'
        if len(texto) != 152:
            raise Log(_('Configuration error:\n\nThe line "%s" is not 150 characters long:\n%s') % ('Registro individual II 32', texto), True)
        return texto
    def _registro_individual_iii_32(self, cr, uid, recibo, context):
        """Build the third individual record (type 27): debtor address and VAT."""
        converter = self.pool.get('payment.converter.spain')
        # A: operation identification
        texto = '2765'
        texto += ' '
        # B: sequence number + debtor address
        texto += converter.convert(cr, uid, self.num_recibos+1, 15, context)
        texto += ' '
        addresses = self.pool.get('res.partner').address_get(cr, uid, [recibo['partner_id'].id] )
        #if not addresses:
        # print "NO ADDRESSES"
        address = self.pool.get('res.partner').browse(cr, uid, addresses['default'], context)
        texto += converter.convert( cr, uid, address.street, 34, context )
        texto += converter.convert( cr, uid, address.zip, 5, context )
        texto += converter.convert( cr, uid, address.city, 20, context )
        texto += converter.convert( cr, uid, address.state_id and address.state_id.code or False, 2, context )
        texto += '0'*7
        # C: debtor VAT number (country prefix stripped)
        vat = recibo['partner_id'].vat and recibo['partner_id'].vat[2:] or False
        texto += converter.convert(cr, uid, vat, 9, context)
        texto += ' '*50
        texto += '\r\n'
        if len(texto) != 152:
            raise Log(_('Configuration error:\n\nThe line "%s" is not 150 characters long:\n%s') % ('Registro individual III 32', texto), True)
        return texto
    def _registro_fin_remesa_32(self, cr, uid, context):
        """Build the remittance trailer record (type 71) with totals."""
        converter = self.pool.get('payment.converter.spain')
        # A: operation identification
        texto = '7165'
        texto += ' '
        # B: duplicate control
        texto += datetime.today().strftime('%d%m%y')
        texto += '0001'
        texto += ' '*(6+6)
        # C: unused
        texto += ' '*37
        # D: amount totals
        texto += ' '*10
        texto += converter.convert( cr, uid, abs(self.order.total), 10, context )
        texto += ' '*(10+6+7+6+6+6)
        # E: file read-control counters (record count and receipt count)
        texto += ' '*5
        texto += converter.convert(cr, uid, (self.num_recibos*3) + 2, 7, context)
        texto += converter.convert(cr, uid, self.num_recibos, 6, context)
        texto += ' '*6
        texto += '\r\n'
        if len(texto) != 152:
            raise Log(_('Configuration error:\n\nThe line "%s" is not 150 characters long:\n%s') % ('Fin remesa 32', texto), True)
        return texto
    def _registro_fin_fichero_32(self, cr, uid, context):
        """Build the file trailer record (type 98) with global totals."""
        converter = self.pool.get('payment.converter.spain')
        # A: operation identification
        texto = '9865'
        texto += ' '
        # B: unused
        texto += ' '*22
        # C: unused
        texto += ' '*37
        # D: amount totals
        texto += ' '*10
        texto += converter.convert( cr, uid, abs(self.order.total), 10, context )
        texto += ' '*(10+6+7+6+6+6)
        # E: file read-control counters (remittances, records, receipts)
        texto += '00001'
        texto += converter.convert(cr, uid, (self.num_recibos*3) + 3, 7, context)
        texto += converter.convert(cr, uid, self.num_recibos, 6, context)
        texto += ' '*6
        texto += '\r\n'
        if len(texto) != 152:
            raise Log(_('Configuration error:\n\nThe line "%s" is not 150 characters long:\n%s') % ('Fin fichero 32', texto), True)
        return texto
    def create_file(self, cr, uid, order, lines, context):
        """Assemble the complete CSB 32 file for ``order``.

        Layout: file header, remittance header, three individual records per
        receipt in ``lines``, remittance trailer, file trailer.
        """
        self.order = order
        txt_remesa = ''
        self.num_recibos = 0
        self.num_lineas_opc = 0
        txt_remesa += self._cabecera_fichero_32(cr,uid)
        txt_remesa += self._cabecera_remesa_32(cr, uid, context)
        for recibo in lines:
            txt_remesa += self._registro_individual_i_32(cr, uid, recibo, context)
            txt_remesa += self._registro_individual_ii_32(cr, uid, recibo, context)
            txt_remesa += self._registro_individual_iii_32(cr, uid, recibo, context)
            # num_recibos feeds the per-record sequence numbers above, so it
            # must be incremented only after the three records are emitted.
            self.num_recibos = self.num_recibos + 1
        txt_remesa += self._registro_fin_remesa_32(cr, uid, context)
        txt_remesa += self._registro_fin_fichero_32(cr, uid, context)
        return txt_remesa
|
edx/edx-oauth2-provider | refs/heads/master | edx_oauth2_provider/tests/base.py | 1 | # pylint: disable=missing-docstring
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import uuid
import jwt
import provider.scope
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.test import TestCase
from six.moves.urllib.parse import urlparse # pylint: disable=import-error, wrong-import-order
from ..constants import AUTHORIZED_CLIENTS_SESSION_KEY
from ..models import TrustedClient
from .factories import AccessTokenFactory, ClientFactory, TrustedClientFactory, UserFactory
from .util import normpath
class BaseTestCase(TestCase):
    """Shared fixtures: an OAuth2 client, a test user and trust helpers."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        self.client_secret = 'some_secret'
        self.password = 'some_password'
        self.auth_client = ClientFactory(client_secret=self.client_secret)
        self.user_factory = UserFactory
        self.user = None
        self.access_token = None
        self.set_user(self.make_user())

    def make_user(self):
        """Create a fresh user with the shared test password."""
        return self.user_factory(password=self.password)

    def set_user(self, user):
        self.user = user

    def set_trusted(self, client, trusted=True):
        """Mark *client* as trusted (or revoke its trusted status)."""
        if not trusted:
            TrustedClient.objects.filter(client=client).delete()
            return
        TrustedClientFactory.create(client=client)
class OAuth2TestCase(BaseTestCase):
    """Drives the plain OAuth2 authorization-code flow against the test client."""

    def setUp(self):
        super(OAuth2TestCase, self).setUp()
        self.nonce = str(uuid.uuid4())

    def login_and_authorize(self, scope=None, claims=None, trusted=False, validate_session=True):
        """ Login into client using OAuth2 authorization flow. """
        self.set_trusted(self.auth_client, trusted)
        self.client.login(username=self.user.username, password=self.password)

        client_id = self.auth_client.client_id
        request_data = {
            'client_id': client_id,
            'redirect_uri': self.auth_client.redirect_uri,
            'response_type': 'code',
            'state': 'some_state',
            'nonce': self.nonce,
        }
        _add_values(request_data, 'id_token', scope, claims)

        capture_response = self.client.get(reverse('oauth2:capture'), request_data)
        self.assertEqual(302, capture_response.status_code)

        response = self.client.get(reverse('oauth2:authorize'), request_data)
        if validate_session:
            self.assertListEqual(self.client.session[AUTHORIZED_CLIENTS_SESSION_KEY], [client_id])
        return response

    def get_access_token_response(self, scope=None, claims=None):
        """ Get a new access token using the OAuth2 authorization flow. """
        response = self.login_and_authorize(scope, claims, trusted=True)
        self.assertEqual(302, response.status_code)
        self.assertEqual(reverse('oauth2:redirect'), normpath(response['Location']))

        response = self.client.get(reverse('oauth2:redirect'))
        self.assertEqual(302, response.status_code)

        # The authorization code is delivered as a query parameter on the
        # redirect back to the client.
        code = QueryDict(urlparse(response['Location']).query)['code']
        token_request = {
            'grant_type': 'authorization_code',
            'client_id': self.auth_client.client_id,
            'client_secret': self.client_secret,
            'code': code,
        }
        _add_values(token_request, 'id_token', scope, claims)
        return self.client.post(reverse('oauth2:access_token'), token_request)
class IDTokenTestCase(OAuth2TestCase):
    """Helpers for tests that drive the OIDC flow and inspect the id_token."""

    def get_id_token_values(self, scope=None, claims=None):
        """ Get a new id_token using the OIDC authorization flow.

        Returns a ``(scopes, claims)`` tuple.  *scope* must include
        ``openid`` for the provider to issue an id_token.
        """
        self.assertIn('openid', scope.split())

        response = self.get_access_token_response(scope, claims)
        self.assertEqual(response.status_code, 200)

        values = json.loads(response.content.decode('utf-8'))
        self.assertIn('access_token', values)

        id_token = values['id_token']
        secret = self.auth_client.client_secret
        audience = self.auth_client.client_id
        self.assertValidIDToken(id_token, secret, audience)

        scopes = values['scope'].split()
        claims = self.parse_id_token(id_token)

        # Should always be included
        self.assertIn('iss', claims)
        self.assertIn('sub', claims)

        return scopes, claims

    def parse_id_token(self, id_token):
        """Decode the JWT payload without verifying its signature."""
        claims = jwt.decode(id_token, verify=False)
        return claims

    def assertValidIDToken(self, id_token, secret, audience):
        """Assert *id_token* is a JWT correctly signed with *secret* for *audience*.

        Uses ``self.fail`` instead of a bare ``assert False`` so the check is
        not stripped when Python runs with ``-O`` and failures carry a
        message.  Catches ``jwt.InvalidTokenError`` (the PyJWT base class) so
        audience/expiry problems are reported as test failures too, not just
        ``DecodeError``.
        """
        try:
            jwt.decode(id_token, secret, audience=audience)
        except jwt.InvalidTokenError as exc:
            self.fail('Invalid id_token: %s' % exc)
class UserInfoTestCase(BaseTestCase):
    """Helpers for tests of the OIDC userinfo endpoint."""

    def setUp(self):
        super(UserInfoTestCase, self).setUp()
        self.path = reverse('oauth2:user_info')
        self.set_user(self.user)

    def set_user(self, user):
        """Install *user* and mint an access token bound to the test client."""
        super(UserInfoTestCase, self).set_user(user)
        self.access_token = AccessTokenFactory(user=self.user, client=self.auth_client)

    def set_access_token_scope(self, scope):
        self.access_token.scope = provider.scope.to_int(*scope.split())
        self.access_token.save()  # pylint: disable=no-member

    def get_with_authorization(self, path, access_token=None, payload=None):
        """GET *path*, attaching a Bearer authorization header when a token is given."""
        headers = {'HTTP_AUTHORIZATION': 'Bearer %s' % access_token} if access_token else {}
        return self.client.get(path, payload, **headers)

    def get_userinfo(self, token=None, scope=None, claims=None):
        """Call the userinfo endpoint; return (response, decoded JSON body)."""
        request_data = _add_values({}, 'userinfo', scope, claims)
        response = self.get_with_authorization(self.path, token, request_data)
        return response, json.loads(response.content.decode('utf-8'))
def _add_values(data, endpoint, scope=None, claims=None):
    """Attach optional ``scope``/``claims`` request parameters to *data*.

    Claims are JSON-serialized under the given endpoint key.  The dict is
    modified in place and returned for convenience.
    """
    extra = {}
    if scope:
        extra['scope'] = scope
    if claims:
        extra['claims'] = json.dumps({endpoint: claims})
    data.update(extra)
    return data
|
Kelym/adversarial-reinforcement-learning | refs/heads/master | Environment.py | 1 | from __future__ import print_function
import numpy as np
import random
'''
The Environment Class shall all respond to two calls
- act
Do an action at the current state
Move to the next state
Return the reward
- observe
Return a single number representing the current state
The following is a naive environment that allows
1) reading a rectangle shaped maze from file, where 'X' is block, 'O' is the reward place, (optionally) 'S' specifies a start point.
2) randomly generate a maze with given shape and number of traps.
It will attempt to re-place the agent and (optionally) refresh the maze when the agent reaches the goal, so the agent can play with the environment.
'''
class Toy():
    """Naive rectangular maze environment.

    Cells: -1 is a trap ('X'), 1 is the goal ('O'), 0 is free space.
    Implements the two-call environment protocol:

    - ``act(action)``: apply an action, move the agent, return the reward
    - ``observe()``:   return one integer encoding maze layout + position

    When the agent reaches the goal it is re-placed randomly so play can
    continue on the same maze.
    """

    def __init__(self):
        self.actions = 4
        self.movement = [[0, 1], [1, 0], [0, -1], [-1, 0]]
        self.actions_name = ['Right', 'Down', 'Left', 'Up']
        self.debug = 0

    def read_maze(self, fname):
        """Load a maze from a text file ('X' trap, 'O' goal, optional 'S' start)."""
        content = open(fname).read().splitlines()
        self.maze = np.array([[1 if x == 'O' else -1 if x == 'X' else 0
                               for x in row] for row in content])
        self.maze_id = Toy.generate_maze_id(self.maze)
        self.maze_pretty = content
        self.xbound = len(self.maze)
        self.ybound = len(self.maze[0])
        # Upper bound on observe() values: every block layout times target
        # position times agent position.
        self.states = (2 ** (self.xbound * self.ybound) *
                       (self.xbound * self.ybound) ** 2)
        # Search for an explicit start symbol.
        start = [x for x in content if 'S' in x]
        if len(start) > 0:
            start = start[0]
            # BUG FIX: numpy arrays have no .index(); locate the start row in
            # the original text lines instead.
            self.set_position(content.index(start), start.index('S'))
        else:
            if not self.random_start():
                print("The given maze does not have a valid start point")

    def generate_random_maze(self, x, y, trap):
        """Create a random x-by-y maze with *trap* traps and one goal cell."""
        self.xbound = x
        self.ybound = y
        # BUG FIX: use the same state-count formula as read_maze (the original
        # omitted one factor of x*y and undercounted).
        self.states = 2 ** (x * y) * (x * y) ** 2
        while True:
            # BUG FIX: was np.zeros((i, j)) with undefined i, j (NameError).
            self.maze = np.zeros((x, y))
            candidates = np.random.choice(range(0, x * y), trap + 1, replace=False)
            np.put(self.maze, candidates[0:-1], -1)
            # BUG FIX: set a single flat cell; `self.maze[candidates[-1]] = 1`
            # indexed (and overwrote) an entire row of the 2-D array.
            np.put(self.maze, candidates[-1], 1)
            if self.random_start():
                self.maze_id = Toy.generate_maze_id(self.maze)
                # BUG FIX: render the goal as the string 'O' (matching the
                # read_maze format); the original used the int 1, which
                # breaks ''.join().
                self.maze_pretty = [''.join(['X' if v == -1 else '.' if v == 0 else 'O'
                                             for v in row]) for row in self.maze]
                break

    @staticmethod
    def generate_maze_id(maze):
        """Encode the maze layout (blocks + goal position) as one integer.

        The result is a multiple of sx*sy, so the agent position can be added
        to it without collisions (see observe()).
        """
        (sx, sy) = maze.shape
        blocks = sum(1 << i for i, b in enumerate(maze.flatten()) if b == -1)
        (x, y) = np.where(maze == 1)
        target_pos = x[0] * sy + y[0]
        return ((target_pos * (1 << (sx * sy)) + blocks) *
                (sx * sy))

    def random_start(self):
        """Given a maze with target and obstacles, put the agent to a random
        starting point.  Return True when it succeeds and False if no cell
        from which the goal is reachable exists.
        """
        (x, y) = np.where(self.maze == 1)
        init = (x[0], y[0])
        spfa_pending = [init]
        cur = 0
        visited = np.zeros((self.xbound, self.ybound))
        # BFS outwards from the goal over free cells.
        while cur < len(spfa_pending):
            x, y = spfa_pending[cur]
            cur += 1
            for move in self.movement:
                i = x + move[0]
                j = y + move[1]
                if (not self.out_of_maze(i, j) and
                        self.maze[i][j] == 0 and visited[i][j] == 0):
                    visited[i][j] = 1
                    spfa_pending.append((i, j))
        if cur == 1:
            # Only the goal itself was reachable: no valid start exists.
            return False
        sel = np.random.randint(1, len(spfa_pending))
        self.set_position(spfa_pending[sel][0], spfa_pending[sel][1])
        return True

    def act(self, action):
        """Apply *action* and return the reward.

        Rewards: -100 for stepping out of bounds or into a trap (the agent
        does not move), +100 for reaching the goal (the agent is then
        re-placed randomly), -0.5 otherwise (time penalty).
        """
        x = self.cur[0]
        y = self.cur[1]
        new_x = x + self.movement[action][0]
        new_y = y + self.movement[action][1]
        if (self.debug > 3):
            print(x, ' ', y, ' + ', self.actions_name[action], ' -> ', new_x, ' ', new_y)
        if self.out_of_maze(new_x, new_y) or self.is_trap(new_x, new_y):
            return -100
        self.set_position(new_x, new_y)
        if self.reach_target(new_x, new_y):
            # Target reached: same maze, multiple shots.
            self.random_start()
            return 100
        # Punish for time passed.
        return -0.5

    def observe(self):
        """Return a single integer uniquely identifying (maze, agent position)."""
        pos = self.cur[0] * self.ybound + self.cur[1]
        return pos + self.maze_id

    def print_position(self, state):
        # Decode the agent position back out of a state number.
        cur = state % (self.xbound * self.ybound)
        print(cur // self.ybound, cur % self.ybound)

    def print_maze(self):
        print('\n'.join(self.maze_pretty))

    def print_state(self, state):
        self.print_maze()
        self.print_position(state)

    def print_action(self, action):
        print(self.actions_name[action], end=' ')

    def set_position(self, x, y):
        self.cur = (x, y)

    def out_of_maze(self, x, y):
        return (x < 0 or x >= self.xbound or y < 0 or y >= self.ybound)

    def is_trap(self, x, y):
        return self.maze[x][y] < 0

    def reach_target(self, x, y):
        return self.maze[x][y] > 0
|
Flowerowl/Crab | refs/heads/master | scikits/crab/metrics/base.py | 11 | #-*- coding:utf-8 -*-
"""Utilities to evaluate the predictive performance of the recommenders
"""
# Authors: Marcel Caraciolo <marcel@muricoca.com>
# License: BSD Style.
class RecommenderEvaluator(object):
    """Abstract interface for judging the quality of recommendations.

    Concrete implementations return scores whose range depends on the
    implementation, but lower always means better, with 0 being the
    lowest / best possible evaluation (a perfect match).
    """

    def evaluate(self, recommender, metrics=None, **kwargs):
        """Evaluate *recommender* offline.

        Parameters
        ----------
        recommender: a BaseRecommender instance to be evaluated.

        metrics: [None|'rmse'|'f1score'|'precision'|'recall'|'nmae'|'mae']
            None evaluates every available metric; otherwise only the
            named metric is computed.

        Returns
        -------
        Scores describing how well the estimated preferences match the
        real ones.
        """
        raise NotImplementedError("cannot instantiate Abstract Base Class")

    def evaluate_online(self, metrics=None, **kwargs):
        """Evaluate recommendation predictions online.

        Parameters
        ----------
        metrics: [None|'rmse'|'f1score'|'precision'|'recall'|'nmae'|'mae']
            None evaluates every available metric; otherwise only the
            named metric is computed.

        Returns
        -------
        Scores describing how well the estimated preferences match the
        real ones.
        """
        raise NotImplementedError("cannot instantiate Abstract Base Class")

    def evaluate_on_split(self, metrics=None, **kwargs):
        """Evaluate on the folds of a dataset split.

        Parameters
        ----------
        metrics: [None|'rmse'|'f1score'|'precision'|'recall'|'nmae'|'mae']
            None evaluates every available metric; otherwise only the
            named metric is computed.

        Returns
        -------
        Scores describing how well the estimated preferences match the
        real ones.
        """
        raise NotImplementedError("cannot instantiate Abstract Base Class")
|
zachriggle/idapython | refs/heads/master | pywraps/deploy_all.py | 16 | # Please use the same tag for the same .i file
# That means if many insertions are going to happen in one
# given .i file then don't use more than code marking tag
print "\n-------- DEPLOY started --------------------------------------------------\n"
# Maps a human-readable deployment name to:
#   tag: code-marking tag used inside the target file to locate the splice point
#   src: fragment source files (relative to this script) to insert
#   tgt: the file that receives the tagged fragments
deploys = {
    "idaapi (common functions, notifywhen)" : {
        "tag" : "py_idaapi",
        "src" : ["py_cvt.hpp", "py_idaapi.hpp", "py_idaapi.py", "py_notifywhen.hpp", "py_notifywhen.py"],
        "tgt" : "../swig/idaapi.i"
    },
    "View (common)" : {
        "tag" : "py_view_base",
        "src" : ["py_view_base.hpp", "py_view_base.py"],
        "tgt" : "../swig/view.i"
    },
    "IDAView" : {
        "tag" : "py_idaview",
        "src" : ["py_idaview.hpp", "py_idaview.py"],
        "tgt" : "../swig/view.i"
    },
    "Graph" : {
        "tag" : "py_graph",
        "src" : ["py_graph.hpp", "py_graph.py"],
        "tgt" : "../swig/graph.i"
    },
    "custview" : {
        "tag" : "py_custviewer",
        "src" : ["py_custview.py","py_custview.hpp"],
        "tgt" : "../swig/kernwin.i"
    },
    "plgform" : {
        "tag" : "py_plgform",
        "src" : ["py_plgform.hpp","py_plgform.py"],
        "tgt" : "../swig/kernwin.i"
    },
    "expr" : {
        "tag" : "py_expr",
        "src" : ["py_expr.hpp","py_expr.py"],
        "tgt" : "../swig/expr.i"
    },
    "cli" : {
        "tag" : "py_cli",
        "src" : ["py_cli.py","py_cli.hpp"],
        "tgt" : "../swig/kernwin.i"
    },
    "Loader" : {
        "tag" : "py_loader",
        "src" : ["py_loader.hpp"],
        "tgt" : "../swig/loader.i"
    },
    "kernwin, choose2, askusingform" : {
        "tag" : "py_kernwin",
        "src" : ["py_kernwin.hpp","py_kernwin.py","py_choose.hpp","py_choose2.hpp","py_choose2.py","py_askusingform.hpp","py_askusingform.py"],
        "tgt" : "../swig/kernwin.i"
    },
    "idd" : {
        "tag" : "py_idd",
        "src" : ["py_dbg.hpp","py_appcall.py"],
        "tgt" : "../swig/idd.i"
    },
    "idd (python)" : {
        "tag" : "py_idd_2",
        "src" : ["py_dbg.py"],
        "tgt" : "../swig/idd.i"
    },
    "nalt" : {
        "tag" : "py_nalt",
        "src" : ["py_nalt.hpp","py_nalt.py"],
        "tgt" : "../swig/nalt.i"
    },
    "dbg" : {
        "tag" : "py_dbg",
        "src" : ["py_dbg.hpp"],
        "tgt" : "../swig/dbg.i"
    },
    "linput/diskio" : {
        "tag" : "py_diskio",
        "src" : ["py_linput.hpp","py_diskio.hpp","py_diskio.py"],
        "tgt" : "../swig/diskio.i"
    },
    "name" : {
        "tag" : "py_name",
        "src" : ["py_name.hpp","py_name.py"],
        "tgt" : "../swig/name.i"
    },
    "qfile" : {
        "tag" : "py_qfile",
        "src" : ["py_qfile.hpp"],
        "tgt" : "../swig/fpro.i"
    },
    "bytes" : {
        "tag" : "py_bytes",
        "src" : ["py_bytes.hpp","py_custdata.py","py_custdata.hpp"],
        "tgt" : "../swig/bytes.i"
    },
    "typeinf" : {
        "tag" : "py_typeinf",
        "src" : ["py_typeinf.hpp","py_typeinf.py"],
        "tgt" : "../swig/typeinf.i"
    },
    "gdl" : {
        "tag" : "py_gdl",
        "src" : ["py_gdl.py"],
        "tgt" : "../swig/gdl.i"
    },
    "ua" : {
        "tag" : "py_ua",
        "src" : ["py_ua.hpp","py_ua.py"],
        "tgt" : "../swig/ua.i"
    },
    "idp" : {
        "tag" : "py_idp",
        "src" : ["py_idp.hpp"],
        "tgt" : "../swig/idp.i"
    },
    "lines" : {
        "tag" : "py_lines",
        "src" : ["py_lines.hpp","py_lines.py"],
        "tgt" : "../swig/lines.i"
    },
    "registry" : {
        "tag" : "py_registry",
        "src" : ["py_registry.hpp"],
        "tgt" : "../swig/registry.i"
    },
    "pc_win32_appcall" : {
        "tag" : "appcalltest",
        "src" : ["py_appcall.py"],
        "tgt" : "../../../tests/input/pc_win32_appcall.pe.hints"
    },
    "ex_custdata example" : {
        "tag" : "ex_custdata",
        "src" : ["../examples/ex_custdata.py"],
        "tgt" : "../../../tests/input/pc_win32_custdata1.pe.hints"
    },
    "ex_choose2" : {
        "tag" : "py_choose2ex1",
        "src" : ["py_choose2.py"],
        "tgt" : "../examples/ex_choose2.py"
    },
    "ex_formchooser" : {
        "tag" : "ex_formchooser",
        "src" : ["py_askusingform.py"],
        "tgt" : "../../formchooser/formchooser.py"
    },
    "ex_askusingform" : {
        "tag" : "ex_askusingform",
        "src" : ["py_askusingform.py"],
        "tgt" : "../examples/ex_askusingform.py"
    },
    "ex_cli example" : {
        "tag" : "ex_cli_ex1",
        "src" : ["py_cli.py"],
        "tgt" : "../examples/ex_cli.py"
    },
    "ex_expr example" : {
        "tag" : "ex_expr",
        "src" : ["py_expr.py"],
        "tgt" : "../examples/ex_expr.py"
    },
    "ex_custview.py example" : {
        "tag" : "py_custviewerex1",
        "src" : ["py_custview.py"],
        "tgt" : "../examples/ex_custview.py"
    }
}
import deploy
# Deployments are independent of each other, so the (arbitrary) dict
# iteration order does not matter.
for name in deploys:
    data = deploys[name]
    print "Deploying %s" % name
    deploy.deploy(data["tag"], data["src"], data["tgt"])
|
Venturi/oldcms | refs/heads/master | env/lib/python2.7/site-packages/djangocms_blog/migrations/0004_auto_20150108_1435.py | 14 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import filer.fields.image
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter the Post image/thumbnail foreign keys.

    Re-declares the three image-related FKs with ``on_delete=SET_NULL``
    (plus ``null=True, blank=True``), so deleting a filer image or a
    thumbnail option no longer cascades into deleting blog posts.
    """

    dependencies = [
        ('djangocms_blog', '0003_auto_20141201_2252'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='main_image',
            field=filer.fields.image.FilerImageField(related_name='djangocms_blog_post_image', on_delete=django.db.models.deletion.SET_NULL, verbose_name='Main image', blank=True, to='filer.Image', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='post',
            name='main_image_full',
            field=models.ForeignKey(related_name='djangocms_blog_post_full', on_delete=django.db.models.deletion.SET_NULL, verbose_name='Main image full', blank=True, to='cmsplugin_filer_image.ThumbnailOption', null=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='post',
            name='main_image_thumbnail',
            field=models.ForeignKey(related_name='djangocms_blog_post_thumbnail', on_delete=django.db.models.deletion.SET_NULL, verbose_name='Main image thumbnail', blank=True, to='cmsplugin_filer_image.ThumbnailOption', null=True),
            preserve_default=True,
        ),
    ]
|
ted-gould/nova | refs/heads/master | nova/tests/unit/scheduler/filters/test_exact_ram_filter.py | 41 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler.filters import exact_ram_filter
from nova import test
from nova.tests.unit.scheduler import fakes
class TestRamFilter(test.NoDBTestCase):
    """Tests for the ExactRamFilter scheduler host filter."""

    def setUp(self):
        super(TestRamFilter, self).setUp()
        self.filt_cls = exact_ram_filter.ExactRamFilter()

    def _get_host(self, host_attributes):
        """Build a fake host state carrying *host_attributes*."""
        return fakes.FakeHostState('host1', 'node1', host_attributes)

    def test_exact_ram_filter_passes(self):
        # Free RAM exactly equals the requested amount -> host passes.
        spec = {'instance_type': {'memory_mb': 1024}}
        host = self._get_host({'free_ram_mb': 1024})
        self.assertTrue(self.filt_cls.host_passes(host, spec))

    def test_exact_ram_filter_fails(self):
        # Free RAM differs from the requested amount -> host is rejected.
        spec = {'instance_type': {'memory_mb': 512}}
        host = self._get_host({'free_ram_mb': 1024})
        self.assertFalse(self.filt_cls.host_passes(host, spec))
|
markkerzner/nn_kove | refs/heads/master | hadoop/src/docs/relnotes.py | 36 | #!/usr/bin/python
# Run this command as:
#
# jira.sh -s https://issues.apache.org/jira -u $user -p $pw \
# -a getIssueList --search \
# "project in (HADOOP,HDFS,MAPREDUCE) and fixVersion = '$vers' and resolution = Fixed" \
# | ./relnotes.py > $vers.html
import csv
import re
import subprocess
import sys
# Matches the " (n)" count suffix that JIRA appends to names, e.g. "Alice (3)".
namePattern = re.compile(r' \([0-9]+\)')
# Characters that must be escaped when emitting HTML, and their replacements.
htmlSpecialPattern = re.compile(r'[&<>\'"\n]')
quotes = {'<' : '<', '>': '>', '"': '"', "'": ''',
          '&': '&', '\n': '<br>'}
def clean(str):
    """Strip JIRA's trailing " (n)" count suffix from a name.

    NOTE: the parameter shadows the builtin ``str``; kept for interface
    compatibility with existing callers.
    """
    return namePattern.sub("", str)
def formatComponents(str):
    """Render a component list as "(comp1, comp2)" or "" when empty.

    Strips JIRA's " (n)" count suffixes and quote characters first.
    NOTE: the parameter shadows the builtin ``str``; kept for compatibility.
    """
    cleaned = namePattern.sub('', str).replace("'", "")
    if cleaned:
        return "(" + cleaned + ")"
    return ""
def quoteHtmlChar(m):
    """Return the HTML entity for the special character matched by *m*."""
    return quotes[m.group(0)]
def quoteHtml(str):
    """HTML-escape the special characters in a string (incl. newlines -> <br>).

    NOTE: the parameter shadows the builtin ``str``; kept for compatibility.
    """
    return htmlSpecialPattern.sub(quoteHtmlChar, str)
def readReleaseNote(id, default):
    """Fetch the 'Release Note' field of JIRA issue *id* via the jira.sh CLI.

    Falls back to *default* (the issue description) when the field is absent
    or empty.  ``id`` shadows the builtin; kept for compatibility.
    NOTE(review): the password appears on the command line and is visible in
    the process list -- acceptable for a release script, but worth knowing.
    """
    cmd = ['jira.sh', '-s', 'https://issues.apache.org/jira', '-u', user,
           '-p', password, '-a', 'getFieldValue', '--issue', id, '--field',
           'Release Note']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=sys.stderr)
    lines = proc.stdout.readlines()
    # throw away first line
    if len(lines) < 2 or len(lines[1]) < 2:
        return default
    else:
        # Join the remaining lines; slicing off the first and last two
        # characters strips the CLI's surrounding quote/newline -- presumably
        # matching jira.sh's output framing (TODO confirm).
        return "\n".join(lines[1:])[1:-2]
# Credentials and release version come from the command line:
#   relnotes.py <user> <password> <version>
# (the usage comment at the top of the file describes an older pipe-based
# invocation -- the argv form below is what actually runs)
user = sys.argv[1]
password = sys.argv[2]
vers = sys.argv[3]
# Query all fixed issues for this release across the three Hadoop projects.
cmd = ['jira.sh', '-s', 'https://issues.apache.org/jira', '-u', user, '-p',
       password, '-a', 'getIssueList', '--search',
       "project in (HADOOP,HDFS,MAPREDUCE) and fixVersion = '" + vers +
       "' and resolution = Fixed"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=sys.stderr)
reader = csv.reader(proc.stdout, skipinitialspace=True)
# throw away number of issues
reader.next()
# read the columns
columns = reader.next()
# Resolve column positions by name so reordered CLI output still works.
key = columns.index('Key')
type = columns.index('Type')
priority = columns.index('Priority')
assignee = columns.index('Assignee')
reporter = columns.index('Reporter')
summary = columns.index('Summary')
description = columns.index('Description')
components = columns.index('Components')
# Emit one HTML list item per issue; user-supplied text is HTML-escaped.
print "<html><body><ul>"
for row in reader:
  row_descr = readReleaseNote(row[key], row[description])
  print \
    '<li> <a href="https://issues.apache.org/jira/browse/%s">%s</a>.\n' \
    '     %s %s reported by %s and fixed by %s %s<br>\n' \
    '     <b>%s</b><br>\n' \
    '     <blockquote>%s</blockquote></li>\n' \
    % (row[key], row[key], clean(row[priority]), clean(row[type]).lower(),
       row[reporter], row[assignee], formatComponents(row[components]),
       quoteHtml(row[summary]), quoteHtml(row_descr))
print "</ul>\n</body></html>"
|
musicrighter/CIS422-P2 | refs/heads/master | env/lib/python3.4/site-packages/werkzeug/serving.py | 116 | # -*- coding: utf-8 -*-
"""
werkzeug.serving
~~~~~~~~~~~~~~~~
There are many ways to serve a WSGI application. While you're developing
it you usually don't want a full blown webserver like Apache but a simple
standalone one. From Python 2.5 onwards there is the `wsgiref`_ server in
the standard library. If you're using older versions of Python you can
download the package from the cheeseshop.
However there are some caveats. Sourcecode won't reload itself when
changed and each time you kill the server using ``^C`` you get an
`KeyboardInterrupt` error. While the latter is easy to solve the first
one can be a pain in the ass in some situations.
The easiest way is creating a small ``start-myproject.py`` that runs the
application::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from myproject import make_app
from werkzeug.serving import run_simple
app = make_app(...)
run_simple('localhost', 8080, app, use_reloader=True)
You can also pass it a `extra_files` keyword argument with a list of
additional files (like configuration files) you want to observe.
For bigger applications you should consider using `werkzeug.script`
instead of a simple start file.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import os
import socket
import sys
import ssl
import signal
def _get_openssl_crypto_module():
    """Return ``OpenSSL.crypto``, with a friendly error if pyOpenSSL is missing."""
    try:
        from OpenSSL import crypto
    except ImportError:
        raise TypeError('Using ad-hoc certificates requires the pyOpenSSL '
                        'library.')
    return crypto
try:
from SocketServer import ThreadingMixIn, ForkingMixIn
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
except ImportError:
from socketserver import ThreadingMixIn, ForkingMixIn
from http.server import HTTPServer, BaseHTTPRequestHandler
import werkzeug
from werkzeug._internal import _log
from werkzeug._compat import reraise, wsgi_encoding_dance
from werkzeug.urls import url_parse, url_unquote
from werkzeug.exceptions import InternalServerError
class WSGIRequestHandler(BaseHTTPRequestHandler, object):

    """A request handler that implements WSGI dispatching."""

    @property
    def server_version(self):
        # Reported in the Server header and on error pages.
        return 'Werkzeug/' + werkzeug.__version__

    def make_environ(self):
        # Build the WSGI environ dict for the current request.
        request_url = url_parse(self.path)

        def shutdown_server():
            # Exposed to applications as 'werkzeug.server.shutdown'; the
            # serve loop checks this flag after the request finishes.
            self.server.shutdown_signal = True

        # Old-style and/or conditional: 'https' only when an SSL context is set.
        url_scheme = self.server.ssl_context is None and 'http' or 'https'
        path_info = url_unquote(request_url.path)

        environ = {
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': url_scheme,
            'wsgi.input': self.rfile,
            'wsgi.errors': sys.stderr,
            'wsgi.multithread': self.server.multithread,
            'wsgi.multiprocess': self.server.multiprocess,
            'wsgi.run_once': False,
            'werkzeug.server.shutdown': shutdown_server,
            'SERVER_SOFTWARE': self.server_version,
            'REQUEST_METHOD': self.command,
            'SCRIPT_NAME': '',
            'PATH_INFO': wsgi_encoding_dance(path_info),
            'QUERY_STRING': wsgi_encoding_dance(request_url.query),
            'CONTENT_TYPE': self.headers.get('Content-Type', ''),
            'CONTENT_LENGTH': self.headers.get('Content-Length', ''),
            'REMOTE_ADDR': self.client_address[0],
            'REMOTE_PORT': self.client_address[1],
            'SERVER_NAME': self.server.server_address[0],
            'SERVER_PORT': str(self.server.server_address[1]),
            'SERVER_PROTOCOL': self.request_version
        }

        # Copy request headers into the environ as HTTP_*; content type and
        # length already have their own canonical keys above.
        for key, value in self.headers.items():
            key = 'HTTP_' + key.upper().replace('-', '_')
            if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
                environ[key] = value

        if request_url.netloc:
            # Absolute request URL: its host part overrides any Host header
            # copied in the loop above.
            environ['HTTP_HOST'] = request_url.netloc

        return environ

    def run_wsgi(self):
        # Dispatch the current request into the WSGI application.
        if self.headers.get('Expect', '').lower().strip() == '100-continue':
            self.wfile.write(b'HTTP/1.1 100 Continue\r\n\r\n')

        environ = self.make_environ()
        headers_set = []
        headers_sent = []

        def write(data):
            # Headers are sent lazily, on the first body write, so the app
            # can still replace them via start_response(exc_info=...).
            assert headers_set, 'write() before start_response'
            if not headers_sent:
                status, response_headers = headers_sent[:] = headers_set
                try:
                    code, msg = status.split(None, 1)
                except ValueError:
                    # Status line without a reason phrase, e.g. "200".
                    code, msg = status, ""
                self.send_response(int(code), msg)
                header_keys = set()
                for key, value in response_headers:
                    self.send_header(key, value)
                    key = key.lower()
                    header_keys.add(key)
                if 'content-length' not in header_keys:
                    # No length known: the connection must close to mark the
                    # end of the response body.
                    self.close_connection = True
                    self.send_header('Connection', 'close')
                if 'server' not in header_keys:
                    self.send_header('Server', self.version_string())
                if 'date' not in header_keys:
                    self.send_header('Date', self.date_time_string())
                self.end_headers()

            assert type(data) is bytes, 'applications must write bytes'
            self.wfile.write(data)
            self.wfile.flush()

        def start_response(status, response_headers, exc_info=None):
            # PEP 3333 start_response: re-raise if headers already went out.
            if exc_info:
                try:
                    if headers_sent:
                        reraise(*exc_info)
                finally:
                    exc_info = None
            elif headers_set:
                raise AssertionError('Headers already set')
            headers_set[:] = [status, response_headers]
            return write

        def execute(app):
            application_iter = app(environ, start_response)
            try:
                for data in application_iter:
                    write(data)
                if not headers_sent:
                    # Empty response body: still flush the headers.
                    write(b'')
            finally:
                # Per PEP 3333, close() must be called on the iterable.
                if hasattr(application_iter, 'close'):
                    application_iter.close()
                application_iter = None

        try:
            execute(self.server.app)
        except (socket.error, socket.timeout) as e:
            self.connection_dropped(e, environ)
        except Exception:
            if self.server.passthrough_errors:
                raise
            from werkzeug.debug.tbtools import get_current_traceback
            traceback = get_current_traceback(ignore_system_exceptions=True)
            try:
                # if we haven't yet sent the headers but they are set
                # we roll back to be able to set them again.
                if not headers_sent:
                    del headers_set[:]
                execute(InternalServerError())
            except Exception:
                # Even the error page failed; nothing more we can do here.
                pass
            self.server.log('error', 'Error on request:\n%s',
                            traceback.plaintext)

    def handle(self):
        """Handles a request ignoring dropped connections."""
        rv = None
        try:
            rv = BaseHTTPRequestHandler.handle(self)
        except (socket.error, socket.timeout) as e:
            self.connection_dropped(e)
        except Exception:
            # SSL handshake noise is ignored; everything else propagates.
            if self.server.ssl_context is None or not is_ssl_error():
                raise
        if self.server.shutdown_signal:
            self.initiate_shutdown()
        return rv

    def initiate_shutdown(self):
        """A horrible, horrible way to kill the server for Python 2.6 and
        later. It's the best we can do.
        """
        # Windows does not provide SIGKILL, go with SIGTERM then.
        sig = getattr(signal, 'SIGKILL', signal.SIGTERM)
        # reloader active
        if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
            os.kill(os.getpid(), sig)
        # python 2.7
        self.server._BaseServer__shutdown_request = True
        # python 2.6
        self.server._BaseServer__serving = False

    def connection_dropped(self, error, environ=None):
        """Called if the connection was closed by the client.  By default
        nothing happens.
        """

    def handle_one_request(self):
        """Handle a single HTTP request."""
        self.raw_requestline = self.rfile.readline()
        if not self.raw_requestline:
            # Client closed the connection before sending a request line.
            self.close_connection = 1
        elif self.parse_request():
            return self.run_wsgi()

    def send_response(self, code, message=None):
        """Send the response header and log the response code."""
        self.log_request(code)
        if message is None:
            # Fall back to the stdlib's canonical reason phrase, if known.
            message = code in self.responses and self.responses[code][0] or ''
        if self.request_version != 'HTTP/0.9':
            hdr = "%s %d %s\r\n" % (self.protocol_version, code, message)
            self.wfile.write(hdr.encode('ascii'))

    def version_string(self):
        return BaseHTTPRequestHandler.version_string(self).strip()

    def address_string(self):
        # Skip the stdlib's reverse DNS lookup; just use the raw address.
        return self.client_address[0]

    def log_request(self, code='-', size='-'):
        self.log('info', '"%s" %s %s', self.requestline, code, size)

    def log_error(self, *args):
        self.log('error', *args)

    def log_message(self, format, *args):
        self.log('info', format, *args)

    def log(self, type, message, *args):
        # Central funnel for all access/error logging of this handler.
        _log(type, '%s - - [%s] %s\n' % (self.address_string(),
                                         self.log_date_time_string(),
                                         message % args))
#: backwards compatible alias, kept because third-party code subclasses it
BaseRequestHandler = WSGIRequestHandler
def generate_adhoc_ssl_pair(cn=None):
    """Create a throwaway self-signed certificate and private key.

    Intended only for the development server; the certificate is valid for
    one year and signed by a made-up 'Untrusted Authority'.
    """
    from random import random
    crypto = _get_openssl_crypto_module()

    # pretty damn sure that this is not actually accepted by anyone
    if cn is None:
        cn = '*'

    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 1024)

    cert = crypto.X509()
    cert.set_serial_number(int(random() * sys.maxsize))
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(60 * 60 * 24 * 365)

    subject = cert.get_subject()
    subject.CN = cn
    subject.O = 'Dummy Certificate'

    issuer = cert.get_issuer()
    issuer.CN = 'Untrusted Authority'
    issuer.O = 'Self-Signed'

    cert.set_pubkey(key)
    cert.sign(key, 'md5')

    return cert, key
def make_ssl_devcert(base_path, host=None, cn=None):
    """Creates an SSL key for development.  This should be used instead of
    the ``'adhoc'`` key which generates a new cert on each server start.
    It accepts a path for where it should store the key and cert and
    either a host or CN.  If a host is given it will use the CN
    ``*.host/CN=host``.

    For more information see :func:`run_simple`.

    .. versionadded:: 0.9

    :param base_path: the path to the certificate and key.  The extension
                      ``.crt`` is added for the certificate, ``.key`` is
                      added for the key.
    :param host: the name of the host.  This can be used as an alternative
                 for the `cn`.
    :param cn: the `CN` to use.
    """
    from OpenSSL import crypto
    if host is not None:
        cn = '*.%s/CN=%s' % (host, host)
    cert, pkey = generate_adhoc_ssl_pair(cn=cn)

    cert_file = base_path + '.crt'
    pkey_file = base_path + '.key'

    # Dump both artifacts as PEM next to each other.
    for path, blob in ((cert_file, crypto.dump_certificate(crypto.FILETYPE_PEM, cert)),
                       (pkey_file, crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))):
        with open(path, 'wb') as f:
            f.write(blob)

    return cert_file, pkey_file
def generate_adhoc_ssl_context():
    """Generate a transient SSL context backed by an ad-hoc certificate.

    The cert/key live in temp files that are removed at interpreter exit.
    """
    crypto = _get_openssl_crypto_module()
    import tempfile
    import atexit

    cert, pkey = generate_adhoc_ssl_pair()
    cert_handle, cert_file = tempfile.mkstemp()
    pkey_handle, pkey_file = tempfile.mkstemp()
    atexit.register(os.remove, pkey_file)
    atexit.register(os.remove, cert_file)

    for handle, blob in ((cert_handle, crypto.dump_certificate(crypto.FILETYPE_PEM, cert)),
                         (pkey_handle, crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))):
        os.write(handle, blob)
        os.close(handle)

    return load_ssl_context(cert_file, pkey_file)
def load_ssl_context(cert_file, pkey_file=None, protocol=None):
    """Build an SSL context from a certificate and (optional) key file.

    Many parameters are directly taken from the API of
    :py:class:`ssl.SSLContext`.

    :param cert_file: Path of the certificate to use.
    :param pkey_file: Path of the private key to use.  If not given, the key
                      will be obtained from the certificate file.
    :param protocol: One of the ``PROTOCOL_*`` constants in the stdlib ``ssl``
                     module.  Defaults to ``PROTOCOL_SSLv23``.
    """
    ctx = _SSLContext(ssl.PROTOCOL_SSLv23 if protocol is None else protocol)
    ctx.load_cert_chain(cert_file, pkey_file)
    return ctx
class _SSLContext(object):
    """Tiny stand-in for Python 3's ``ssl.SSLContext``.

    It only records the certificate/key configuration and wraps sockets;
    it is intended solely for use with and by Werkzeug.
    """

    def __init__(self, protocol):
        self._protocol = protocol
        self._certfile = None
        self._keyfile = None
        self._password = None

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        """Remember the certificate chain; the key defaults to the cert file."""
        self._certfile = certfile
        self._keyfile = keyfile or certfile
        self._password = password

    def wrap_socket(self, sock, **kwargs):
        """Wrap *sock* with the recorded certificate, key and protocol."""
        return ssl.wrap_socket(
            sock,
            keyfile=self._keyfile,
            certfile=self._certfile,
            ssl_version=self._protocol,
            **kwargs
        )
def is_ssl_error(error=None):
    """Checks if the given error (or the currently active exception when
    *error* is ``None``) is an SSL related error."""
    if error is None:
        error = sys.exc_info()[1]
    exc_types = (ssl.SSLError,)
    try:
        from OpenSSL.SSL import Error
    except ImportError:
        # pyOpenSSL is optional; without it only stdlib SSL errors match.
        pass
    else:
        exc_types += (Error,)
    return isinstance(error, exc_types)
def select_ip_version(host, port):
    """Returns AF_INET4 or AF_INET6 depending on where to connect to."""
    # NOTE: resolving the family via socket.getaddrinfo() was disabled
    # because of problems with current ipv6 implementations on various
    # operating systems; a colon in the host name is used as the IPv6
    # marker instead.  (*port* is kept for interface compatibility.)
    if ':' in host and hasattr(socket, 'AF_INET6'):
        return socket.AF_INET6
    return socket.AF_INET
class BaseWSGIServer(HTTPServer, object):
    """Simple single-threaded, single-process WSGI server."""

    multithread = False
    multiprocess = False
    request_queue_size = 128

    def __init__(self, host, port, app, handler=None,
                 passthrough_errors=False, ssl_context=None):
        if handler is None:
            handler = WSGIRequestHandler
        # The address family must be chosen before HTTPServer binds.
        self.address_family = select_ip_version(host, port)
        HTTPServer.__init__(self, (host, int(port)), handler)
        self.app = app
        self.passthrough_errors = passthrough_errors
        self.shutdown_signal = False
        if ssl_context is None:
            self.ssl_context = None
        else:
            # Normalize the ssl_context argument: a (cert, key) tuple or
            # the string 'adhoc' is turned into a real context object
            # before the listening socket is wrapped.
            if isinstance(ssl_context, tuple):
                ssl_context = load_ssl_context(*ssl_context)
            if ssl_context == 'adhoc':
                ssl_context = generate_adhoc_ssl_context()
            self.socket = ssl_context.wrap_socket(self.socket,
                                                  server_side=True)
            self.ssl_context = ssl_context

    def log(self, type, message, *args):
        _log(type, message, *args)

    def serve_forever(self):
        self.shutdown_signal = False
        try:
            HTTPServer.serve_forever(self)
        except KeyboardInterrupt:
            pass
        finally:
            self.server_close()

    def handle_error(self, request, client_address):
        # With passthrough_errors the exception propagates to the caller
        # instead of being swallowed by the default handler.
        if self.passthrough_errors:
            raise
        return HTTPServer.handle_error(self, request, client_address)

    def get_request(self):
        return self.socket.accept()
class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer):
    """A WSGI server that does threading.

    ``ThreadingMixIn`` dispatches each request to its own thread; the
    ``multithread`` flag advertises that to the WSGI environ.
    """
    multithread = True
class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer):
    """A WSGI server that does forking.

    ``ForkingMixIn`` handles each request in a forked child process; the
    ``multiprocess`` flag advertises that to the WSGI environ.
    """
    multiprocess = True

    def __init__(self, host, port, app, processes=40, handler=None,
                 passthrough_errors=False, ssl_context=None):
        # :param processes: upper bound on concurrent child processes
        #                   (stored as ``ForkingMixIn.max_children``).
        BaseWSGIServer.__init__(self, host, port, app, handler,
                                passthrough_errors, ssl_context)
        self.max_children = processes
def make_server(host, port, app=None, threaded=False, processes=1,
                request_handler=None, passthrough_errors=False,
                ssl_context=None):
    """Create a new server instance that is either threaded, or forks
    or just processes one request after another.

    Threading and forking are mutually exclusive; asking for both
    raises a :exc:`ValueError`.
    """
    if threaded and processes > 1:
        raise ValueError("cannot have a multithreaded and "
                         "multi process server.")
    if threaded:
        return ThreadedWSGIServer(host, port, app, request_handler,
                                  passthrough_errors, ssl_context)
    if processes > 1:
        return ForkingWSGIServer(host, port, app, processes, request_handler,
                                 passthrough_errors, ssl_context)
    return BaseWSGIServer(host, port, app, request_handler,
                          passthrough_errors, ssl_context)
def is_running_from_reloader():
    """Checks if the application is running from within the Werkzeug
    reloader subprocess.

    .. versionadded:: 0.10
    """
    # The reloader parent sets this variable for the child it spawns.
    flag = os.environ.get('WERKZEUG_RUN_MAIN')
    return flag == 'true'
def run_simple(hostname, port, application, use_reloader=False,
               use_debugger=False, use_evalex=True,
               extra_files=None, reloader_interval=1,
               reloader_type='auto', threaded=False, processes=1,
               request_handler=None, static_files=None,
               passthrough_errors=False, ssl_context=None):
    """Start a WSGI application. Optional features include a reloader,
    multithreading and fork support.

    This function has a command-line interface too::

        python -m werkzeug.serving --help

    .. versionadded:: 0.5
       `static_files` was added to simplify serving of static files as well
       as `passthrough_errors`.

    .. versionadded:: 0.6
       support for SSL was added.

    .. versionadded:: 0.8
       Added support for automatically loading a SSL context from certificate
       file and private key.

    .. versionadded:: 0.9
       Added command-line interface.

    .. versionadded:: 0.10
       Improved the reloader and added support for changing the backend
       through the `reloader_type` parameter. See :ref:`reloader`
       for more information.

    :param hostname: The host for the application.  eg: ``'localhost'``
    :param port: The port for the server.  eg: ``8080``
    :param application: the WSGI application to execute
    :param use_reloader: should the server automatically restart the python
                         process if modules were changed?
    :param use_debugger: should the werkzeug debugging system be used?
    :param use_evalex: should the exception evaluation feature be enabled?
    :param extra_files: a list of files the reloader should watch
                        additionally to the modules.  For example configuration
                        files.
    :param reloader_interval: the interval for the reloader in seconds.
    :param reloader_type: the type of reloader to use.  The default is
                          auto detection.  Valid values are ``'stat'`` and
                          ``'watchdog'``. See :ref:`reloader` for more
                          information.
    :param threaded: should the process handle each request in a separate
                     thread?
    :param processes: if greater than 1 then handle each request in a new process
                      up to this maximum number of concurrent processes.
    :param request_handler: optional parameter that can be used to replace
                            the default one.  You can use this to replace it
                            with a different
                            :class:`~BaseHTTPServer.BaseHTTPRequestHandler`
                            subclass.
    :param static_files: a dict of paths for static files.  This works exactly
                         like :class:`SharedDataMiddleware`, it's actually
                         just wrapping the application in that middleware before
                         serving.
    :param passthrough_errors: set this to `True` to disable the error catching.
                               This means that the server will die on errors but
                               it can be useful to hook debuggers in (pdb etc.)
    :param ssl_context: an SSL context for the connection. Either an
                        :class:`ssl.SSLContext`, a tuple in the form
                        ``(cert_file, pkey_file)``, the string ``'adhoc'`` if
                        the server should automatically create one, or ``None``
                        to disable SSL (which is the default).
    """
    # Layer optional middlewares around the application before serving.
    if use_debugger:
        from werkzeug.debug import DebuggedApplication
        application = DebuggedApplication(application, use_evalex)
    if static_files:
        from werkzeug.wsgi import SharedDataMiddleware
        application = SharedDataMiddleware(application, static_files)

    def inner():
        # Actual server bootstrap; invoked directly or via the reloader.
        make_server(hostname, port, application, threaded,
                    processes, request_handler,
                    passthrough_errors, ssl_context).serve_forever()

    # Only log the startup banner in the initial process; the reloader
    # child sets WERKZEUG_RUN_MAIN (see is_running_from_reloader()).
    if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
        # ``and``/``or`` idiom: show 'localhost' when hostname is '*' or empty.
        display_hostname = hostname != '*' and hostname or 'localhost'
        if ':' in display_hostname:
            # A colon marks an IPv6 literal (cf. select_ip_version);
            # wrap it in brackets for the URL.
            display_hostname = '[%s]' % display_hostname
        quit_msg = '(Press CTRL+C to quit)'
        _log('info', ' * Running on %s://%s:%d/ %s', ssl_context is None
             and 'http' or 'https', display_hostname, port, quit_msg)
    if use_reloader:
        # Create and destroy a socket so that any exceptions are raised before
        # we spawn a separate Python interpreter and lose this ability.
        address_family = select_ip_version(hostname, port)
        test_socket = socket.socket(address_family, socket.SOCK_STREAM)
        test_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        test_socket.bind((hostname, port))
        test_socket.close()
        from ._reloader import run_with_reloader
        run_with_reloader(inner, extra_files, reloader_interval,
                          reloader_type)
    else:
        inner()
def run_with_reloader(*args, **kwargs):
    """Deprecated shim forwarding to :func:`werkzeug._reloader.run_with_reloader`.

    People keep using undocumented APIs.  Please do not use this
    function; there is no guarantee that it continues working.
    """
    from ._reloader import run_with_reloader as _run_with_reloader
    return _run_with_reloader(*args, **kwargs)
def main():
    '''A simple command-line interface for :py:func:`run_simple`.'''
    # in contrast to argparse, this works at least under Python < 2.7
    import optparse
    from werkzeug.utils import import_string

    parser = optparse.OptionParser(
        usage='Usage: %prog [options] app_module:app_object')
    parser.add_option('-b', '--bind', dest='address',
                      help='The hostname:port the app should listen on.')
    parser.add_option('-d', '--debug', dest='use_debugger',
                      action='store_true', default=False,
                      help='Use Werkzeug\'s debugger.')
    parser.add_option('-r', '--reload', dest='use_reloader',
                      action='store_true', default=False,
                      help='Reload Python process if modules change.')
    options, args = parser.parse_args()

    # Split an optional "host:port" bind address into its parts.
    hostname = port = None
    if options.address:
        parts = options.address.split(':')
        hostname = parts[0]
        if len(parts) > 1:
            port = parts[1]

    if len(args) != 1:
        sys.stdout.write('No application supplied, or too much. See --help\n')
        sys.exit(1)

    run_simple(
        hostname=(hostname or '127.0.0.1'), port=int(port or 5000),
        application=import_string(args[0]),
        use_reloader=options.use_reloader,
        use_debugger=options.use_debugger
    )
# Allow running this module directly (``python -m werkzeug.serving``).
if __name__ == '__main__':
    main()
|
drexly/openhgsenti | refs/heads/master | lib/django/db/migrations/optimizer.py | 339 | from __future__ import unicode_literals
from django.db.migrations import (
AddField, AlterField, AlterIndexTogether, AlterModelTable,
AlterOrderWithRespectTo, AlterUniqueTogether, CreateModel, DeleteModel,
RemoveField, RenameField, RenameModel,
)
from django.utils import six
class MigrationOptimizer(object):
    """
    Powers the optimization process, where you provide a list of Operations
    and you are returned a list of equal or shorter length - operations
    are merged into one if possible.

    For example, a CreateModel and an AddField can be optimized into a
    new CreateModel, and CreateModel and DeleteModel can be optimized into
    nothing.
    """

    def __init__(self):
        # Operation classes that affect a whole model; consulted by
        # can_optimize_through() when deciding whether an optimization
        # may scan past an unrelated operation.
        self.model_level_operations = (
            CreateModel,
            AlterModelTable,
            AlterUniqueTogether,
            AlterIndexTogether,
            AlterOrderWithRespectTo,
        )
        # Operation classes that affect a single field on a model.
        self.field_level_operations = (
            AddField,
            AlterField,
        )
        # Dispatch table mapping (type of earlier op, type of later op)
        # to the bound method that knows how to merge that pair.
        self.reduce_methods = {
            # (model operation, model operation)
            (CreateModel, DeleteModel): self.reduce_create_model_delete_model,
            (CreateModel, RenameModel): self.reduce_create_model_rename_model,
            (RenameModel, RenameModel): self.reduce_rename_model_rename_model,
            (AlterIndexTogether, AlterIndexTogether): self.reduce_alter_model_alter_model,
            (AlterModelTable, AlterModelTable): self.reduce_alter_model_alter_model,
            (AlterOrderWithRespectTo, AlterOrderWithRespectTo): self.reduce_alter_model_alter_model,
            (AlterUniqueTogether, AlterUniqueTogether): self.reduce_alter_model_alter_model,
            (AlterIndexTogether, DeleteModel): self.reduce_alter_model_delete_model,
            (AlterModelTable, DeleteModel): self.reduce_alter_model_delete_model,
            (AlterOrderWithRespectTo, DeleteModel): self.reduce_alter_model_delete_model,
            (AlterUniqueTogether, DeleteModel): self.reduce_alter_model_delete_model,
            # (model operation, field operation)
            (CreateModel, AddField): self.reduce_create_model_add_field,
            (CreateModel, AlterField): self.reduce_create_model_alter_field,
            (CreateModel, RemoveField): self.reduce_create_model_remove_field,
            (CreateModel, RenameField): self.reduce_create_model_rename_field,
            (AlterIndexTogether, AddField): self.reduce_alter_model_addalterremove_field,
            (AlterIndexTogether, AlterField): self.reduce_alter_model_addalterremove_field,
            (AlterIndexTogether, RemoveField): self.reduce_alter_model_addalterremove_field,
            (AlterOrderWithRespectTo, AddField): self.reduce_alter_model_addalterremove_field,
            (AlterOrderWithRespectTo, AlterField): self.reduce_alter_model_addalterremove_field,
            (AlterOrderWithRespectTo, RemoveField): self.reduce_alter_model_addalterremove_field,
            (AlterUniqueTogether, AddField): self.reduce_alter_model_addalterremove_field,
            (AlterUniqueTogether, AlterField): self.reduce_alter_model_addalterremove_field,
            (AlterUniqueTogether, RemoveField): self.reduce_alter_model_addalterremove_field,
            (AlterIndexTogether, RenameField): self.reduce_alter_model_rename_field,
            (AlterOrderWithRespectTo, RenameField): self.reduce_alter_model_rename_field,
            (AlterUniqueTogether, RenameField): self.reduce_alter_model_rename_field,
            # (field operation, field operation)
            (AddField, AlterField): self.reduce_add_field_alter_field,
            (AddField, RemoveField): self.reduce_add_field_remove_field,
            (AddField, RenameField): self.reduce_add_field_rename_field,
            (AlterField, RemoveField): self.reduce_alter_field_remove_field,
            (AlterField, RenameField): self.reduce_alter_field_rename_field,
            (RenameField, RenameField): self.reduce_rename_field_rename_field,
        }

    def optimize(self, operations, app_label=None):
        """
        Main optimization entry point. Pass in a list of Operation instances,
        get out a new list of Operation instances.

        Unfortunately, due to the scope of the optimization (two combinable
        operations might be separated by several hundred others), this can't be
        done as a peephole optimization with checks/output implemented on
        the Operations themselves; instead, the optimizer looks at each
        individual operation and scans forwards in the list to see if there
        are any matches, stopping at boundaries - operations which can't
        be optimized over (RunSQL, operations on the same field/model, etc.)

        The inner loop is run until the starting list is the same as the result
        list, and then the result is returned. This means that operation
        optimization must be stable and always return an equal or shorter list.

        The app_label argument is optional, but if you pass it you'll get more
        efficient optimization.
        """
        # Internal tracking variable for test assertions about # of loops
        self._iterations = 0
        # Iterate to a fixed point: keep re-running the inner pass until a
        # pass produces no change.
        while True:
            result = self.optimize_inner(operations, app_label)
            self._iterations += 1
            if result == operations:
                return result
            operations = result

    def optimize_inner(self, operations, app_label=None):
        """
        Inner optimization loop.
        """
        new_operations = []
        for i, operation in enumerate(operations):
            # Compare it to each operation after it
            for j, other in enumerate(operations[i + 1:]):
                # operations[i + 1:i + j + 1] is everything strictly between
                # `operation` and `other` (`other` is operations[i + 1 + j]).
                result = self.reduce(operation, other, operations[i + 1:i + j + 1])
                if result is not None:
                    # Optimize! Add result, then remaining others, then return
                    new_operations.extend(result)
                    new_operations.extend(operations[i + 1:i + 1 + j])
                    new_operations.extend(operations[i + j + 2:])
                    return new_operations
                if not self.can_optimize_through(operation, other, app_label):
                    # Hit a boundary: keep `operation` as-is and stop scanning.
                    new_operations.append(operation)
                    break
            else:
                # Reached the end of the list without reducing `operation`.
                new_operations.append(operation)
        return new_operations

    # REDUCTION

    def reduce(self, operation, other, in_between=None):
        """
        Either returns a list of zero, one or two operations,
        or None, meaning this pair cannot be optimized.
        """
        method = self.reduce_methods.get((type(operation), type(other)))
        if method:
            return method(operation, other, in_between or [])
        return None

    def model_to_key(self, model):
        """
        Takes either a model class or a "appname.ModelName" string
        and returns (appname, modelname)
        """
        if isinstance(model, six.string_types):
            return model.split(".", 1)
        else:
            return (
                model._meta.app_label,
                model._meta.object_name,
            )

    # REDUCE METHODS: (MODEL OPERATION, MODEL OPERATION)
    # NOTE: each reduce method implicitly returns None when its guard
    # does not match, signalling "this pair cannot be merged".

    def reduce_create_model_delete_model(self, operation, other, in_between):
        """
        Folds a CreateModel and a DeleteModel into nothing.
        """
        if (operation.name_lower == other.name_lower and
                not operation.options.get("proxy", False)):
            return []

    def reduce_create_model_rename_model(self, operation, other, in_between):
        """
        Folds a model rename into its create
        """
        if operation.name_lower == other.old_name_lower:
            return [
                CreateModel(
                    other.new_name,
                    fields=operation.fields,
                    options=operation.options,
                    bases=operation.bases,
                    managers=operation.managers,
                )
            ]

    def reduce_rename_model_rename_model(self, operation, other, in_between):
        """
        Folds a model rename into another one
        """
        if operation.new_name_lower == other.old_name_lower:
            return [
                RenameModel(
                    operation.old_name,
                    other.new_name,
                )
            ]

    def reduce_alter_model_alter_model(self, operation, other, in_between):
        """
        Folds two AlterModelTable, AlterFooTogether, or AlterOrderWithRespectTo
        operations into the latter.
        """
        if operation.name_lower == other.name_lower:
            return [other]

    def reduce_alter_model_delete_model(self, operation, other, in_between):
        """
        Folds an AlterModelSomething and a DeleteModel into just delete.
        """
        if operation.name_lower == other.name_lower:
            return [other]

    # REDUCE METHODS: (MODEL OPERATION, FIELD OPERATION)

    def reduce_create_model_add_field(self, operation, other, in_between):
        """Folds an AddField into the CreateModel that creates its model."""
        if operation.name_lower == other.model_name_lower:
            # Don't allow optimizations of FKs through models they reference
            if hasattr(other.field, "remote_field") and other.field.remote_field:
                for between in in_between:
                    # Check that it doesn't point to the model
                    app_label, object_name = self.model_to_key(other.field.remote_field.model)
                    if between.references_model(object_name, app_label):
                        return None
                    # Check that it's not through the model
                    if getattr(other.field.remote_field, "through", None):
                        app_label, object_name = self.model_to_key(other.field.remote_field.through)
                        if between.references_model(object_name, app_label):
                            return None
            # OK, that's fine
            return [
                CreateModel(
                    operation.name,
                    fields=operation.fields + [(other.name, other.field)],
                    options=operation.options,
                    bases=operation.bases,
                    managers=operation.managers,
                )
            ]

    def reduce_create_model_alter_field(self, operation, other, in_between):
        """Folds an AlterField into the CreateModel that declares the field."""
        if operation.name_lower == other.model_name_lower:
            return [
                CreateModel(
                    operation.name,
                    fields=[
                        # Substitute the altered field definition in place.
                        (n, other.field if n == other.name else v)
                        for n, v in operation.fields
                    ],
                    options=operation.options,
                    bases=operation.bases,
                    managers=operation.managers,
                )
            ]

    def reduce_create_model_remove_field(self, operation, other, in_between):
        """Folds a RemoveField into the CreateModel by dropping the field."""
        if operation.name_lower == other.model_name_lower:
            return [
                CreateModel(
                    operation.name,
                    fields=[
                        (n, v)
                        for n, v in operation.fields
                        if n.lower() != other.name_lower
                    ],
                    options=operation.options,
                    bases=operation.bases,
                    managers=operation.managers,
                )
            ]

    def reduce_create_model_rename_field(self, operation, other, in_between):
        """Folds a RenameField into the CreateModel by renaming the field."""
        if operation.name_lower == other.model_name_lower:
            return [
                CreateModel(
                    operation.name,
                    fields=[
                        (other.new_name if n == other.old_name else n, v)
                        for n, v in operation.fields
                    ],
                    options=operation.options,
                    bases=operation.bases,
                    managers=operation.managers,
                )
            ]

    def reduce_alter_model_addalterremove_field(self, operation, other, in_between):
        """Swaps an Alter* op with a later field op it does not reference."""
        if (operation.name_lower == other.model_name_lower and
                not operation.references_field(other.model_name, other.name)):
            return [other, operation]

    def reduce_alter_model_rename_field(self, operation, other, in_between):
        """Swaps an Alter* op with a RenameField it does not reference."""
        if (operation.name_lower == other.model_name_lower and
                not operation.references_field(other.model_name, other.old_name)):
            return [other, operation]

    # REDUCE METHODS: (FIELD OPERATION, FIELD OPERATION)

    def reduce_add_field_alter_field(self, operation, other, in_between):
        """Folds an AlterField into the AddField that added the field."""
        if (operation.model_name_lower == other.model_name_lower and
                operation.name_lower == other.name_lower):
            return [
                AddField(
                    model_name=operation.model_name,
                    name=operation.name,
                    field=other.field,
                )
            ]

    def reduce_add_field_remove_field(self, operation, other, in_between):
        """Folds an AddField and a RemoveField of the same field into nothing."""
        if (operation.model_name_lower == other.model_name_lower and
                operation.name_lower == other.name_lower):
            return []

    def reduce_add_field_rename_field(self, operation, other, in_between):
        """Folds a RenameField into the AddField that added the field."""
        if (operation.model_name_lower == other.model_name_lower and
                operation.name_lower == other.old_name_lower):
            return [
                AddField(
                    model_name=operation.model_name,
                    name=other.new_name,
                    field=operation.field,
                )
            ]

    def reduce_alter_field_remove_field(self, operation, other, in_between):
        """Drops an AlterField when the field is removed afterwards anyway."""
        if (operation.model_name_lower == other.model_name_lower and
                operation.name_lower == other.name_lower):
            return [other]

    def reduce_alter_field_rename_field(self, operation, other, in_between):
        """Moves an AlterField after the RenameField, re-targeting the new name."""
        if (operation.model_name_lower == other.model_name_lower and
                operation.name_lower == other.old_name_lower):
            return [
                other,
                AlterField(
                    model_name=operation.model_name,
                    name=other.new_name,
                    field=operation.field,
                ),
            ]

    def reduce_rename_field_rename_field(self, operation, other, in_between):
        """Folds two chained RenameFields into a single rename."""
        if (operation.model_name_lower == other.model_name_lower and
                operation.new_name_lower == other.old_name_lower):
            return [
                RenameField(
                    operation.model_name,
                    operation.old_name,
                    other.new_name,
                ),
            ]

    # THROUGH CHECKS

    def can_optimize_through(self, operation, other, app_label=None):
        """
        Returns True if it's possible to optimize 'operation' with something
        the other side of 'other'. This is possible if, for example, they
        affect different models.
        """
        # If it's a model level operation, let it through if there's
        # nothing that looks like a reference to us in 'other'.
        if isinstance(operation, self.model_level_operations):
            if not other.references_model(operation.name, app_label):
                return True
        # If it's field level, only let it through things that don't reference
        # the field (which includes not referencing the model)
        if isinstance(operation, self.field_level_operations):
            if not other.references_field(operation.model_name, operation.name, app_label):
                return True
        return False
|
abo-abo/edx-platform | refs/heads/master | cms/djangoapps/contentstore/features/video-editor.py | 6 | # disable missing docstring
# pylint: disable=C0111
from lettuce import world, step
from terrain.steps import reload_the_page
@step('I have set "show transcript" to (.*)$')
def set_show_captions(step, setting):
    """Open the video editor, set 'Show Transcript' to *setting* and save."""
    # Prevent cookies from overriding course settings
    world.browser.cookies.delete('hide_captions')
    world.css_click('a.edit-button')
    # Wait until the editor dialog (with its save button) is visible.
    world.wait_for(lambda _driver: world.css_visible('a.save-button'))
    world.click_link_by_text('Advanced')
    world.browser.select('Show Transcript', setting)
    world.css_click('a.save-button')
@step('when I view the video it (.*) show the captions$')
def shows_captions(_step, show_captions):
    """Assert caption visibility; 'does not' expects the closed state."""
    world.wait_for_js_variable_truthy("Video")
    # Give the player a moment to apply the transcript setting.
    world.wait(0.5)
    # The 'closed' CSS class on the video container marks hidden captions.
    if show_captions == 'does not':
        assert world.is_css_present('div.video.closed')
    else:
        assert world.is_css_not_present('div.video.closed')

    # Prevent cookies from overriding course settings
    world.browser.cookies.delete('hide_captions')
    world.browser.cookies.delete('current_player_mode')
@step('I see the correct video settings and default values$')
def correct_video_settings(_step):
    """Verify the video editor shows the expected settings and defaults."""
    # Each row: [setting label, default value, explicitly-set flag]
    # (presumably matching world.verify_all_setting_entries — confirm there).
    expected_entries = [
        # basic
        ['Display Name', 'Video', False],
        ['Video URL', 'http://youtu.be/OEoXaMPEzfM, , ', False],

        # advanced
        ['Display Name', 'Video', False],
        ['Download Transcript', '', False],
        ['Download Video', '', False],
        ['End Time', '00:00:00', False],
        ['HTML5 Transcript', '', False],
        ['Show Transcript', 'True', False],
        ['Start Time', '00:00:00', False],
        ['Video Sources', '', False],
        ['Youtube ID', 'OEoXaMPEzfM', False],
        ['Youtube ID for .75x speed', '', False],
        ['Youtube ID for 1.25x speed', '', False],
        ['Youtube ID for 1.5x speed', '', False]
    ]
    world.verify_all_setting_entries(expected_entries)
@step('my video display name change is persisted on save$')
def video_name_persisted(step):
    """Save, reload the page, and verify the display name survived."""
    world.css_click('a.save-button')
    reload_the_page(step)
    world.wait_for_xmodule()
    world.edit_component()
    # Third argument True: the value is now explicitly set (not default).
    world.verify_setting_entry(
        world.get_setting_entry('Display Name'),
        'Display Name', '3.4', True
    )
|
masom/Puck | refs/heads/master | server/tests/test_models/test_virtual_machines.py | 1 | import unittest
from collections import OrderedDict
from models.virtual_machines import VirtualMachine, VirtualMachines
from libs.model import ModelCollection, Model
class VirtualMachineTest(unittest.TestCase):
    """Unit tests for the ``VirtualMachine`` model."""

    def testInit(self):
        """Constructor keyword arguments become attributes on the instance."""
        vm = VirtualMachine(name="test", ip="asdf", status='derp', config='lol')
        for attr in ('name', 'ip', 'status', 'config'):
            self.assertTrue(hasattr(vm, attr))
        self.assertEqual('test', vm.name)
        self.assertEqual('asdf', vm.ip)
        self.assertEqual('derp', vm.status)
        self.assertEqual('lol', vm.config)
        # Unknown attributes must not be invented by the constructor.
        self.assertFalse(hasattr(vm, 'derp'))
        self.assertIsInstance(vm, Model)
class VirtualMachinesTest(unittest.TestCase):
    """Unit tests for the ``VirtualMachines`` collection.

    Fixes over the previous version:
    * ``testDelete`` was defined twice; the first, empty definition was
      silently shadowed by the later one and has been removed.
    * The expected :class:`OrderedDict` in ``test_InsertQuery`` listed
      ``('image_id', None)`` twice; duplicate keys collapse in a dict, so
      the redundant entry has been removed.
    """

    def testInit(self):
        """A new collection is a ModelCollection of VirtualMachine items."""
        vms = VirtualMachines()
        self.assertIsInstance(vms, ModelCollection)
        self.assertGreater(vms._items, 0)
        self.assertIsInstance(vms.all(), list)
        for i in vms.all():
            self.assertIsInstance(i, VirtualMachine)

    def testFirst(self):
        """first() is None when empty and returns the first added entity."""
        vms = VirtualMachines()
        self.assertEqual(vms.first(), None)
        entity = vms.new()
        vms.add(entity, persist=False)
        self.assertEqual(vms.first(), entity)

    def testNew(self):
        """new() builds VirtualMachine instances and auto-generates names."""
        vms = VirtualMachines()
        self.assertIsInstance(vms.new(), VirtualMachine)
        e = vms.new(name="lol")
        self.assertEqual(e.name, 'lol')
        self.assertEqual(e.ip, None)
        e = vms.new()
        self.assertIsNotNone(e.name)
        # Auto-generated names must be unique between calls.
        self.assertNotEqual(vms.new().name, e.name)

    def testAdd(self):
        """add() grows the collection by exactly one entity."""
        vms = VirtualMachines()
        before_count = len(vms.all())
        self.assertTrue(vms.add(vms.new(), persist=False))
        after_count = len(vms.all())
        self.assertGreater(after_count, before_count)
        self.assertEqual(before_count + 1, after_count)

    def test_GenerateSelectQuery(self):
        """_generate_select_query() targets the virtual_machines table."""
        vms = VirtualMachines()
        expected = 'SELECT * FROM virtual_machines'
        self.assertEqual(vms._generate_select_query(), expected)

    def test_InsertQuery(self):
        """Query data and the generated INSERT statement match the schema."""
        vms = VirtualMachines()
        entity = vms.new(name=None)
        expected = OrderedDict([
            ('id', None), ('name', None), ('ip', None), ('status', None),
            ('image_id', None), ('instance_type_id', None),
            ('instance_id', None), ('user', None), ('config', None)
        ])
        data = vms._generate_query_data(entity)
        self.assertEqual(expected, data)
        expected = 'INSERT INTO virtual_machines(id,name,ip,status,image_id,instance_type_id,instance_id,user,config) VALUES (?,?,?,?,?,?,?,?,?)'
        self.assertEqual(vms._generate_insert_query(data), expected)

    def testTableDefinition(self):
        """table_definition() emits the expected CREATE TABLE statement."""
        vms = VirtualMachines()
        expected = 'CREATE TABLE virtual_machines (id TEXT PRIMARY KEY,name TEXT,ip TEXT,status TEXT,image_id TEXT,instance_type_id TEXT,instance_id TEXT,user TEXT,config TEXT)'
        self.assertEqual(str(vms.table_definition()), expected)

    def testDelete(self):
        """_generate_delete_query() emits a parameterized DELETE statement."""
        vms = VirtualMachines()
        entity = vms.new()
        expected = 'DELETE FROM virtual_machines WHERE id = ?'
        self.assertEqual(vms._generate_delete_query(entity.name), expected)
|
funkring/fdoo | refs/heads/8.0-fdoo | addons-funkring/report_aeroo_sample/report/lorem.py | 8 | # -*- coding: utf8 -*-
# Module metadata: program identity strings built from the short name.
shortname = "lorem"
program = "Lorem Ipsum Generator (%s)" % shortname
version = "0.6"
# NOTE(review): ``copyright`` and ``license`` shadow the builtins of the
# same name that the ``site`` module installs; presumably intentional here.
copyright = "Copyright (C) 2007 Per Erik Strandberg"
license = """This is free software, and you are welcome to redistribute it
under the GNU General Public License <http://www.gnu.org/licenses/gpl.html>
%s comes with NO WARRANTY, to the extent permitted by law.""" % shortname
# Copyright 2007 Per Erik Strandberg: per at pererikstrandberg dot se
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
GPLv3 = """GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this license
document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for software and
other kinds of works.
The licenses for most software and other practical works are designed to take
away your freedom to share and change the works. By contrast, the GNU General
Public License is intended to guarantee your freedom to share and change all
versions of a program--to make sure it remains free software for all its users.
We, the Free Software Foundation, use the GNU General Public License for most
of our software it applies also to any other work released this way by its
authors. You can apply it to your programs, too.
When we speak of free software, we are referring to freedom, not price. Our
General Public Licenses are designed to make sure that you have the freedom to
distribute copies of free software (and charge for them if you wish), that you
receive source code or can get it if you want it, that you can change the
software or use pieces of it in new free programs, and that you know you can
do these things.
To protect your rights, we need to prevent others from denying you these
rights or asking you to surrender the rights. Therefore, you have certain
responsibilities if you distribute copies of the software, or if you modify
it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether gratis or for
a fee, you must pass on to the recipients the same freedoms that you received.
You must make sure that they, too, receive or can get the source code. And you
must show them these terms so they know their rights.
Developers that use the GNU GPL protect your rights with two steps: (1) assert
copyright on the software, and (2) offer you this License giving you legal
permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains that
there is no warranty for this free software. For both users' and authors'
sake, the GPL requires that modified versions be marked as changed, so that
their problems will not be attributed erroneously to authors of previous
versions.
Some devices are designed to deny users access to install or run modified
versions of the software inside them, although the manufacturer can do so.
This is fundamentally incompatible with the aim of protecting users' freedom
to change the software. The systematic pattern of such abuse occurs in the
area of products for individuals to use, which is precisely where it is most
unacceptable. Therefore, we have designed this version of the GPL to prohibit
the practice for those products. If such problems arise substantially in other
domains, we stand ready to extend this provision to those domains in future
versions of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents. States
should not allow patents to restrict development and use of software on
general-purpose computers, but in those that do, we wish to avoid the special
danger that patents applied to a free program could make it effectively
proprietary. To prevent this, the GPL assures that patents cannot be used to
render the program non-free.
The precise terms and conditions for copying, distribution and modification
follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of works,
such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this License.
Each licensee is addressed as "you". "Licensees" and "recipients" may be
individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work in a
fashion requiring copyright permission, other than the making of an exact
copy. The resulting work is called a "modified version" of the earlier work or
a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based on the
Program.
To "propagate" a work means to do anything with it that, without permission,
would make you directly or secondarily liable for infringement under
applicable copyright law, except executing it on a computer or modifying a
private copy. Propagation includes copying, distribution (with or without
modification), making available to the public, and in some countries other
activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user
through a computer network, with no transfer of a copy, is not
conveying.
An interactive user interface displays "Appropriate Legal Notices" to
the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work for
making modifications to it. "Object code" means any non-source form of
a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users can
regenerate automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same
work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not convey,
without conditions so long as your license otherwise remains in
force. You may convey covered works to others for the sole purpose of
having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under the
conditions stated below. Sublicensing is not allowed section 10 makes
it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such
circumvention is effected by exercising rights under this License with
respect to the covered work, and you disclaim any intention to limit
operation or modification of the work as a means of enforcing, against
the work's users, your or third parties' legal rights to forbid
circumvention of technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code
keep intact all notices of the absence of any warranty and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these
conditions:
* a) The work must carry prominent notices stating that you
modified it, and giving a relevant date.
* b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to "keep
intact all notices".
* c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This License
will therefore apply, along with any applicable section 7 additional
terms, to the whole of the work, and all its parts, regardless of how
they are packaged. This License gives no permission to license the
work in any other way, but it does not invalidate such permission if
you have separately received it.
* d) If the work has interactive user interfaces, each must
display Appropriate Legal Notices however, if the Program has
interactive interfaces that do not display Appropriate Legal Notices,
your work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms of
sections 4 and 5, provided that you also convey the machine-readable
Corresponding Source under the terms of this License, in one of these
ways:
* a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium customarily
used for software interchange.
* b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a written
offer, valid for at least three years and valid for as long as you
offer spare parts or customer support for that product model, to give
anyone who possesses the object code either (1) a copy of the
Corresponding Source for all the software in the product that is
covered by this License, on a durable physical medium customarily used
for software interchange, for a price no more than your reasonable
cost of physically performing this conveying of source, or (2) access
to copy the Corresponding Source from a network server at no charge.
* c) Convey individual copies of the object code with a copy of
the written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and only
if you received the object code with such an offer, in accord with
subsection 6b.
* d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to copy
the object code is a network server, the Corresponding Source may be
on a different server (operated by you or a third party) that supports
equivalent copying facilities, provided you maintain clear directions
next to the object code saying where to find the Corresponding
Source. Regardless of what server hosts the Corresponding Source, you
remain obligated to ensure that it is available for as long as needed
to satisfy these requirements.
* e) Convey the object code using peer-to-peer transmission,
provided you inform other peers where the object code and
Corresponding Source of the work are being offered to the general
public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal,
family, or household purposes, or (2) anything designed or sold for
incorporation into a dwelling. In determining whether a product is a
consumer product, doubtful cases shall be resolved in favor of
coverage. For a particular product received by a particular user,
"normally used" refers to a typical or common use of that class of
product, regardless of the status of the particular user or of the way
in which the particular user actually uses, or expects or is expected
to use, the product. A product is a consumer product regardless of
whether the product has substantial commercial, industrial or
non-consumer uses, unless such uses represent the only significant
mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to
install and execute modified versions of a covered work in that User
Product from a modified version of its Corresponding Source. The
information must suffice to ensure that the continued functioning of
the modified object code is in no case prevented or interfered with
solely because modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or
updates for a work that has been modified or installed by the
recipient, or for the User Product in which it has been modified or
installed. Access to a network may be denied when the modification
itself materially and adversely affects the operation of the network
or violates the rules and protocols for communication across the
network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its
conditions. Additional permissions that are applicable to the entire
Program shall be treated as though they were included in this License,
to the extent that they are valid under applicable law. If additional
permissions apply only to part of the Program, that part may be used
separately under those permissions, but the entire Program remains
governed by this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders
of that material) supplement the terms of this License with terms:
* a) Disclaiming warranty or limiting liability differently from
the terms of sections 15 and 16 of this License or
* b) Requiring preservation of specified reasonable legal notices
or author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it or
* c) Prohibiting misrepresentation of the origin of that material,
or requiring that modified versions of such material be marked in
reasonable ways as different from the original version or
* d) Limiting the use for publicity purposes of names of licensors
or authors of the material or
* e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks or
* f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions the
above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your license
from a particular copyright holder is reinstated (a) provisionally,
unless and until the copyright holder explicitly and finally
terminates your license, and (b) permanently, if the copyright holder
fails to notify you of the violation by some reasonable means prior to
60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10. 9. Acceptance Not Required for Having
Copies.
You are not required to accept this License in order to receive or run
a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it. 11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims owned
or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within the
scope of its coverage, prohibits the exercise of, or is conditioned on
the non-exercise of one or more of the rights that are specifically
granted under this License. You may not convey a covered work if you
are a party to an arrangement with a third party that is in the
business of distributing software, under which you make payment to the
third party based on the extent of your activity of conveying the
work, and under which the third party grants, to any of the parties
who would receive the covered work from you, a discriminatory patent
license (a) in connection with copies of the covered work conveyed by
you (or copies made from those copies), or (b) primarily for and in
connection with specific products or compilations that contain the
covered work, unless you entered into that arrangement, or that patent
license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under
this License and any other pertinent obligations, then as a
consequence you may not convey it at all. For example, if you agree to
terms that obligate you to collect a royalty for further conveying
from those to whom you convey the Program, the only way you could
satisfy both those terms and this License would be to refrain entirely
from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such. 14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions
of the GNU General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in
detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies that a certain numbered version of the GNU General Public
License "or any later version" applies to it, you have the option of
following the terms and conditions either of that numbered version or
of any later version published by the Free Software Foundation. If the
Program does not specify a version number of the GNU General Public
License, you may choose any version ever published by the Free
Software Foundation.
If the Program specifies that a proxy can decide which future versions
of the GNU General Public License can be used, that proxy's public
statement of acceptance of a version permanently authorizes you to
choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these
terms.
To do so, attach the following notices to the program. It is safest to
attach them to the start of each source file to most effectively state
the exclusion of warranty and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short notice
like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY for details type 'show w'.
This is free software, and you are welcome to redistribute it
under certain conditions type 'show c' for details.
The hypothetical commands 'show w' and 'show c' should show the
appropriate parts of the General Public License. Of course, your
program's commands might be different for a GUI interface, you would
use an "about box".
You should also get your employer (if you work as a programmer) or
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. For more information on this, and how to apply and follow
the GNU GPL, see <http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your
program into proprietary programs. If your program is a subroutine
library, you may consider it more useful to permit linking proprietary
applications with the library. If this is what you want to do, use the
GNU Lesser General Public License instead of this License. But first,
please read <http://www.gnu.org/philosophy/why-not-lgpl.html>."""
# Thanks to
# * Barre, for the n overflow solution
# * http://lipsum.sourceforge.net/whatis.php for source lorems and inspiration
# NOTE(review): `string.join` exists only in Python 2 (removed in Python 3,
# where the equivalent is the str.join method, e.g. " ".join(words)).
# This import pins the module to Python 2 — confirm before porting.
from string import join
def get_lorem(q = 0):
i = -1
i += 1
if q == i:
#This text is under public domain
#Lorem ipsum
#Cicero
return u"""lorem ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy
eirmod tempor invidunt ut labore et dolore magna aliquyam erat sed diam
voluptua at vero eos et accusam et justo duo dolores et ea rebum stet clita
kasd gubergren no sea takimata sanctus est lorem ipsum dolor sit amet lorem
ipsum dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod
tempor invidunt ut labore et dolore magna aliquyam erat sed diam voluptua at
vero eos et accusam et justo duo dolores et ea rebum stet clita kasd
gubergren no sea takimata sanctus est lorem ipsum dolor sit amet lorem ipsum
dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor
invidunt ut labore et dolore magna aliquyam erat sed diam voluptua at vero
eos et accusam et justo duo dolores et ea rebum stet clita kasd gubergren no
sea takimata sanctus est lorem ipsum dolor sit amet
duis autem vel eum iriure dolor in hendrerit in vulputate velit esse
molestie consequat vel illum dolore eu feugiat nulla facilisis at vero eros
et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril
delenit augue duis dolore te feugait nulla facilisi lorem ipsum dolor sit
amet consectetuer adipiscing elit sed diam nonummy nibh euismod tincidunt ut
laoreet dolore magna aliquam erat volutpat
ut wisi enim ad minim veniam quis nostrud exerci tation ullamcorper suscipit
lobortis nisl ut aliquip ex ea commodo consequat duis autem vel eum iriure
dolor in hendrerit in vulputate velit esse molestie consequat vel illum
dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio
dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te
feugait nulla facilisi
nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet
doming id quod mazim placerat facer possim assum lorem ipsum dolor sit amet
consectetuer adipiscing elit sed diam nonummy nibh euismod tincidunt ut
laoreet dolore magna aliquam erat volutpat ut wisi enim ad minim veniam quis
nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea
commodo consequat
duis autem vel eum iriure dolor in hendrerit in vulputate velit esse
molestie consequat vel illum dolore eu feugiat nulla facilisis
at vero eos et accusam et justo duo dolores et ea rebum stet clita kasd
gubergren no sea takimata sanctus est lorem ipsum dolor sit amet lorem ipsum
dolor sit amet consetetur sadipscing elitr sed diam nonumy eirmod tempor
invidunt ut labore et dolore magna aliquyam erat sed diam voluptua at vero
eos et accusam et justo duo dolores et ea rebum stet clita kasd gubergren no
sea takimata sanctus est lorem ipsum dolor sit amet lorem ipsum dolor sit
amet consetetur sadipscing elitr at accusam aliquyam diam diam dolore
dolores duo eirmod eos erat et nonumy sed tempor et et invidunt justo labore
stet clita ea et gubergren kasd magna no rebum sanctus sea sed takimata ut
vero voluptua est lorem ipsum dolor sit amet lorem ipsum dolor sit amet
consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut labore
et dolore magna aliquyam erat
consetetur sadipscing elitr sed diam nonumy eirmod tempor invidunt ut labore
et dolore magna aliquyam erat sed diam voluptua at vero eos et accusam et
justo duo dolores et ea rebum stet clita kasd gubergren no sea takimata
sanctus est lorem ipsum dolor sit amet lorem ipsum dolor sit amet consetetur
sadipscing elitr sed diam nonumy eirmod tempor invidunt ut labore et dolore
magna aliquyam erat sed diam voluptua at vero eos et accusam et justo duo
dolores et ea rebum stet clita kasd gubergren no sea takimata sanctus est
lorem ipsum dolor sit amet lorem ipsum dolor sit amet consetetur sadipscing
elitr sed diam nonumy eirmod tempor invidunt ut labore et dolore magna
aliquyam erat sed diam voluptua at vero eos et accusam et justo duo dolores
et ea rebum stet clita kasd gubergren no sea takimata sanctus est lorem
ipsum dolor sit amet"""
i += 1
if q == i:
#This text is under public domain
#Childe Harold's Pilgrimage - Canto the first (I.-X.)
#Lord Byron
return u"""oh thou in hellas deemed of heavenly birth
muse formed or fabled at the minstrels will
since shamed full oft by later lyres on earth
mine dares not call thee from thy sacred hill
yet there ive wandered by thy vaunted rill
yes sighed oer delphis longdeserted shrine
where save that feeble fountain all is still
nor mote my shell awake the weary nine
to grace so plain a talethis lowly lay of mine
whilome in albions isle there dwelt a youth
who ne in virtues ways did take delight
but spent his days in riot most uncouth
and vexed with mirth the drowsy ear of night
ah me in sooth he was a shameless wight
sore given to revel and ungodly glee
few earthly things found favour in his sight
save concubines and carnal companie
and flaunting wassailers of high and low degree
childe harold was he hight but whence his name
and lineage long it suits me not to say
suffice it that perchance they were of fame
and had been glorious in another day
but one sad losel soils a name for aye
however mighty in the olden time
nor all that heralds rake from coffined clay
nor florid prose nor honeyed lines of rhyme
can blazon evil deeds or consecrate a crime
childe harold basked him in the noontide sun
disporting there like any other fly
nor deemed before his little day was done
one blast might chill him into misery
but long ere scarce a third of his passed by
worse than adversity the childe befell
he felt the fulness of satiety
then loathed he in his native land to dwell
which seemed to him more lone than eremites sad cell
for he through sins long labyrinth had run
nor made atonement when he did amiss
had sighed to many though he loved but one
and that loved one alas could neer be his
ah happy she to scape from him whose kiss
had been pollution unto aught so chaste
who soon had left her charms for vulgar bliss
and spoiled her goodly lands to gild his waste
nor calm domestic peace had ever deigned to taste
and now childe harold was sore sick at heart
and from his fellow bacchanals would flee
tis said at times the sullen tear would start
but pride congealed the drop within his ee
apart he stalked in joyless reverie
and from his native land resolved to go
and visit scorching climes beyond the sea
with pleasure drugged he almost longed for woe
and een for change of scene would seek the shades below
the childe departed from his fathers hall
it was a vast and venerable pile
so old it seemed only not to fall
yet strength was pillared in each massy aisle
monastic dome condemned to uses vile
where superstition once had made her den
now paphian girls were known to sing and smile
and monks might deem their time was come agen
if ancient tales say true nor wrong these holy men
yet ofttimes in his maddest mirthful mood
strange pangs would flash along childe harolds brow
as if the memory of some deadly feud
or disappointed passion lurked below
but this none knew nor haply cared to know
for his was not that open artless soul
that feels relief by bidding sorrow flow
nor sought he friend to counsel or condole
whateer this grief mote be which he could not control
and none did love him though to hall and bower
he gathered revellers from far and near
he knew them flatterers of the festal hour
the heartless parasites of present cheer
yea none did love himnot his lemans dear
but pomp and power alone are womans care
and where these are light eros finds a feere
maidens like moths are ever caught by glare
and mammon wins his way where seraphs might despair
childe harold had a mothernot forgot
though parting from that mother he did shun
a sister whom he loved but saw her not
before his weary pilgrimage begun
if friends he had he bade adieu to none
yet deem not thence his breast a breast of steel
ye who have known what tis to dote upon
a few dear objects will in sadness feel
such partings break the heart they fondly hope to heal"""
i += 1
if q == i:
#This text is under public domain
#Decameron - Novella Prima
#Giovanni Boccaccio
return u"""convenevole cosa e carissime donne che ciascheduna cosa la quale l'uomo fa
dallo ammirabile e santo nome di colui il quale di tutte fu facitore le dea
principio per che dovendo io al vostro novellare sí come primo dare
cominciamento intendo da una delle sue maravigliose cose incominciare accio
che quella udita la nostra speranza in lui sí come in cosa impermutabile si
fermi e sempre sia da noi il suo nome lodato manifesta cosa e che sí come le
cose temporali tutte sono transitorie e mortali cosí in sé e fuor di sé
esser piene di noia d'angoscia e di fatica e a infiniti pericoli sogiacere
alle quali senza niuno fallo né potremmo noi che viviamo mescolati in esse e
che siamo parte d'esse durare né ripararci se spezial grazia di dio forza e
avvedimento non ci prestasse la quale a noi e in noi non e da credere che
per alcun nostro merito discenda ma dalla sua propria benignita mossa e da'
prieghi di coloro impetrata che sí come noi siamo furon mortali e bene i
suoi piaceri mentre furono in vita seguendo ora con lui eterni son divenuti
e beati alli quali noi medesimi sí come a procuratori informati per
esperienza della nostra fragilita forse non audaci di porgere i prieghi
nostri nel cospetto di tanto giudice delle cose le quali a noi reputiamo
oportune gli porgiamo e ancora piú in lui verso noi di pietosa liberalita
pieno discerniamo che non potendo l'acume dell'occhio mortale nel segreto
della divina mente trapassare in alcun modo avvien forse tal volta che da
oppinione ingannati tale dinanzi alla sua maesta facciamo procuratore che da
quella con etterno essilio e iscacciato e nondimeno esso al quale niuna cosa
e occulta piú alla purita del pregator riguardando che alla sua ignoranza o
allo essilio del pregato cosí come se quegli fosse nel suo cospetto beato
essaudisce coloro che 'l priegano il che manifestamente potra apparire nella
novella la quale di raccontare intendo manifestamente dico non il giudicio
di dio ma quel degli uomini seguitando"""
i += 1
if q == i:
#This text is under public domain
#Faust: Der Tragödie erster Teil
#Johann Wolfgang von Goethe
return u"""ihr naht euch wieder schwankende gestalten
die früh sich einst dem trüben blick gezeigt
versuch ich wohl euch diesmal festzuhalten
fühl ich mein herz noch jenem wahn geneigt
ihr drängt euch zu nun gut so mögt ihr walten
wie ihr aus dunst und nebel um mich steigt
mein busen fühlt sich jugendlich erschüttert
vom zauberhauch der euren zug umwittert
ihr bringt mit euch die bilder froher tage
und manche liebe schatten steigen auf
gleich einer alten halbverklungnen sage
kommt erste lieb und freundschaft mit herauf
der schmerz wird neu es wiederholt die klage
des lebens labyrinthisch irren lauf
und nennt die guten die um schöne stunden
vom glück getäuscht vor mir hinweggeschwunden
sie hören nicht die folgenden gesänge
die seelen denen ich die ersten sang
zerstoben ist das freundliche gedränge
verklungen ach der erste widerklang
mein lied ertönt der unbekannten menge
ihr beifall selbst macht meinem herzen bang
und was sich sonst an meinem lied erfreuet
wenn es noch lebt irrt in der welt zerstreuet
und mich ergreift ein längst entwöhntes sehnen
nach jenem stillen ernsten geisterreich
es schwebet nun in unbestimmten tönen
mein lispelnd lied der äolsharfe gleich
ein schauer faßt mich träne folgt den tränen
das strenge herz es fühlt sich mild und weich
was ich besitze seh ich wie im weiten
und was verschwand wird mir zu wirklichkeiten"""
i += 1
if q == i:
#This text is under public domain
#In der Fremde
#Heinrich Heine
return u"""es treibt dich fort von ort zu ort
du weißt nicht mal warum
im winde klingt ein sanftes wort
schaust dich verwundert um
die liebe die dahinten blieb
sie ruft dich sanft zurück
o komm zurück ich hab dich lieb
du bist mein einz'ges glück
doch weiter weiter sonder rast
du darfst nicht stillestehn
was du so sehr geliebet hast
sollst du nicht wiedersehn
du bist ja heut so grambefangen
wie ich dich lange nicht geschaut
es perlet still von deinen wangen
und deine seufzer werden laue
denkst du der heimat die so ferne
so nebelferne dir verschwand
gestehe mir's du wärest gerne
manchmal im teuren vaterland
denkst du der dame die so niedlich
mit kleinem zürnen dich ergötzt
oft zürntest du dann ward sie friedlich
und immer lachtet ihr zuletzt
denkst du der freunde die da sanken
an deine brust in großer stund'
im herzen stürmten die gedanken
jedoch verschwiegen blieb der mund
denkst du der mutter und der schwester
mit beiden standest du ja gut
ich glaube gar es schmilzt mein bester
in deiner brust der wilde mut
denkst du der vögel und der bäume
des schönen gartens wo du oft
geträumt der liebe junge träume
wo du gezagt wo du gehofft
es ist schon spät die nacht ist helle
trübhell gefärbt vom feuchten schnee
ankleiden muß ich mich nun schnelle
und in gesellschaft gehn o weh"""
i += 1
if q == i:
#This text is under public domain
#Le Bateau Ivre
#Arthur Rimbaud
return u"""comme je descendais des fleuves impassibles
je ne me sentis plus guidé par les haleurs
des peaux-rouges criards les avaient pris pour cibles
les ayant cloués nus aux poteaux de couleurs
j'étais insoucieux de tous les équipages
porteur de blés flamands ou de cotons anglais
quand avec mes haleurs ont fini ces tapages
les fleuves m'ont laissé descendre ou je voulais
dans les clapotements furieux des marées
moi l'autre hiver plus sourd que les cerveaux d'enfants
je courus et les péninsules démarrées
n'ont pas subi tohu-bohus plus triomphants
la tempete a béni mes éveils maritimes
plus léger qu'un bouchon j'ai dansé sur les flots
qu'on appelle rouleurs éternels de victimes
dix nuits sans regretter l'oeil niais des falots
plus douce qu'aux enfants la chair des pommes sures
l'eau verte pénétra ma coque de sapin
et des taches de vins bleus et des vomissures
me lava dispersant gouvernail et grappin
et des lors je me suis baigné dans le poeme
de la mer infusé d'astres et lactescent
dévorant les azurs verts ou flottaison bleme
et ravie un noyé pensif parfois descend
ou teignant tout a coup les bleuités délires
et rythmes lents sous les rutilements du jour
plus fortes que l'alcool plus vastes que nos lyres
fermentent les rousseurs ameres de l'amour
je sais les cieux crevant en éclairs et les trombes
et les ressacs et les courants je sais le soir
l'aube exaltée ainsi qu'un peuple de colombes
et j'ai vu quelque fois ce que l'homme a cru voir
j'ai vu le soleil bas taché d'horreurs mystiques
illuminant de longs figements violets
pareils a des acteurs de drames tres-antiques
les flots roulant au loin leurs frissons de volets
j'ai revé la nuit verte aux neiges éblouies
baiser montant aux yeux des mers avec lenteurs
la circulation des seves inouies
et l'éveil jaune et bleu des phosphores chanteurs
j'ai suivi des mois pleins pareille aux vacheries
hystériques la houle a l'assaut des récifs
sans songer que les pieds lumineux des maries
pussent forcer le mufle aux océans poussifs
j'ai heurté savez-vous d'incroyables florides
melant aux fleurs des yeux de pantheres a peaux
d'hommes des arcs-en-ciel tendus comme des brides
sous l'horizon des mers a de glauques troupeaux
j'ai vu fermenter les marais énormes nasses
ou pourrit dans les joncs tout un léviathan
des écroulement d'eau au milieu des bonaces
et les lointains vers les gouffres cataractant
glaciers soleils d'argent flots nacreux cieux de braises
échouages hideux au fond des golfes bruns
ou les serpents géants dévorés de punaises
choient des arbres tordus avec de noirs parfums
j'aurais voulu montrer aux enfants ces dorades
du flot bleu ces poissons d'or ces poissons chantants
- des écumes de fleurs ont bercé mes dérades
et d'ineffables vents m'ont ailé par instants
parfois martyr lassé des pôles et des zones
la mer dont le sanglot faisait mon roulis doux
montait vers moi ses fleurs d'ombres aux ventouses jaunes
et je restais ainsi qu'une femme a genoux
presque île balottant sur mes bords les querelles
et les fientes d'oiseaux clabaudeurs aux yeux blonds
et je voguais lorsqu'a travers mes liens freles
des noyés descendaient dormir a reculons
or moi bateau perdu sous les cheveux des anses
jeté par l'ouragan dans l'éther sans oiseau
moi dont les monitors et les voiliers des hanses
n'auraient pas repeché la carcasse ivre d'eau
libre fumant monté de brumes violettes
moi qui trouais le ciel rougeoyant comme un mur
qui porte confiture exquise aux bons poetes
des lichens de soleil et des morves d'azur
qui courais taché de lunules électriques
planche folle escorté des hippocampes noirs
quand les juillets faisaient crouler a coups de triques
les cieux ultramarins aux ardents entonnoirs
moi qui tremblais sentant geindre a cinquante lieues
le rut des béhémots et les maelstroms épais
fileur éternel des immobilités bleues
je regrette l'europe aux anciens parapets
j'ai vu des archipels sidéraux et des îles
dont les cieux délirants sont ouverts au vogueur
- est-ce en ces nuits sans fond que tu dors et t'exiles
million d'oiseaux d'or ô future vigueur -
mais vrai j'ai trop pleuré les aubes sont navrantes
toute lune est atroce et tout soleil amer
l'âcre amour m'a gonflé de torpeurs enivrantes
ô que ma quille éclate ô que j'aille a la mer
si je désire une eau d'europe c'est la flache
noire et froide ou vers le crépuscule embaumé
un enfant accroupi plein de tristesses lâche
un bateau frele comme un papillon de mai
je ne puis plus baigné de vos langueurs ô lames
enlever leur sillage aux porteurs de cotons
ni traverser l'orgueil des drapeaux et des flammes
ni nager sous les yeux horribles des pontons"""
i += 1
if q == i:
#This text is under public domain
#Le Masque
#Charles Baudelaire
return u"""contemplons ce trésor de grâces florentines
dans l'ondulation de ce corps musculeux
l'elégance et la force abondent soeurs divines
cette femme morceau vraiment miraculeux
divinement robuste adorablement mince
est faite pour trôner sur des lits somptueux
et charmer les loisirs d'un pontife ou d'un prince
aussi vois ce souris fin et voluptueux
ou la fatuité promene son extase
ce long regard sournois langoureux et moqueur
ce visage mignard tout encadré de gaze
dont chaque trait nous dit avec un air vainqueur
«la volupté m'appelle et l'amour me couronne»
a cet etre doué de tant de majesté
vois quel charme excitant la gentillesse donne
approchons et tournons autour de sa beauté
ô blaspheme de l'art ô surprise fatale
la femme au corps divin promettant le bonheur
par le haut se termine en monstre bicéphale
mais non ce n'est qu'un masque un décor suborneur
ce visage éclairé d'une exquise grimace
et regarde voici crispée atrocement
la véritable tete et la sincere face
renversée a l'abri de la face qui ment
pauvre grande beauté le magnifique fleuve
de tes pleurs aboutit dans mon coeur soucieux
ton mensonge m'enivre et mon âme s'abreuve
aux flots que la douleur fait jaillir de tes yeux
mais pourquoi pleure-t-elle elle beauté parfaite
qui mettrait a ses pieds le genre humain vaincu
quel mal mystérieux ronge son flanc d'athlete
elle pleure insensé parce qu'elle a vécu
et parce qu'elle vit mais ce qu'elle déplore
surtout ce qui la fait frémir jusqu'aux genoux
c'est que demain hélas il faudra vivre encore
demain apres-demain et toujours comme nous"""
i += 1
if q == i:
#This text is under public domain
#Nagyon fáj
#József Attila
return u"""kivül belõl
leselkedõ halál elõl
mint lukba megriadt egérke
amíg hevülsz
az asszonyhoz úgy menekülsz
hogy óvjon karja öle térde
nemcsak a lágy
meleg öl csal nemcsak a vágy
de odataszít a muszáj is
ezért ölel
minden ami asszonyra lel
míg el nem fehérül a száj is
kettõs teher
s kettõs kincs hogy szeretni kell
ki szeret s párra nem találhat
oly hontalan
mint amilyen gyámoltalan
a szükségét végzõ vadállat
nincsen egyéb
menedékünk a kés hegyét
bár anyádnak szegezd te bátor
és lásd akadt
nõ ki érti e szavakat
de mégis ellökött magától
nincsen helyem
így élõk közt zúg a fejem
gondom s fájdalmam kicifrázva
mint a gyerek
kezében a csörgõ csereg
ha magára hagyottan rázza
mit kellene
tenni érte és ellene
nem szégyellem ha kitalálom
hisz kitaszít
a világ így is olyat akit
kábít a nap rettent az álom
a kultúra
úgy hull le rólam mint ruha
másról a boldog szerelemben
de az hol áll
hogy nézze mint dobál halál
s még egyedül kelljen szenvednem
a csecsemõ
is szenvedi ha szül a nõ
páros kínt enyhíthet alázat
de énnekem
pénzt hoz fájdalmas énekem
s hozzám szegõdik a gyalázat
segítsetek
ti kisfiúk a szemetek
pattanjon meg ott õ ahol jár
ártatlanok
csizmák alatt sikongjatok
és mondjátok neki¨nagyon fáj
ti hû ebek
kerék alá kerüljetek
s ugassátok neki nagyon fáj
nõk terhetek
viselõk elvetéljetek
és sírjátok neki nagyon fáj
ép emberek
bukjatok öszetörjetek
s motyogjatok neki nagyon fáj
ti férfiak
egymást megtépve nõ miatt
ne hallgassátok el nagyon fáj
lovak bikák
kiket hogy húzzatok igát
herélnek ríjjátok nagyon fáj
néma halak
horgot kapjatok jég alatt
és tátogjatok rá nagyon fáj
elevenek
minden mi kíntól megremeg
égjen hol laktok kert vadon táj
s ágya körül
üszkösen ha elszenderül
vakogjatok velem nagyon fáj
hallja míg él
azt tagadta meg amit ér
elvonta puszta kénye végett
kivül belõl
menekülõ élõ elõl
a legutolsó menedéket"""
i += 1
if q == i:
#This text is under public domain
#Ómagyar-Mária siralom
#Ismeretlen
return u"""volek syrolm thudothlon syrolmol sepedyk buol ozuk epedek walasth vylagumtul
sydou fyodumtul ezes urumemtuul o en eses urodum eggen yg fyodum syrou aniath
thekunched buabeleul kyniuhhad scemem kunuel arad en iunhum buol farad the
werud hullothya en iunhum olelothya vylag uilaga viragnac uiraga keseruen
kynzathul uos scegegkel werethul vh nequem en fyon ezes mezuul scegenul
scepsegud wirud hioll wyzeul syrolmom fuhazatum therthetyk kyul en iumhumnok
bel bua qui sumha nym kyul hyul wegh halal engumet eggedum illen maraggun
urodum kyth wylag felleyn o ygoz symeonnok bezzeg scouuo ere en erzem ez
buthuruth kyt niha egyre tuled ualmun de num ualallal hul yg kynzassal fyom
halallal sydou myth thez turuentelen fyom merth hol byuntelen fugwa huztuzwa
wklelue kethwe ulud keguggethuk fyomnok ne leg kegulm mogomnok owog halal
kynaal anyath ezes fyaal egembelu ullyetuk"""
i += 1
if q == i:
#This text is under public domain
#Robinsono Kruso (Esperanto)
#Daniel Defoe
return u"""mi naskigxis en jorko anglujo je marto kiu estas la
sesjarrego de la regxo karolo la unua infane mi sentadis grandan
deziron por pasigi mian vivon sur la maro kaj pliagxante la deziro
plifortigxis gxis fine mi forlasis mian lernejon kaj hejmon kaj
piede mi trovis mian vojon al hull kie mi baldaux trovis okupadon sur
sxipo
post kiam ni velveturis kelke da tagoj okazis ventego kaj kvinanokte
la sxipo enfendigxis cxiuj al la pumpiloj rapidis la sxipon ni sentis
gxemi en cxiuj siaj tabuloj kaj gxian trabajxon ektremi de la antauxa gxis
la posta parto kaj baldaux klarigxis ke ne estas ia espero por gxi kaj
ke cxio kion ni povas fari estas savi niajn vivojn
unue ni pafadis pafilegojn por venigi helpon kaj post kelke da
tempo sxipo kusxante ne malproksime alsendis boaton por helpi nin sed
la maro estis tro maltrankvila por gxi restadi sxipflanke tial ni
eljxetis sxnuregon kiun la boatanoj ekkaptis kaj firme fiksis kaj
tiamaniere ni cxiuj enboatigxis
tamen vanigxis en tia maltrankvila maro por peni albordigxi la sxipon
kiu alsendis la virojn aux aluzi la remilojn de la boato kaj ni ne
povis ion fari krom gxin lasi peligxi teron
duonhore nia sxipo trafis rifon kaj subakvigxis kaj gxin ni ne vidis
plu tre malrapide ni alproksimigxis teron kiun iafoje ni vidis kiam
ajn la boato levigxis sur la supro de ia alta ondo kaj tie ni vidis
homojn kurante amase tien kaj reen havante unu celon savi nin
fine gxojege ni surterigxis kie bonsxance ni renkontis amikojn kiuj
donis al ni helpon por reveturi al hull kaj se tiam mi havus la
bonan sencon por iri hejmon estus pli bone por mi
la viro kies sxipo subakvigxis diris kun grava mieno junulo ne iru
plu surmaron tiu ne estas la vivmaniero por vi kial do sinjoro
vi mem iros plu surmaron tiu estas alia afero mi estas elnutrita
por la maro sed vi ne estas vi venis sur mian sxipon por eltrovi la
staton de vivo surmara kaj vi povas diveni tion kio okazos al vi se
vi ne reiros hejmon dio ne benos vin kaj eble vi kauxzis tiun-cxi
tutan malbonon al ni
mi ne parolis alian vorton al li kiun vojon li iris mi nek scias
nek deziris sciigxi cxar mi estis ofendita pro tiu-cxi malgxentila
parolado mi multe pensis cxu iri hejmon aux cxu iradi surmaron honto
detenis min pri iri hejmon kaj mi ne povis decidi la vivkuron kiun
mi estis ironta
kiel estis mia sorto travive cxiam elekti la plej malbonon tiel same
mi nun faris mi havis oron en mia monujo kaj bonan vestajxon sur mia
korpo sed surmaron mi ree iris
sed nun mi havis pli malbonan sxancon ol iam cxar kiam ni estis tre
malproksime enmaro kelke da turkoj en sxipeto plencxase alproksimigxis
al ni ni levis tiom da veloj kiom niaj velstangoj povis elporti por
ke ni forkuru de ili tamen malgraux tio ni vidis ke niaj malamikoj
pli kaj pli alproksimigxis kaj certigxis ke baldaux ili atingos nian
sxipon
fine ili atingis nin sed ni direktis niajn pafilegojn sur ilin kio
kauxzis portempe ke ili deflanku sian vojon sed ili dauxrigis pafadon
sur ni tiel longe kiel ili estis en pafspaco proksimigxante la duan
fojon kelkaj viroj atingis la ferdekon de nia sxipo kaj ektrancxis la
velojn kaj ekfaris cxiuspecajn difektajxojn tial post kiam dek el
niaj sxipanoj kusxas mortitaj kaj la plimulto el la ceteraj havas
vundojn ni kapitulacis
la cxefo de la turkoj prenis min kiel sian rabajxon al haveno okupita
de mauxroj li ne agis al mi tiel malbone kiel mi lin unue jugxis sed
li min laborigis kun la ceteraj de siaj sklavoj tio estis sxangxo en
mia vivo kiun mi neniam antauxvidis ho ve kiom mia koro malgxojis
pensante pri tiuj kiujn mi lasis hejme al kiuj mi ne montris tiom da
komplezemo kiom diri adiauxi kiam mi iris surmaron aux sciigi tion
kion mi intencas fari
tamen cxio kion mi travivis tiam estas nur antauxgusto de la penadoj
kaj zorgoj kiujn de tiam estis mia sorto suferi
unue mi pensis ke la turko kunprenos min kun si kiam li ree iros
surmaron kaj ke mi iel povos liberigxi sed la espero nelonge dauxris
cxar tiatempe li lasis min surtere por prizorgi liajn rikoltojn
tiamaniere mi vivis du jarojn tamen la turko konante kaj vidante min
plu min pli kaj pli liberigis li unufoje aux dufoje cxiusemajne
veturis en sia boato por kapti iajn platfisxojn kaj iafoje li
kunprenis min kaj knabon kun si cxar ni estas rapidaj cxe tia sporto
kaj tial li pli kaj pli sxatis min
unu tagon la turko elsendis min viron kaj knabon boate por kapti
kelke da fisxoj surmare okazas tia densa nebulo ke dekduhore ni ne
povas vidi la teron kvankam ni ne estas pli ol duonmejlon 00
metrojn de la terbordo kaj morgauxtage kiam la suno levigxis nia
boato estas enmaro almenaux dek mejlojn kilometrojn de la
terbordo la vento vigle blovis kaj ni cxiuj tre bezonis nutrajxon sed
fine per la helpo de remiloj kaj veloj ni sendangxere reatingis la
terbordon
kiam la turko sciigxis kiamaniere ni vojperdis li diris ke de nun
kiam li velveturos li prenos boaton kiu enhavos cxion kion ni
bezonus se ni longatempe estus detenataj surmare tial li farigis
grandan kajuton en la longboato de sia sxipo kiel ankaux cxambron por ni
sklavoj unu tagon li min sendis por ke mi ordigu la boaton pro tio
ke li havas du amikojn kiuj intencas veturi kun li por fisxkapti sed
kiam la tempo alvenis ili ne veturas tial li sendis min viron kaj
knabon -- kies nomo estas zuro -- por kapti kelke da fisxoj por la
gastoj kiuj estas vespermangxontaj kun li
subite eniris en mian kapon la ideo ke nun estas bona okazo boate
forkuri kaj liberigxi tial mi tuj prenis tiom da nutrajxo kiom mi
povas havigi kaj mi diris al la viro ke estus tro malrespekte
mangxante la panon metitan en la boaton por la turko li diris ke li
pensas tiel same tial li alportis sakon da rizo kaj kelke da ruskoj
kukoj
dum la viro estis surtere mi provizis iom da vino pecegon da vakso
segilon hakilon fosilon iom da sxnurego kaj cxiuspecajn objektojn
kiuj eble estos utilaj al ni mi sciis kie trovigxas la vinkesto de la
turko kaj mi gxin metis surboaton dum la viro estas surtere per alia
ruzo mi havigis cxion kion mi bezonis mi diris al la knabo la
pafiloj de la turko estas en la boato sed ne trovigxas ia pafajxo cxu
vi pensas ke vi povas havigi iom da gxi vi scias kie gxi estas
konservata kaj eble ni volos pafi birdon aux du li do alportis kesto
kaj saketon kiuj enhavas cxion kion ni eble bezonas por la pafiloj
tiujn-cxi mi metis surboaton kaj poste velveturis por fisxkapti
la vento blovis de la nordo aux nordokcidento tia vento estis malbona
por mi cxar se gxi estus de la sudo mi estus povinta velveturi al la
terbordo de hispanujo tamen de kiu ajn loko la vento blovos mi
estis decidinta forkuri kaj lasi la ceterajn al ilia sorto mi do
mallevis miajn hokfadenojn kvazaux fisxkapti sed mi zorgis ke mi havu
malbonan sukceson kaj kiam la fisxoj mordis mi ilin ne eltiris cxar
mi deziris ke la mauxro ilin ne vidu mi diris al li tiu-cxi loko
estas nebona ni ne kaptos fisxojn tie-cxi ni devas iom antauxen iri
nu la mauxro pensis ke tion fari ne estos malbone li levis la
velojn kaj cxar la direktilo estis en miaj manoj mi elsendis la
boaton unu mejlon aux plu enmaron kaj poste gxin haltigis kvazaux mi
intencas fisxkapti
nun mi pripensis tiu-cxi estas mia okazo liberigxi tial mi transdonis
la direktilon al la knabo kaj tiam ekprenis la mauxron cxirkaux la
talio kaj eljxetis lin el la boato
malsupren li falis sed baldaux reaperis por ke li povis nagxi kvazaux
anaso li diris ke li volonte irus cxirkaux la mondo kun mi se mi
enprenus lin
iom timante ke li surrampos la boatflankon kaj reenigxos perforte mi
direktis mian pafilon sur lin kaj diris vi facile povas nagxi
alteron se vi tion deziras tial rapidigxu tien plie se vi reen
alproksimigxos la boaton vi ricevos kuglon tra la kapo cxar mi de nun
intencas esti libera viro
tiam li eknagxis kaj sendube sendangxere atingis la terbordon cxar la
maro estis tre trankvila
unue mi intencis kunpreni la mauxron kun mi kaj nagxigi zuron alteron
sed la mauxro ne estis viro pri kiu mi povis konfidi
post kiam li forigxis mi diris al zuro se vi jxuros ke vi estos
fidela al mi vi iam farigxos grava viro se vi ne jxuros mi certe
ankaux vin eljxetos el la boato
la knabo tiel dolcxe ridetis kiam li jxuris resti fidela al mi ke mi
lin ne povis dubi en mia koro
dum ankoraux ni povis vidi la mauxron survoje alteren ni antauxen iris
enmaron por ke li kaj tiuj kiuj nin vidis de la terbordo kredu ke
ni iros al la influejo de la markolo cxar neniu velveturis al la suda
marbordo cxar tie logxas gento da homoj kiuj laux sciigoj mortigas kaj
mangxas siajn malamikojn
tiam mi direktis mian veturadon oriente por ke ni lauxlongiru la
marbordon kaj havante favoron venton kaj trankvilan maron ni
morgauxtagmeze estis malapud kaj preter la povo de la turko
ankoraux mi timis ke mi estus kaptota de la mauxroj tial mi ne volis
iri surteron tage duonlume ni direktis nian boaton alteren kaj
atingis la enfluejon riveretan de kiu mi pensis ni povos nagxi
surteron kaj tiam rigardi la cxirkauxajxojn sed kiam malheligxis la
lumo ni auxdis strangajn sonojn bojojn kriegojn gruntojn
blekadojn la malfelicxa knabo diris ke li ne kuragxas iri surteron
antaux la tagigxo nu mi diris tiuokaze ni atendu sed tage
povas vidi nin la homoj kiuj eble nin pli malhelpos ol sovagxaj
bestoj tiam ni pafilos ilin ridante diris zuro kaj forkurigu
ilin
mi gxojis vidi ke la knabo montras tiom da gajeco kaj mi donis al li
iom da pano kaj rizo tiunokte ni silente ripozis sed ne longe
dormis cxar post kelke da horoj iaj grandegaj bestoj malsuprenvenis
al la maro por sin bani la malfelicxa knabo ektremis de kapo al
piedoj pro la vidajxo unu el tiuj bestoj alproksimigxis nian boaton
kaj kvankam estis tro mallume por gxin bone vidi ni auxdis gxin blovi
kaj sciis pro gxia bruego ke gxi certe estas granda fine la bruto
tiom alproksimigxis la boaton kiom la longeco de du remiloj tial mi
pafis sur gxin kaj gxi nagxis alteren
la blekegoj kaj kriegoj kiujn faris bestoj kaj birdoj pro la bruo de
mia pafilo sxajne montris ke ni faris malbonan elekton por surterejo
sed vole ne vole ni devis iri surtere por sercxi fresxan fonton por
ke ni povu plenigi niajn barelojn zuro diris ke li eltrovus cxu la
fontaj akvoj tauxgas por trinki se mi permesus al li preni unu el la
botelegoj kaj ke li gxin reportos plenigitan se la akvo estas bona
kial vi volas iri mi diris kial mi ne estas ironta vi povas
resti en la boato kontrauxe zuro diris se la sovagxuloj venos ili
min mangxu sed vi forkuru mi devis ami la junulon pro la afabla
parolado nu mi diris ni ambaux iros kaj se la sovagxuloj venos
ni ilin mortigu ja ili ne mangxos aux vin aux min
mi donis al zuro iom da rumo el la kesto de la turko por reforti lin
kaj ni iris surteron la knabo ekiris kun sia pafilo mejlon de la
loko kie ni surteriris kaj li revenis kun leporo kiun li mortpafis
kaj kiun ni gxoje kuiris kaj mangxis laux la bona novajxo kiun li
raportis li eltrovis fonton kaj ne vidis sovagxulojn
mi divenis ke la promontoro de la verdaj insuloj ne estas
malproksime cxar mi vidis la supron de la granda pinto kiun kiel mi
sciis estas apud ili mia sola espero estis ke lauxlongirante la
terbordon ni trovos sxipon kiu ensxipigos nin kaj tiam kaj ne antaux
tiam mi sentos kvazaux libera viro unuvorte mi konfidis mian sorton
al la sxanco aux renkonti ian sxipon aux morti
surteron ni ekvidis iujn homojn kiuj staras kaj rigardas nin ili
estis nigraj kaj ne portis vestajxon mi estus irinta surteron al ili
sed zuro -- kiu sciis plej bone -- diris ne vi iru ne vi iru tial
mi direktis la boaton lauxteron por ke mi povu paroli kun ili kaj ili
longaspace iradis laux ni mi ekvidis ke unu havas lancon en mano
mi faris signojn ke ili alportu iom da nutrajxo al mi kaj ili
siaparte faris signojn ke mi haltu mian boaton tial mi demetis la
supran parton de mia velo kaj haltis tiam du el ili ekforkuris kaj
duonhore revenis kun iom da sekigxita viando kaj ia greno kiu kreskas
en tiu parto de la mondo tion-cxi ni deziregis sed ne sciis kiel
havigi gxin cxar ni ne kuragxis iri surteron al ili nek ili kuragxis
alproksimigxi al ni
fine ili eltrovis peron sendangxeran por ni cxiuj alportante la
nutrajxon al la marbordo ili gxin demetis kaj tre fortirigis si mem dum
ni gxin prenis ni faris signojn por montri nian dankon ne havante ion
alian kion ni povas doni al ili sed bonsxance ni baldaux kaptis
grandan donacon por ili cxar du sovagxaj bestoj de la sama speco pri
kiu mi jam priparolis venis plencxase de la montetoj al la maro
ili nagxis kvazaux ili venis por sportigi cxiuj forkuris de ili krom
tiu kiu portas la lancon unu el tiuj bestoj alproksimigxis nian
boaton tial mi gxin atendis kun mia pafilo kaj tuj kiam gxi estis en
pafspaco mi gxin pafis tra la kapo dufoje gxi subakvigxis kaj dufoje gxi
suprenlevigxis kaj poste gxi nagxis alteren kaj falis senviva la viroj
tiom timis pro la pafilbruo kiom ili antauxe timis je la vidajxo de la
bestoj sed kiam mi faris signojn por ke ili venu al la marbordo ili
tuj venis
ili rapidis al sia rabajxo kaj tordante cxirkaux gxi sxnuregon ili gxin
sendangxere eltiris surteron
ni nun lasis niajn sovagxulojn kaj iradis dekdu tagojn plu la terbordo
antaux ni etendis sin kvar aux kvin mejlojn aux kilometrojn
bekforme kaj ni devis veturi iom de la terbordo por atingi tiun
terpinton tiel ke ni portempe ne vidis teron
mi konfidis la direktilon al zuro kaj sidigxis por pripensi tion kion
estos plej bone nun fari kiam subite mi auxdis ke la knabo krias
sxipon kun velo sxipon kun velo li ne montris multe da gxojo je la
vidajxo opiniante ke la sxipo venis por repreni lin sed mi bone
scias laux la sxajno ke gxi ne estas iu el la sxipoj de la turko
mi levis kiel eble plej multe da veloj por renkonti la sxipon gxiavoje
kaj ordonis al zuro ke li ekpafu pafilon cxar mi esperis ke se tiuj
kiuj estas sur la ferdeko ne povus auxdi la sonon ili vidus la
fumigadon ili ja gxin vidis kaj tuj demetis siajn velojn por ke ni
povu atingi ilin kaj trihore ni estis cxe la sxipflanko la viroj
parolis kun ni per la franca lingvo sed ni ne povis kompreni tion
kion ili diras fine skoto sursxipe diris per mia lingvo kiu vi
estas de kien vi venas mi diris al li iomvorte kiel mi liberigxis
de la mauxroj
tiam la sxipestro invitis min veni sxipbordon kaj ensxipis min zuron
kaj cxiujn miajn posedajxojn mi diris al li ke li havu cxion kion mi
havas sed li respondis vi estas rericevonta viajn posedajxojn post
kiam ni atingos teron cxar mi por vi nur faris tion kion por mi vi
farus samstate
li pagis al mi multan monon por mia boato kaj diris ke mi ricevos
egalan monon por zuro se mi lin fordonus sed mi diris al li ke
liberigxinte kun helpo de la knabo mi lin ne volas vendi li diris ke
estas juste kaj prave por mi tiel senti sed se mi decidus fordoni
zuron li estus liberigota dujare tial cxar la sklavo deziris iri mi
nenial diris ne trisemajne mi alvenis al cxiuj sanktuloj golfeto kaj
nun mi estis liberulo
mi ricevis multan monon por cxiujn miaj posedajxoj kaj kun gxi mi iris
surteron sed mi tute ne sciis kion nun fari fine mi renkontis
viron kies stato estas laux la mia kaj ni ambaux akiris pecon da tero
por gxin prilabori mia farmilaro laux la lia estis malgranda sed ni
produktigis la farmojn suficxe por subteni nin sed ne plu ni bezonis
helpon kaj nun mi eksentis ke mi eraris ellasante la knabon
mi tute ne sxatis tiun manieron de vivo kion mi pensis cxu mi venis
tian longan vojon por fari tion kion mi lauxbone povus fari hejme kaj
kun miaj parencoj cxirkaux mi kaj pligrandigxis mia malgxojo cxar la
bonamiko kiu min alsxipis tien-cxi intencas nune lasi tiun-cxi
terbordon
kiam mi estis knabo kaj ekiris surmaron mi metis en la manojn de mia
onklino iom da mono pri kiu mia bonamiko diris ke mi bone farus se
mi gxin elspezus pro mia bieno tial post kiam li revenis hejmon li
alsendis iom da gxi kontante kaj la restajxon kiel tukoj sxtofoj
lanajxoj kaj similajxoj kiujn li acxetis mia onklino tiam metis en
liajn manojn iom da livroj kiel donaco al li por montri sian
dankecon pro cxio kion li faris por mi kaj per tiu mono li afable
acxetis sklavon por mi intertempe mi jam acxetis sklavon tial mi nun
havas du kaj cxio prosperis dum la sekvanta jaro"""
i += 1
if q == i:
#This text is under public domain
#The Raven
#Edgar Allan Poe
return u"""once upon a midnight dreary while i pondered weak and weary
over many a quaint and curious volume of forgotten lore
while i nodded nearly napping suddenly there came a tapping
as of some one gently rapping rapping at my chamber door
tis some visiter i muttered tapping at my chamber door
only this and nothing more
ah distinctly i remember it was in the bleak december
and each separate dying ember wrought its ghost upon the floor
eagerly i wished the morrow vainly i had sought to borrow
from my books surcease of sorrowsorrow for the lost lenore
for the rare and radiant maiden whom the angels name lenore
nameless here for evermore
and the silken sad uncertain rustling of each purple curtain
thrilled mefilled me with fantastic terrors never felt before
so that now to still the beating of my heart i stood repeating
tis some visiter entreating entrance at my chamber door
some late visiter entreating entrance at my chamber door
this it is and nothing more
presently my soul grew stronger hesitating then no longer
sir said i or madam truly your forgiveness i implore
but the fact is i was napping and so gently you came rapping
and so faintly you came tapping tapping at my chamber door
that i scarce was sure i heard youhere i opened wide the door
darkness there and nothing more
deep into that darkness peering long i stood there wondering fearing
doubting dreaming dreams no mortals ever dared to dream before
but the silence was unbroken and the stillness gave no token
and the only word there spoken was the whispered word lenore
this i whispered and an echo murmured back the word lenore
merely this and nothing more
back into the chamber turning all my soul within me burning
soon again i heard a tapping something louder than before
surely said i surely that is something at my window lattice
let me see then what thereat is and this mystery explore
let my heart be still a moment and this mystery explore
tis the wind and nothing more
open here i flung the shutter when with many a flirt and flutter
in there stepped a stately raven of the saintly days of yore
not the least obeisance made he not a minute stopped or stayed he
but with mien of lord or lady perched above my chamber door
perched upon a bust of pallas just above my chamber door
perched and sat and nothing more
then the ebony bird beguiling my sad fancy into smiling
by the grave and stern decorum of the countenance it wore
though thy crest be shorn and shaven thou i said art sure no craven
ghastly grim and ancient raven wandering from the nightly shore
tell me what thy lordly name is on the nights plutonian shore
quoth the raven nevermore
much i marvelled this ungainly fowl to hear discourse so plainly
though its answer little meaninglittle relevancy bore
for we cannot help agreeing that no living human being
ever yet was blessed with seeing bird above his chamber door
bird or beast upon the sculptured bust above his chamber door
with such name as nevermore
but the raven sitting lonely on that placid bust spoke only
that one word as if its soul in that one word he did outpour
nothing farther then he uttered not a feather then he fluttered
till i scarcely more than muttered other friends have flown before
on the morrow he will leave me as my hopes have flown before
then the bird said nevermore
startled at the stillness broken by reply so aptly spoken
doubtless said i what it utters is its only stock and store
caught from some unhappy master whom unmerciful disaster
followed fast and followed faster till his songs one burden bore
till the dirges of his hope that melancholy burden bore
of nevernevermore
but the raven still beguiling all my sad soul into smiling
straight i wheeled a cushioned seat in front of bird and bust and door
then upon the velvet sinking i betook myself to linking
fancy unto fancy thinking what this ominous bird of yore
what this grim ungainly ghastly gaunt and ominous bird of yore
meant in croaking nevermore
this i sat engaged in guessing but no syllable expressing
to the fowl whose fiery eyes now burned into my bosoms core
this and more i sat divining with my head at ease reclining
on the cushions velvet lining that the lamplight gloated oer
but whose velvet violet lining with the lamplight gloating oer
she shall press ah nevermore
then methought the air grew denser perfumed from an unseen censer
swung by seraphim whose footfalls tinkled on the tufted floor
wretch i cried thy god hath lent theeby these angels he hath sent thee
respiterespite and nepenthe from thy memories of lenore
quaff oh quaff this kind nepenthe and forget this lost lenore
quoth the raven nevermore
prophet said i thing of evilprophet still if bird or devil
whether tempter sent or whether tempest tossed thee here ashore
desolate yet all undaunted on this desert land enchanted
on this home by horror hauntedtell me truly i implore
is thereis there balm in gileadtell metell me i implore
quoth the raven nevermore
prophet said i thing of evilprophet still if bird or devil
by that heaven that bends above usby that god we both adore
tell this soul with sorrow laden if within the distant aidenn
it shall clasp a sainted maiden whom the angels name lenore
clasp a rare and radiant maiden whom the angels name lenore
quoth the raven nevermore
be that our sign of parting bird or fiend i shrieked upstarting
get thee back into the tempest and the nights plutonian shore
leave no black plume as a token of that lie thy soul has spoken
leave my loneliness unbrokenquit the bust above my door
take thy beak from out my heart and take thy form from off my door
quoth the raven nevermore
and the raven never flitting still is sitting still is sitting
on the pallid bust of pallas just above my chamber door
and his eyes have all the seeming of a demons that is dreaming
and the lamplight oer him streaming throws his shadows on the floor
and my soul from out that shadow that lies floating on the floor
shall be lifted nevermore"""
i += 1
if q == i:
#This text is under public domain
#Tierra y Luna
#Federico García Lorca
return u"""me quedo con el transparente hombrecillo
que come los huevos de la golondrina
me quedo con el nino desnudo
que pisotean los borrachos de brooklyn
con las criaturas mudas que pasan bajo los arcos
con el arroyo de venas ansioso de abrir sus manecitas
tierra tan sólo tierra
tierra para los manteles estremecidos
para la pupila viciosa de nube
para las heridas recientes y el húmedo pensamiento
tierra para todo lo que huye de la tierra
no es la ceniza en vilo de las cosas quemadas
ni los muertos que mueven sus lenguas bajo los árboles
es la tierra desnuda que bala por el cielo
y deja atrás los grupos ligeros de ballenas
es la tierra alegrísima imperturbable nadadora
la que yo encuentro en el nino y en las criaturas que pasan los arcos
viva la tierra de mi pulso y del baile de los helechos
que deja a veces por el aire un duro perfil de faraón
me quedo con la mujer fría
donde se queman los musgos inocentes
me quedo con los borrachos de brooklyn
que pisan al nino desnudo
me quedo con los signos desgarrados
de la lenta comida de los osos
pero entonces baja la luna despenada por las escaleras
poniendo las ciudades de hule celeste y talco sensitivo
llenando los pies de mármol la llanura sin recodos
y olvidando bajo las sillas diminutas carcajadas de algodón
oh diana diana diana vacía
convexa resonancia donde la abeja se vuelve loca
mi amor de paso tránsito larga muerte gustada
nunca la piel ilesa de tu desnudo huido
es tierra dios mío tierra lo que vengo buscando
embozo de horizonte latido y sepultura
es dolor que se acaba y amor que se consume
torre de sangre abierta con las manos quemadas
pero la luna subía y bajaba las escaleras
repartiendo lentejas desangradas en los ojos
dando escobazos de plata a los ninos de los muelles
y borrando mi apariencia por el término del aire"""
i += 1
if q == i:
#This text is under public domain
#Hemsöborna
#August Strindberg (1912-1921)
return u"""han kom som ett yrväder en aprilafton och hade
ett höganäskrus i en svångrem om halsen clara
och lotten voro inne med skötekan att hämta
honom på dalarö brygga men det dröjde evigheter
innan de kommo i båt de skulle till handelsman
och ha en tunna tjära och på abeteket och hämta
gråsalva åt grisen och så skulle de på posten och
få ett frimärke och så skulle de ner till fia lövström
i kroken och låna tuppen mot ett halvpund
småtärna till notbygget och sist hade de hamnat på
gästgivaregården där carlsson bjudit på kaffe med
dopp och så kommo de äntligen i båt men carlsson
ville styra och det kunde han inte för han hade
aldrig sett en råseglare förr och därför skrek han
att de skulle hissa focken som inte fanns
och på tullbryggan stodo lotsar och vaktmästare
och grinade åt manövern när ekan gick över stag
och länsade ner åt saltsäcken
hörru du har hål i båten skrek en lotslärling
genom vinden stopp till stopp till och medan
carlsson tittade efter hålen hade clara knuffat
undan honom och tagit roret och med årorna lyckades
lotten få ekan opp i vinden igen så att nu strök
det ner åt aspösund med god gång
carlsson var en liten fyrkantig värmländing med
blå ögon och näsa krokig som en syskonhake livlig
lekfull och nyfiken var han men sjöaffärerna förstod
han inte alls och han var också kallad ut till
hemsö för att ta hand om åker och kreatur som
ingen annan ville ta befattning med sedan gubben
flod gått ur livet och änkan satt ensam vid gården"""
# if we got this far - let's return this crap (instead of throwing)
return u"""fubar1 fubar2 fubar3
foobar1 foobar2 foobar3 foobar4 foobar5
baroba1
raboof1"""
from optparse import OptionParser
def do_parse():
    """Parse and validate the command-line options for the lorem tool.

    Returns the ``(options, args)`` pair from OptionParser after coercing
    -n/-l/-c/-q to non-negative integers. Exits via ``parser.error()`` on
    any invalid, negative or missing argument.
    """
    usage = """Usage: lorem [-n|-l|-c] N [-q M]?
where
N and M are integers
Note: If -n -l and/or -c are combined -l has priority over -c that has
priority over -n.
Examples: lorem -n 10
Get 10 words of lorem.
lorem -l 5
Get 5 lines of lorem
lorem -c 79
Get 79 characters of lorem
lorem -l 5 -q 11
Get 5 lines of lorem from quote 11
License: Copyright (C) 2007 Per Erik Strandberg
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under the GNU GENERAL PUBLIC LICENSE Version 3.
"""
    # program/version/copyright/license are module-level constants defined
    # elsewhere in this file.
    versioninfo = "%s version %s\n%s\n\n%s" % (program, version,
                                               copyright, license)
    parser = OptionParser(usage=usage, version=versioninfo)
    parser.add_option('-n', dest='n', help="Number of Words", default=0)
    parser.add_option('-l', dest='l', help="Number of Lines", default=0)
    parser.add_option('-c', dest='c', help="Number of Chars", default=0)
    parser.add_option('-q', dest='q', help="Quote index (0+)", default=0)
    (options, args) = parser.parse_args()
    # Coerce the raw option strings to int; only ValueError means bad user
    # input (the original bare 'except:' also hid SystemExit and friends).
    try:
        options.n = int(options.n)
        options.l = int(options.l)
        options.c = int(options.c)
        options.q = int(options.q)
    except ValueError:
        parser.error("At least one of the arguments is not an integer.")
    for m in [options.n, options.l, options.c, options.q]:
        if m < 0:
            parser.error("Negative argument.")
    if options.n == 0 and options.l == 0 and options.c == 0:
        parser.error("No arguments, try 'lorem -n 25'.")
    return (options, args)
def do_lorem(n=0, l=0, c=0, q=0):
    """Return filler text from quote *q* as n words, l lines or c chars.

    Priority: lines (l) beats chars (c) beats words (n). The source text
    is cycled as often as needed to satisfy the requested amount.
    """
    lorem = get_lorem(q)
    res = ''
    if l != 0:
        # Emit whole lines; collapse the blank paragraph separators first.
        lorem = lorem.replace('\n\n', '\n')
        lines = lorem.split('\n')
        while l:
            if l > len(lines):
                l1 = len(lines)
                l -= l1
            else:
                l1 = l
                l = 0
            for i in range(l1):
                res += lines[i].strip() + '\n'
    elif c != 0:
        # Emit an exact number of characters; newlines become spaces and
        # the resulting double spaces are collapsed.
        chars = lorem
        chars = chars.replace('\n', ' ')
        chars = chars.replace('  ', ' ')
        outchars = []
        while c:
            if c > len(chars):
                c1 = len(chars)
                c -= c1
                outchars.append(chars)
            else:
                c1 = c
                c = 0
                outchars.append(chars[0:c1])
        # str.join works on both Python 2 and 3; the original relied on a
        # bare 'join' name that only exists via an implicit star-import.
        res += ''.join(outchars) + '\n'
    else:
        # Emit single words separated by spaces.
        words = lorem.replace('\n\n', '\n')
        words = words.replace('\n', ' ')
        words = words.replace('  ', ' ')
        words = words.split(' ')
        while n:
            if n > len(words):
                n1 = len(words)
                n -= n1
            else:
                n1 = n
                n = 0
            for i in range(n1):
                res += words[i] + ' '
    return res
|
davidzchen/samza | refs/heads/master | samza-test/src/main/python/integration_tests.py | 25 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# 'License'); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
# Absolute directory containing this script, so the descriptor below works
# regardless of the current working directory. NOTE(review): 'dir' shadows
# the builtin of the same name — confirm no importer relies on this name
# before renaming.
dir = os.path.dirname(os.path.abspath(__file__))
# Integration-test-suite descriptor: deployment logic, performance probes,
# the config directory and the test modules to execute. Presumably consumed
# by the test runner that imports this module — verify against the harness.
test = {
  'deployment_code': os.path.join(dir, 'deployment.py'),
  'perf_code': os.path.join(dir, 'perf.py'),
  'configs_directory': os.path.join(dir, 'configs'),
  'test_code': [
    os.path.join(dir, 'tests', 'smoke_tests.py'),
    os.path.join(dir, 'tests', 'performance_tests.py'),
  ],
}
|
flavour/helios | refs/heads/master | modules/geopy/geocoders/__init__.py | 26 | from geopy.geocoders_old import *
|
iovation/launchkey-python | refs/heads/master | features/steps/directory_session_steps.py | 2 | from uuid import uuid4
from behave import given, when, then
# Delete session
@when("I delete the Sessions for the current User")
def delete_session_for_current_user(context):
    """End every Session of the current User in the current Directory."""
    directory = context.entity_manager.get_current_directory()
    identifier = context.entity_manager.get_current_user_identifier()
    manager = context.directory_session_manager
    manager.end_all_sessions_for_user(identifier, directory.id)
@when("I attempt to delete the Sessions for the User \"{user_identifier}\"")
def attempt_to_delete_session_for_given_user(context, user_identifier):
    """Try to end Sessions for the named User, capturing any raised error."""
    directory = context.entity_manager.get_current_directory()
    try:
        context.directory_session_manager.end_all_sessions_for_user(
            user_identifier, directory.id)
    except Exception as exc:
        # Stash the failure so a later Then-step can assert on it.
        context.current_exception = exc
# Retrieve session
@when("I retrieve the Session list for the current User")
def retrieve_session_for_current_user(context):
    """Fetch the Session list of the current User in the current Directory."""
    directory = context.entity_manager.get_current_directory()
    identifier = context.entity_manager.get_current_user_identifier()
    manager = context.directory_session_manager
    manager.retrieve_session_list_for_user(identifier, directory.id)
@when("I attempt to retrieve the Session list for the User "
"\"{user_identifier}\"")
def attempt_to_retrieve_session_list_for_user_identifier(context,
                                                         user_identifier):
    """Try to fetch the Session list for the named User; capture any error."""
    directory = context.entity_manager.get_current_directory()
    try:
        context.directory_session_manager.retrieve_session_list_for_user(
            user_identifier, directory.id)
    except Exception as exc:
        # Stash the failure so a later Then-step can assert on it.
        context.current_exception = exc
@then("the Service User Session List has {count:d} Sessions")
def verify_service_user_session_has_count_sessions(context, count):
    """Assert the current Directory User's Session list has *count* entries.

    Raises Exception with the actual and expected counts on mismatch.
    """
    sessions_list = context.entity_manager.\
        get_current_directory_user_sessions()
    if len(sessions_list) != count:
        # BUGFIX: the original interpolated with `len(sessions_list, count)`,
        # which is itself a TypeError (len takes one argument), so the real
        # mismatch message was never produced. Interpolate a proper tuple.
        raise Exception("Session list length was %s when it was "
                        "expected to be %s" % (len(sessions_list), count))
|
GrizliK1988/PhotoWidget | refs/heads/master | src/JuliaDima/PhotoGalleryWidgetBundle/Resources/public/vendors/jQueryFileUpload/server/gae-python/main.py | 6 | # -*- coding: utf-8 -*-
#
# jQuery File Upload Plugin GAE Python Example 2.0.1
# https://github.com/blueimp/jQuery-File-Upload
#
# Copyright 2011, Sebastian Tschan
# https://blueimp.net
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
from __future__ import with_statement
from google.appengine.api import files, images
from google.appengine.ext import blobstore, deferred
from google.appengine.ext.webapp import blobstore_handlers
import json
import re
import urllib
import webapp2
# Demo site users are redirected to on a plain GET of the upload endpoint.
WEBSITE = 'http://blueimp.github.io/jQuery-File-Upload/'
MIN_FILE_SIZE = 1 # bytes
MAX_FILE_SIZE = 5000000 # bytes
# Only images are accepted; 'pjpeg'/'x-png' are legacy IE MIME spellings.
IMAGE_TYPES = re.compile('image/(gif|p?jpeg|(x-)?png)')
ACCEPT_FILE_TYPES = IMAGE_TYPES
THUMBNAIL_MODIFICATOR = '=s80' # max width / height
# Uploaded blobs are scheduled for deletion after this many seconds.
EXPIRATION_TIME = 300 # seconds
def cleanup(blob_keys):
    """Delete the given blobstore keys (run deferred after EXPIRATION_TIME)."""
    blobstore.delete(blob_keys)
class UploadHandler(webapp2.RequestHandler):
    """Accepts multipart file uploads, stores them in the blobstore.

    Every stored blob is scheduled for deferred deletion after
    EXPIRATION_TIME seconds (this backs a public demo service).
    """

    def initialize(self, request, response):
        """Attach permissive CORS headers to every response."""
        super(UploadHandler, self).initialize(request, response)
        self.response.headers['Access-Control-Allow-Origin'] = '*'
        self.response.headers[
            'Access-Control-Allow-Methods'
        ] = 'OPTIONS, HEAD, GET, POST, PUT, DELETE'
        self.response.headers[
            'Access-Control-Allow-Headers'
        ] = 'Content-Type, Content-Range, Content-Disposition'

    def validate(self, file):
        """Check size limits and MIME type; set file['error'] on failure.

        Returns True when the file may be stored, False otherwise.
        """
        if file['size'] < MIN_FILE_SIZE:
            file['error'] = 'File is too small'
        elif file['size'] > MAX_FILE_SIZE:
            file['error'] = 'File is too big'
        elif not ACCEPT_FILE_TYPES.match(file['type']):
            file['error'] = 'Filetype not allowed'
        else:
            return True
        return False

    def get_file_size(self, file):
        """Return the size in bytes of an open file object."""
        file.seek(0, 2)  # Seek to the end of the file
        size = file.tell()  # Get the position of EOF
        file.seek(0)  # Reset the file position to the beginning
        return size

    def write_blob(self, data, info):
        """Write *data* to a new blobstore file and return its blob key."""
        blob = files.blobstore.create(
            mime_type=info['type'],
            _blobinfo_uploaded_filename=info['name']
        )
        with files.open(blob, 'a') as f:
            f.write(data)
        files.finalize(blob)
        return files.blobstore.get_blob_key(blob)

    def handle_upload(self):
        """Store every posted file and return a list of result dicts."""
        results = []
        blob_keys = []
        for name, fieldStorage in self.request.POST.items():
            # Plain form fields arrive as unicode; only file fields matter.
            if type(fieldStorage) is unicode:
                continue
            result = {}
            # Strip any Windows-style path prefix from the client filename.
            result['name'] = re.sub(
                r'^.*\\',
                '',
                fieldStorage.filename
            )
            result['type'] = fieldStorage.type
            result['size'] = self.get_file_size(fieldStorage.file)
            if self.validate(result):
                blob_key = str(
                    self.write_blob(fieldStorage.value, result)
                )
                blob_keys.append(blob_key)
                result['delete_type'] = 'DELETE'
                result['delete_url'] = self.request.host_url +\
                    '/?key=' + urllib.quote(blob_key, '')
                if (IMAGE_TYPES.match(result['type'])):
                    try:
                        result['url'] = images.get_serving_url(
                            blob_key,
                            secure_url=self.request.host_url.startswith(
                                'https'
                            )
                        )
                        result['thumbnail_url'] = result['url'] +\
                            THUMBNAIL_MODIFICATOR
                    # Narrowed from a bare 'except:' so KeyboardInterrupt /
                    # SystemExit are no longer swallowed; the best-effort
                    # fallback below still applies.
                    except Exception:  # Could not get an image serving url
                        pass
                if 'url' not in result:
                    result['url'] = self.request.host_url +\
                        '/' + blob_key + '/' + urllib.quote(
                            result['name'].encode('utf-8'), '')
            results.append(result)
        # Demo clean-up: delete everything we just stored after a delay.
        deferred.defer(
            cleanup,
            blob_keys,
            _countdown=EXPIRATION_TIME
        )
        return results

    def options(self):
        pass

    def head(self):
        pass

    def get(self):
        self.redirect(WEBSITE)

    def post(self):
        # Browsers without DELETE support tunnel it through POST.
        if (self.request.get('_method') == 'DELETE'):
            return self.delete()
        result = {'files': self.handle_upload()}
        s = json.dumps(result, separators=(',', ':'))
        redirect = self.request.get('redirect')
        if redirect:
            return self.redirect(str(
                redirect.replace('%s', urllib.quote(s, ''), 1)
            ))
        if 'application/json' in self.request.headers.get('Accept'):
            self.response.headers['Content-Type'] = 'application/json'
        self.response.write(s)

    def delete(self):
        blobstore.delete(self.request.get('key') or '')
class DownloadHandler(blobstore_handlers.BlobstoreDownloadHandler):
    """Serves stored blobs back to the client with a public cache header."""

    def get(self, key, filename):
        if blobstore.get(key):
            # The blob exists for at most EXPIRATION_TIME seconds, so it is
            # safe to let clients cache it for that long.
            self.response.headers['Cache-Control'] =\
                'public,max-age=%d' % EXPIRATION_TIME
            self.send_blob(key, save_as=filename)
        else:
            self.error(404)
# WSGI entry point: uploads go to the root URL, downloads to /<key>/<name>.
app = webapp2.WSGIApplication(
    [
        ('/', UploadHandler),
        ('/([^/]+)/([^/]+)', DownloadHandler)
    ],
    debug=True
)
|
nextgis/ngq_compulink | refs/heads/master | qgis-installer/customization-conf/plugins/identifyplus/ngw_external_api_python/tests/test_ngw_feature.py | 2 | import os, sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from core.ngw_connection_settings import NGWConnectionSettings
from core.ngw_connection import NGWConnection
from core.ngw_resource_factory import NGWResourceFactory
from core.ngw_resource import NGWResource
from core.ngw_vector_layer import NGWVectorLayer
from core.ngw_feature import NGWFeature
from core.ngw_attachment import NGWAttachment
if __name__=="__main__":
    # Ids of an existing vector layer resource and feature on the demo server.
    ngw_resources_id = 1881
    ngw_feature_id = 159
    # NOTE(review): credentials are hard-coded for the public demo instance;
    # fine for this manual test script, never for production code.
    ngwConnectionSettings = NGWConnectionSettings("ngw", "http://demo.nextgis.ru/ngw", "administrator", "admin")
    ngwConnection = NGWConnection(ngwConnectionSettings)
    ngwResourceFactory = NGWResourceFactory(ngwConnectionSettings)
    ngwResource = NGWVectorLayer(ngwResourceFactory, NGWResource.receive_resource_obj(ngwConnection, ngw_resources_id))
    ngwFeature = NGWFeature(ngw_feature_id, ngwResource)
    #files = [os.path.join(os.path.dirname(__file__), 'media', 'plaza-1.jpg')]
    # Local image files to upload; adjust the paths for your machine.
    files = [
        "d:\\Development\\NextGIS\\D-Day\\foto\\plaza-1.jpg",
        #"d:\\Development\\NextGIS\\D-Day\\foto\\plaza-2.jpg",
        #"d:\\Development\\NextGIS\\D-Day\\foto\\plaza-3.jpg",
        #"d:\\Development\\NextGIS\\D-Day\\foto\\plaza-4.jpg",
        #"d:\\Development\\NextGIS\\D-Day\\foto\\plaza-5.jpg",
        #"d:\\Development\\NextGIS\\D-Day\\foto\\plaza-6.jpg",
    ]
    # Upload each file and link it to the feature as an attachment.
    for file_name in files:
        attachment_info = ngwConnection.upload_file( file_name )
        id = ngwFeature.link_attachment(attachment_info)
        print "link attachment with id %s"%str(id)
    # List the feature's image attachments and print their full URLs.
    attachments = ngwFeature.get_attachments()
    for attachment in attachments:
        if attachment[u'is_image'] == True:
            ngw_attachment = NGWAttachment( attachment[u'id'], ngwFeature)
            print ngw_attachment.get_image_full_url()
#ngwFeature.unlink_attachment( attachment[u'id'] ) |
brunotougeiro/python | refs/heads/master | venv/Lib/keyword.py | 162 | #! /usr/bin/env python3
"""Keywords (from "graminit.c")
This file is automatically generated; please don't muck it up!
To update the symbols in this file, 'cd' to the top directory of
the python source tree after building the interpreter and run:
./python Lib/keyword.py
"""
__all__ = ["iskeyword", "kwlist"]

# The section between the '#--start keywords--' and '#--end keywords--'
# markers is machine-generated by main() below; do not edit it by hand.
kwlist = [
#--start keywords--
    'False',
    'None',
    'True',
    'and',
    'as',
    'assert',
    'break',
    'class',
    'continue',
    'def',
    'del',
    'elif',
    'else',
    'except',
    'finally',
    'for',
    'from',
    'global',
    'if',
    'import',
    'in',
    'is',
    'lambda',
    'nonlocal',
    'not',
    'or',
    'pass',
    'raise',
    'return',
    'try',
    'while',
    'with',
    'yield',
#--end keywords--
    ]

# Frozenset membership is O(1); binding __contains__ directly yields a fast
# callable without a Python-level wrapper function.
iskeyword = frozenset(kwlist).__contains__
def main():
    """Regenerate the kwlist section of this file from graminit.c.

    Usage: keyword.py [graminit_path [keyword_path]]
    Defaults to ``Python/graminit.c`` and ``Lib/keyword.py``. Exits with
    status 1 when the target file lacks the start/end markers.
    """
    import sys, re
    args = sys.argv[1:]
    # Conditional expression instead of the fragile 'args and args[0] or
    # default' idiom (which would misfire on a falsy first argument).
    iptfile = args[0] if args else "Python/graminit.c"
    if len(args) > 1: optfile = args[1]
    else: optfile = "Lib/keyword.py"

    # load the output skeleton from the target, taking care to preserve its
    # newline convention. ('template' avoids shadowing the builtin 'format'.)
    with open(optfile, newline='') as fp:
        template = fp.readlines()
    nl = template[0][len(template[0].strip()):] if template else '\n'

    # scan the source file for keywords: graminit.c marks them '{1, "name"}'
    with open(iptfile) as fp:
        strprog = re.compile('"([^"]+)"')
        lines = []
        for line in fp:
            if '{1, "' in line:
                match = strprog.search(line)
                if match:
                    lines.append(" '" + match.group(1) + "'," + nl)
    lines.sort()

    # insert the sorted keyword lines between the markers in the skeleton
    try:
        start = template.index("#--start keywords--" + nl) + 1
        end = template.index("#--end keywords--" + nl)
        template[start:end] = lines
    except ValueError:
        sys.stderr.write("target does not contain format markers\n")
        sys.exit(1)

    # write the output file
    with open(optfile, 'w', newline='') as fp:
        fp.writelines(template)
|
hyperized/ansible | refs/heads/devel | lib/ansible/module_utils/cloudstack.py | 38 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, René Moser <mail@renemoser.net>
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import sys
import time
import traceback
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.basic import missing_required_lib
# Defer a missing 'cs' dependency: record the traceback here and let each
# module report it via fail_json() instead of crashing at import time.
CS_IMP_ERR = None
try:
    from cs import CloudStack, CloudStackException, read_config
    HAS_LIB_CS = True
except ImportError:
    CS_IMP_ERR = traceback.format_exc()
    HAS_LIB_CS = False
# Python 3 has no 'long'; alias it so the isinstance checks below work on
# both interpreter lines.
if sys.version_info > (3,):
    long = int
def cs_argument_spec():
    """Return the argument spec shared by all CloudStack modules.

    Defaults come from the CLOUDSTACK_* environment variables, so the
    options may be omitted at the task level.
    """
    env = os.environ.get
    return {
        'api_key': {'default': env('CLOUDSTACK_KEY')},
        'api_secret': {'default': env('CLOUDSTACK_SECRET'), 'no_log': True},
        'api_url': {'default': env('CLOUDSTACK_ENDPOINT')},
        'api_http_method': {'choices': ['get', 'post'], 'default': env('CLOUDSTACK_METHOD')},
        'api_timeout': {'type': 'int', 'default': env('CLOUDSTACK_TIMEOUT')},
        'api_region': {'default': env('CLOUDSTACK_REGION') or 'cloudstack'},
    }
def cs_required_together():
    """Argument pairs that must be supplied together (API key + secret)."""
    return [
        ['api_key', 'api_secret'],
    ]
class AnsibleCloudStack:
    def __init__(self, module):
        """Base helper for CloudStack modules.

        Stores the AnsibleModule, prepares the shared result/diff dict and
        initializes lazily-resolved caches. Fails immediately when the
        'cs' library is not importable.

        :param module: AnsibleModule instance providing params and
            exit_json/fail_json.
        """
        if not HAS_LIB_CS:
            module.fail_json(msg=missing_required_lib('cs'), exception=CS_IMP_ERR)
        # Result skeleton returned to Ansible; 'diff' is filled when a
        # change is detected.
        self.result = {
            'changed': False,
            'diff': {
                'before': dict(),
                'after': dict()
            }
        }
        # Common returns, will be merged with self.returns
        # search_for_key: replace_with_key
        self.common_returns = {
            'id': 'id',
            'name': 'name',
            'created': 'created',
            'zonename': 'zone',
            'state': 'state',
            'project': 'project',
            'account': 'account',
            'domain': 'domain',
            'displaytext': 'display_text',
            'displayname': 'display_name',
            'description': 'description',
        }
        # Init returns dict for use in subclasses
        self.returns = {}
        # these values will be casted to int
        self.returns_to_int = {}
        # these keys will be compared case sensitive in self.has_changed()
        self.case_sensitive_keys = [
            'id',
            'displaytext',
            'displayname',
            'description',
        ]
        self.module = module
        # Lazily-created API client; see the 'cs' property.
        self._cs = None
        # Helper for VPCs
        self._vpc_networks_ids = None
        # Cached CloudStack entities — presumably resolved on demand by
        # lookup helpers elsewhere in this class (not all shown here).
        self.domain = None
        self.account = None
        self.project = None
        self.ip_address = None
        self.network = None
        self.physical_network = None
        self.vpc = None
        self.zone = None
        self.vm = None
        self.vm_default_nic = None
        self.os_type = None
        self.hypervisor = None
        self.capabilities = None
        self.network_acl = None
@property
def cs(self):
if self._cs is None:
api_config = self.get_api_config()
self._cs = CloudStack(**api_config)
return self._cs
def get_api_config(self):
api_region = self.module.params.get('api_region') or os.environ.get('CLOUDSTACK_REGION')
try:
config = read_config(api_region)
except KeyError:
config = {}
api_config = {
'endpoint': self.module.params.get('api_url') or config.get('endpoint'),
'key': self.module.params.get('api_key') or config.get('key'),
'secret': self.module.params.get('api_secret') or config.get('secret'),
'timeout': self.module.params.get('api_timeout') or config.get('timeout') or 10,
'method': self.module.params.get('api_http_method') or config.get('method') or 'get',
}
self.result.update({
'api_region': api_region,
'api_url': api_config['endpoint'],
'api_key': api_config['key'],
'api_timeout': int(api_config['timeout']),
'api_http_method': api_config['method'],
})
if not all([api_config['endpoint'], api_config['key'], api_config['secret']]):
self.fail_json(msg="Missing api credentials: can not authenticate")
return api_config
def fail_json(self, **kwargs):
self.result.update(kwargs)
self.module.fail_json(**self.result)
def get_or_fallback(self, key=None, fallback_key=None):
value = self.module.params.get(key)
if not value:
value = self.module.params.get(fallback_key)
return value
def has_changed(self, want_dict, current_dict, only_keys=None, skip_diff_for_keys=None):
result = False
for key, value in want_dict.items():
# Optionally limit by a list of keys
if only_keys and key not in only_keys:
continue
# Skip None values
if value is None:
continue
if key in current_dict:
if isinstance(value, (int, float, long, complex)):
# ensure we compare the same type
if isinstance(value, int):
current_dict[key] = int(current_dict[key])
elif isinstance(value, float):
current_dict[key] = float(current_dict[key])
elif isinstance(value, long):
current_dict[key] = long(current_dict[key])
elif isinstance(value, complex):
current_dict[key] = complex(current_dict[key])
if value != current_dict[key]:
if skip_diff_for_keys and key not in skip_diff_for_keys:
self.result['diff']['before'][key] = current_dict[key]
self.result['diff']['after'][key] = value
result = True
else:
before_value = to_text(current_dict[key])
after_value = to_text(value)
if self.case_sensitive_keys and key in self.case_sensitive_keys:
if before_value != after_value:
if skip_diff_for_keys and key not in skip_diff_for_keys:
self.result['diff']['before'][key] = before_value
self.result['diff']['after'][key] = after_value
result = True
# Test for diff in case insensitive way
elif before_value.lower() != after_value.lower():
if skip_diff_for_keys and key not in skip_diff_for_keys:
self.result['diff']['before'][key] = before_value
self.result['diff']['after'][key] = after_value
result = True
else:
if skip_diff_for_keys and key not in skip_diff_for_keys:
self.result['diff']['before'][key] = None
self.result['diff']['after'][key] = to_text(value)
result = True
return result
def _get_by_key(self, key=None, my_dict=None):
if my_dict is None:
my_dict = {}
if key:
if key in my_dict:
return my_dict[key]
self.fail_json(msg="Something went wrong: %s not found" % key)
return my_dict
def query_api(self, command, **args):
try:
res = getattr(self.cs, command)(**args)
if 'errortext' in res:
self.fail_json(msg="Failed: '%s'" % res['errortext'])
except CloudStackException as e:
self.fail_json(msg='CloudStackException: %s' % to_native(e))
except Exception as e:
self.fail_json(msg=to_native(e))
return res
def get_network_acl(self, key=None):
if self.network_acl is None:
args = {
'name': self.module.params.get('network_acl'),
'vpcid': self.get_vpc(key='id'),
}
network_acls = self.query_api('listNetworkACLLists', **args)
if network_acls:
self.network_acl = network_acls['networkacllist'][0]
self.result['network_acl'] = self.network_acl['name']
if self.network_acl:
return self._get_by_key(key, self.network_acl)
else:
self.fail_json(msg="Network ACL %s not found" % self.module.params.get('network_acl'))
def get_vpc(self, key=None):
"""Return a VPC dictionary or the value of given key of."""
if self.vpc:
return self._get_by_key(key, self.vpc)
vpc = self.module.params.get('vpc')
if not vpc:
vpc = os.environ.get('CLOUDSTACK_VPC')
if not vpc:
return None
args = {
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
'zoneid': self.get_zone(key='id'),
}
vpcs = self.query_api('listVPCs', **args)
if not vpcs:
self.fail_json(msg="No VPCs available.")
for v in vpcs['vpc']:
if vpc in [v['name'], v['displaytext'], v['id']]:
# Fail if the identifyer matches more than one VPC
if self.vpc:
self.fail_json(msg="More than one VPC found with the provided identifyer '%s'" % vpc)
else:
self.vpc = v
self.result['vpc'] = v['name']
if self.vpc:
return self._get_by_key(key, self.vpc)
self.fail_json(msg="VPC '%s' not found" % vpc)
def is_vpc_network(self, network_id):
"""Returns True if network is in VPC."""
# This is an efficient way to query a lot of networks at a time
if self._vpc_networks_ids is None:
args = {
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
'zoneid': self.get_zone(key='id'),
}
vpcs = self.query_api('listVPCs', **args)
self._vpc_networks_ids = []
if vpcs:
for vpc in vpcs['vpc']:
for n in vpc.get('network', []):
self._vpc_networks_ids.append(n['id'])
return network_id in self._vpc_networks_ids
def get_physical_network(self, key=None):
if self.physical_network:
return self._get_by_key(key, self.physical_network)
physical_network = self.module.params.get('physical_network')
args = {
'zoneid': self.get_zone(key='id')
}
physical_networks = self.query_api('listPhysicalNetworks', **args)
if not physical_networks:
self.fail_json(msg="No physical networks available.")
for net in physical_networks['physicalnetwork']:
if physical_network in [net['name'], net['id']]:
self.physical_network = net
self.result['physical_network'] = net['name']
return self._get_by_key(key, self.physical_network)
self.fail_json(msg="Physical Network '%s' not found" % physical_network)
def get_network(self, key=None):
"""Return a network dictionary or the value of given key of."""
if self.network:
return self._get_by_key(key, self.network)
network = self.module.params.get('network')
if not network:
vpc_name = self.get_vpc(key='name')
if vpc_name:
self.fail_json(msg="Could not find network for VPC '%s' due missing argument: network" % vpc_name)
return None
args = {
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
'zoneid': self.get_zone(key='id'),
'vpcid': self.get_vpc(key='id')
}
networks = self.query_api('listNetworks', **args)
if not networks:
self.fail_json(msg="No networks available.")
for n in networks['network']:
# ignore any VPC network if vpc param is not given
if 'vpcid' in n and not self.get_vpc(key='id'):
continue
if network in [n['displaytext'], n['name'], n['id']]:
self.result['network'] = n['name']
self.network = n
return self._get_by_key(key, self.network)
self.fail_json(msg="Network '%s' not found" % network)
def get_project(self, key=None):
if self.project:
return self._get_by_key(key, self.project)
project = self.module.params.get('project')
if not project:
project = os.environ.get('CLOUDSTACK_PROJECT')
if not project:
return None
args = {
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id')
}
projects = self.query_api('listProjects', **args)
if projects:
for p in projects['project']:
if project.lower() in [p['name'].lower(), p['id']]:
self.result['project'] = p['name']
self.project = p
return self._get_by_key(key, self.project)
self.fail_json(msg="project '%s' not found" % project)
def get_ip_address(self, key=None):
if self.ip_address:
return self._get_by_key(key, self.ip_address)
ip_address = self.module.params.get('ip_address')
if not ip_address:
self.fail_json(msg="IP address param 'ip_address' is required")
args = {
'ipaddress': ip_address,
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
'vpcid': self.get_vpc(key='id'),
}
ip_addresses = self.query_api('listPublicIpAddresses', **args)
if not ip_addresses:
self.fail_json(msg="IP address '%s' not found" % args['ipaddress'])
self.ip_address = ip_addresses['publicipaddress'][0]
return self._get_by_key(key, self.ip_address)
def get_vm_guest_ip(self):
vm_guest_ip = self.module.params.get('vm_guest_ip')
default_nic = self.get_vm_default_nic()
if not vm_guest_ip:
return default_nic['ipaddress']
for secondary_ip in default_nic['secondaryip']:
if vm_guest_ip == secondary_ip['ipaddress']:
return vm_guest_ip
self.fail_json(msg="Secondary IP '%s' not assigned to VM" % vm_guest_ip)
def get_vm_default_nic(self):
if self.vm_default_nic:
return self.vm_default_nic
nics = self.query_api('listNics', virtualmachineid=self.get_vm(key='id'))
if nics:
for n in nics['nic']:
if n['isdefault']:
self.vm_default_nic = n
return self.vm_default_nic
self.fail_json(msg="No default IP address of VM '%s' found" % self.module.params.get('vm'))
def get_vm(self, key=None, filter_zone=True):
if self.vm:
return self._get_by_key(key, self.vm)
vm = self.module.params.get('vm')
if not vm:
self.fail_json(msg="Virtual machine param 'vm' is required")
args = {
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'projectid': self.get_project(key='id'),
'zoneid': self.get_zone(key='id') if filter_zone else None,
'fetch_list': True,
}
vms = self.query_api('listVirtualMachines', **args)
if vms:
for v in vms:
if vm.lower() in [v['name'].lower(), v['displayname'].lower(), v['id']]:
self.vm = v
return self._get_by_key(key, self.vm)
self.fail_json(msg="Virtual machine '%s' not found" % vm)
def get_disk_offering(self, key=None):
disk_offering = self.module.params.get('disk_offering')
if not disk_offering:
return None
# Do not add domain filter for disk offering listing.
disk_offerings = self.query_api('listDiskOfferings')
if disk_offerings:
for d in disk_offerings['diskoffering']:
if disk_offering in [d['displaytext'], d['name'], d['id']]:
return self._get_by_key(key, d)
self.fail_json(msg="Disk offering '%s' not found" % disk_offering)
def get_zone(self, key=None):
if self.zone:
return self._get_by_key(key, self.zone)
zone = self.module.params.get('zone')
if not zone:
zone = os.environ.get('CLOUDSTACK_ZONE')
zones = self.query_api('listZones')
if not zones:
self.fail_json(msg="No zones available. Please create a zone first")
# use the first zone if no zone param given
if not zone:
self.zone = zones['zone'][0]
self.result['zone'] = self.zone['name']
return self._get_by_key(key, self.zone)
if zones:
for z in zones['zone']:
if zone.lower() in [z['name'].lower(), z['id']]:
self.result['zone'] = z['name']
self.zone = z
return self._get_by_key(key, self.zone)
self.fail_json(msg="zone '%s' not found" % zone)
def get_os_type(self, key=None):
if self.os_type:
return self._get_by_key(key, self.zone)
os_type = self.module.params.get('os_type')
if not os_type:
return None
os_types = self.query_api('listOsTypes')
if os_types:
for o in os_types['ostype']:
if os_type in [o['description'], o['id']]:
self.os_type = o
return self._get_by_key(key, self.os_type)
self.fail_json(msg="OS type '%s' not found" % os_type)
def get_hypervisor(self):
if self.hypervisor:
return self.hypervisor
hypervisor = self.module.params.get('hypervisor')
hypervisors = self.query_api('listHypervisors')
# use the first hypervisor if no hypervisor param given
if not hypervisor:
self.hypervisor = hypervisors['hypervisor'][0]['name']
return self.hypervisor
for h in hypervisors['hypervisor']:
if hypervisor.lower() == h['name'].lower():
self.hypervisor = h['name']
return self.hypervisor
self.fail_json(msg="Hypervisor '%s' not found" % hypervisor)
def get_account(self, key=None):
if self.account:
return self._get_by_key(key, self.account)
account = self.module.params.get('account')
if not account:
account = os.environ.get('CLOUDSTACK_ACCOUNT')
if not account:
return None
domain = self.module.params.get('domain')
if not domain:
self.fail_json(msg="Account must be specified with Domain")
args = {
'name': account,
'domainid': self.get_domain(key='id'),
'listall': True
}
accounts = self.query_api('listAccounts', **args)
if accounts:
self.account = accounts['account'][0]
self.result['account'] = self.account['name']
return self._get_by_key(key, self.account)
self.fail_json(msg="Account '%s' not found" % account)
def get_domain(self, key=None):
if self.domain:
return self._get_by_key(key, self.domain)
domain = self.module.params.get('domain')
if not domain:
domain = os.environ.get('CLOUDSTACK_DOMAIN')
if not domain:
return None
args = {
'listall': True,
}
domains = self.query_api('listDomains', **args)
if domains:
for d in domains['domain']:
if d['path'].lower() in [domain.lower(), "root/" + domain.lower(), "root" + domain.lower()]:
self.domain = d
self.result['domain'] = d['path']
return self._get_by_key(key, self.domain)
self.fail_json(msg="Domain '%s' not found" % domain)
def query_tags(self, resource, resource_type):
args = {
'resourceid': resource['id'],
'resourcetype': resource_type,
}
tags = self.query_api('listTags', **args)
return self.get_tags(resource=tags, key='tag')
def get_tags(self, resource=None, key='tags'):
existing_tags = []
for tag in resource.get(key) or []:
existing_tags.append({'key': tag['key'], 'value': tag['value']})
return existing_tags
def _process_tags(self, resource, resource_type, tags, operation="create"):
if tags:
self.result['changed'] = True
if not self.module.check_mode:
args = {
'resourceids': resource['id'],
'resourcetype': resource_type,
'tags': tags,
}
if operation == "create":
response = self.query_api('createTags', **args)
else:
response = self.query_api('deleteTags', **args)
self.poll_job(response)
def _tags_that_should_exist_or_be_updated(self, resource, tags):
existing_tags = self.get_tags(resource)
return [tag for tag in tags if tag not in existing_tags]
def _tags_that_should_not_exist(self, resource, tags):
existing_tags = self.get_tags(resource)
return [tag for tag in existing_tags if tag not in tags]
def ensure_tags(self, resource, resource_type=None):
if not resource_type or not resource:
self.fail_json(msg="Error: Missing resource or resource_type for tags.")
if 'tags' in resource:
tags = self.module.params.get('tags')
if tags is not None:
self._process_tags(resource, resource_type, self._tags_that_should_not_exist(resource, tags), operation="delete")
self._process_tags(resource, resource_type, self._tags_that_should_exist_or_be_updated(resource, tags))
resource['tags'] = self.query_tags(resource=resource, resource_type=resource_type)
return resource
def get_capabilities(self, key=None):
if self.capabilities:
return self._get_by_key(key, self.capabilities)
capabilities = self.query_api('listCapabilities')
self.capabilities = capabilities['capability']
return self._get_by_key(key, self.capabilities)
def poll_job(self, job=None, key=None):
if 'jobid' in job:
while True:
res = self.query_api('queryAsyncJobResult', jobid=job['jobid'])
if res['jobstatus'] != 0 and 'jobresult' in res:
if 'errortext' in res['jobresult']:
self.fail_json(msg="Failed: '%s'" % res['jobresult']['errortext'])
if key and key in res['jobresult']:
job = res['jobresult'][key]
break
time.sleep(2)
return job
def update_result(self, resource, result=None):
if result is None:
result = dict()
if resource:
returns = self.common_returns.copy()
returns.update(self.returns)
for search_key, return_key in returns.items():
if search_key in resource:
result[return_key] = resource[search_key]
# Bad bad API does not always return int when it should.
for search_key, return_key in self.returns_to_int.items():
if search_key in resource:
result[return_key] = int(resource[search_key])
if 'tags' in resource:
result['tags'] = resource['tags']
return result
def get_result(self, resource):
return self.update_result(resource, self.result)
def get_result_and_facts(self, facts_name, resource):
result = self.get_result(resource)
ansible_facts = {
facts_name: result.copy()
}
for k in ['diff', 'changed']:
if k in ansible_facts[facts_name]:
del ansible_facts[facts_name][k]
result.update(ansible_facts=ansible_facts)
return result
|
ZAAK-ZURICHBERLIN/ZAAK.IO-Editor | refs/heads/master | utils/exporters/blender/addons/io_three/exporter/material.py | 70 | from .. import constants, logger
from . import base_classes, utilities, api
class Material(base_classes.BaseNode):
    """Class that wraps material nodes"""

    def __init__(self, node, parent):
        logger.debug("Material().__init__(%s)", node)
        base_classes.BaseNode.__init__(self, node, parent,
                                       constants.MATERIAL)
        self._common_attributes()
        # Phong shaders carry extra specular attributes.
        if self[constants.TYPE] == constants.THREE_PHONG:
            self._phong_attributes()
        if self.parent.options.get(constants.MAPS):
            self._update_maps()

    def _common_attributes(self):
        """Parse the common material attributes"""
        logger.debug('Material()._common_attributes()')
        shader_map = {
            constants.PHONG: constants.THREE_PHONG,
            constants.LAMBERT: constants.THREE_LAMBERT,
            constants.BASIC: constants.THREE_BASIC
        }
        self[constants.TYPE] = shader_map[api.material.type(self.node)]

        self[constants.COLOR] = utilities.rgb2int(
            api.material.diffuse_color(self.node))

        if self[constants.TYPE] != constants.THREE_BASIC:
            self[constants.EMISSIVE] = utilities.rgb2int(
                api.material.emissive_color(self.node))

        self[constants.VERTEX_COLORS] = (
            constants.VERTEX_COLORS_ON
            if api.material.use_vertex_colors(self.node)
            else constants.VERTEX_COLORS_OFF)

        self[constants.BLENDING] = api.material.blending(self.node)

        if api.material.transparent(self.node):
            self[constants.TRANSPARENT] = True

        if api.material.double_sided(self.node):
            self[constants.SIDE] = constants.SIDE_DOUBLE

        self[constants.DEPTH_TEST] = api.material.depth_test(self.node)
        self[constants.DEPTH_WRITE] = api.material.depth_write(self.node)

    def _phong_attributes(self):
        """Parse phong specific attributes"""
        logger.debug("Material()._phong_attributes()")
        self[constants.SPECULAR] = utilities.rgb2int(
            api.material.specular_color(self.node))
        self[constants.SHININESS] = api.material.specular_coef(self.node)

    def _update_maps(self):
        """Parses maps/textures and updates the textures array
        with any new nodes found.
        """
        logger.debug("Material()._update_maps()")

        for lookup, key in ((api.material.diffuse_map, constants.MAP),
                            (api.material.specular_map, constants.SPECULAR_MAP),
                            (api.material.light_map, constants.LIGHT_MAP)):
            found = lookup(self.node)
            if found:
                logger.info('Found map node %s for %s', found, key)
                texture = self.scene.texture(found.name)
                self[key] = texture[constants.UUID]

        if self[constants.TYPE] != constants.THREE_PHONG:
            return

        # Bump and normal maps additionally carry a scale factor.
        for lookup, map_key, scale_key, scale_lookup in (
                (api.material.bump_map, constants.BUMP_MAP,
                 constants.BUMP_SCALE, api.material.bump_scale),
                (api.material.normal_map, constants.NORMAL_MAP,
                 constants.NORMAL_SCALE, api.material.normal_scale)):
            found = lookup(self.node)
            if not found:
                continue
            logger.info("Found map node %s for %s", found, map_key)
            texture = self.scene.texture(found.name)
            self[map_key] = texture[constants.UUID]
            self[scale_key] = scale_lookup(self.node)
|
CoolCloud/taiga-back | refs/heads/master | taiga/external_apps/permissions.py | 8 | # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.base.api.permissions import TaigaResourcePermission
from taiga.base.api.permissions import IsAuthenticated
from taiga.base.api.permissions import PermissionComponent
class ApplicationPermission(TaigaResourcePermission):
    # External applications are read-only for API clients: any authenticated
    # user may list/retrieve them and request an auth token for one.
    retrieve_perms = IsAuthenticated()
    token_perms = IsAuthenticated()
    list_perms = IsAuthenticated()
class CanUseToken(PermissionComponent):
    """Permission component granting access only to the token's owner."""

    def check_permissions(self, request, view, obj=None):
        # No token object means there is nothing to own -> deny.
        return bool(obj) and request.user == obj.user
class ApplicationTokenPermission(TaigaResourcePermission):
    # Operations on a concrete token are restricted to its owner via
    # CanUseToken; listing, creating and lookup-by-application only require
    # authentication.
    retrieve_perms = IsAuthenticated() & CanUseToken()
    by_application_perms = IsAuthenticated()
    create_perms = IsAuthenticated()
    update_perms = IsAuthenticated() & CanUseToken()
    partial_update_perms = IsAuthenticated() & CanUseToken()
    destroy_perms = IsAuthenticated() & CanUseToken()
    list_perms = IsAuthenticated()
|
zoobab/gyp | refs/heads/master | test/win/gyptest-cl-default-char-is-unsigned.py | 269 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure DefaultCharIsUnsigned option is functional.
"""
import TestGyp
import sys
# MSVC-only option, so the test is a no-op on other platforms.
if sys.platform == 'win32':
    test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

    CHDIR = 'compiler-flags'
    test.run_gyp('default-char-is-unsigned.gyp', chdir=CHDIR)
    # NOTE(review): presumably the target's source fails to compile/run unless
    # /J (DefaultCharIsUnsigned) is applied — confirm against the .gyp target.
    test.build('default-char-is-unsigned.gyp', test.ALL, chdir=CHDIR)
    test.pass_test()
|
GdZ/scriptfile | refs/heads/master | software/googleAppEngine/lib/django_1_4/tests/regressiontests/templates/urls.py | 34 | # coding: utf-8
from __future__ import absolute_import
from django.conf.urls import patterns, url
from . import views
# URLconf used by the template-tag reverse() tests; each entry exercises a
# different capture style ({% url %} lookups resolve against these names).
urlpatterns = patterns('',
    # Test urls for testing reverse lookups
    (r'^$', views.index),
    (r'^client/([\d,]+)/$', views.client),
    (r'^client/(?P<id>\d+)/(?P<action>[^/]+)/$', views.client_action),
    (r'^client/(?P<client_id>\d+)/(?P<action>[^/]+)/$', views.client_action),
    url(r'^named-client/(\d+)/$', views.client2, name="named.client"),

    # Unicode strings are permitted everywhere.
    # NOTE: ur'...' literals are Python-2-only syntax; the non-ASCII patterns
    # and names deliberately exercise unicode handling in url reversing.
    url(ur'^Юникод/(\w+)/$', views.client2, name=u"метка_оператора"),
    url(ur'^Юникод/(?P<tag>\S+)/$', 'regressiontests.templates.views.client2', name=u"метка_оператора_2"),
)
|
tongwang01/tensorflow | refs/heads/master | tensorflow/contrib/learn/python/learn/estimators/linear.py | 2 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Linear Estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import re
import six
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.contrib.linear_optimizer.python import sdca_optimizer
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import session_run_hook
from tensorflow.python.training import training as train
# The default learning rate of 0.2 is a historical artifact of the initial
# implementation, but seems a reasonable choice.
# Used by _get_optimizer (for named optimizers) and as the upper bound in
# _get_default_optimizer.
_LEARNING_RATE = 0.2
def _get_optimizer(spec):
  """Materialize an optimizer from a name, a factory callable or an instance."""
  if isinstance(spec, six.string_types):
    # A name selects a canned optimizer class, built with the default rate.
    return layers.OPTIMIZER_CLS_NAMES[spec](learning_rate=_LEARNING_RATE)
  if callable(spec):
    # A factory is invoked to obtain the optimizer instance.
    return spec()
  # Anything else is assumed to already be an optimizer instance.
  return spec
# TODO(ispir): Remove this function by fixing '_infer_model' with single outputs
# and as_iteable case.
def _as_iterable(preds, output):
for pred in preds:
yield pred[output]
def _add_bias_column(feature_columns, columns_to_tensors, bias_variable,
labels, columns_to_variables):
# TODO(b/31008490): Move definition to a common constants place.
bias_column_name = "tf_virtual_bias_column"
if any(col.name is bias_column_name for col in feature_columns):
raise ValueError("%s is a reserved column name." % bias_column_name)
bias_column = layers.real_valued_column(bias_column_name)
columns_to_tensors[bias_column] = array_ops.ones_like(labels,
dtype=dtypes.float32)
columns_to_variables[bias_column] = [bias_variable]
def _linear_model_fn(features, labels, mode, params):
  """A model_fn for linear models that use a gradient-based optimizer.

  Args:
    features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
      dtype `int32` or `int64` in the range `[0, n_classes)`.
    mode: Defines whether this is training, evaluation or prediction.
      See `ModeKeys`.
    params: A dict of hyperparameters.
      The following hyperparameters are expected:
      * head: A `Head` instance.
      * feature_columns: An iterable containing all the feature columns used by
        the model.
      * optimizer: string, `Optimizer` object, or callable that defines the
        optimizer to use for training.
      * gradient_clip_norm: A float > 0. If provided, gradients are
        clipped to their global norm with this clipping ratio.
      * num_ps_replicas: The number of parameter server replicas.
      * joint_weights: If True, the weights for all columns will be stored in a
        single (possibly partitioned) variable. It's more efficient, but it's
        incompatible with SDCAOptimizer, and requires all feature columns are
        sparse and use the 'sum' combiner.

  Returns:
    An `estimator.ModelFnOps` instance.

  Raises:
    ValueError: If mode is not any of the `ModeKeys`.
  """
  head = params["head"]
  feature_columns = params["feature_columns"]
  optimizer = params["optimizer"]
  gradient_clip_norm = params.get("gradient_clip_norm", None)
  num_ps_replicas = params.get("num_ps_replicas", 0)
  joint_weights = params.get("joint_weights", False)

  # Feature-column code expects a dict; wrap a bare Tensor under an empty key.
  if not isinstance(features, dict):
    features = {"": features}

  parent_scope = "linear"

  # Shard model weights across parameter servers, keeping each slice at
  # least 64MB (64 << 20 bytes) to limit fragmentation.
  partitioner = partitioned_variables.min_max_variable_partitioner(
      max_partitions=num_ps_replicas,
      min_slice_size=64 << 20)

  with variable_scope.variable_scope(
      parent_scope, values=features.values(), partitioner=partitioner) as scope:
    if joint_weights:
      # Single (possibly partitioned) variable for all column weights.
      logits, _, _ = (
          layers.joint_weighted_sum_from_feature_columns(
              columns_to_tensors=features,
              feature_columns=feature_columns,
              num_outputs=head.logits_dimension,
              weight_collections=[parent_scope],
              scope=scope))
    else:
      logits, _, _ = (
          layers.weighted_sum_from_feature_columns(
              columns_to_tensors=features,
              feature_columns=feature_columns,
              num_outputs=head.logits_dimension,
              weight_collections=[parent_scope],
              scope=scope))

  def _train_op_fn(loss):
    # Compute gradients over the "linear" collection only, optionally clip
    # them by global norm, and apply with the configured optimizer.
    global_step = contrib_variables.get_global_step()
    my_vars = ops.get_collection("linear")
    grads = gradients.gradients(loss, my_vars)
    if gradient_clip_norm:
      grads, _ = clip_ops.clip_by_global_norm(grads, gradient_clip_norm)
    return (optimizer.apply_gradients(
        zip(grads, my_vars), global_step=global_step))

  return head.head_ops(features, labels, mode, _train_op_fn, logits)
def sdca_model_fn(features, labels, mode, params):
  """A model_fn for linear models that use the SDCA optimizer.

  Args:
    features: A dict of `Tensor` keyed by column name.
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
      dtype `int32` or `int64` in the range `[0, n_classes)`.
    mode: Defines whether this is training, evaluation or prediction.
      See `ModeKeys`.
    params: A dict of hyperparameters.
      The following hyperparameters are expected:
      * head: A `Head` instance. Type must be one of `_BinarySvmHead`,
        `_RegressionHead` or `_MultiClassHead`.
      * feature_columns: An iterable containing all the feature columns used by
        the model.
      * optimizer: An `SDCAOptimizer` instance.
      * weight_column_name: A string defining the weight feature column, or
        None if there are no weights.
      * update_weights_hook: A `SessionRunHook` object or None. Used to update
        model weights.

  Returns:
    An `estimator.ModelFnOps` instance.

  Raises:
    ValueError: If `optimizer` is not an `SDCAOptimizer` instance.
    ValueError: If the type of head is neither `_BinarySvmHead`, nor
      `_RegressionHead` nor `_MultiClassHead`.
    ValueError: If mode is not any of the `ModeKeys`.
  """
  head = params["head"]
  feature_columns = params["feature_columns"]
  optimizer = params["optimizer"]
  weight_column_name = params["weight_column_name"]
  update_weights_hook = params.get("update_weights_hook", None)

  if not isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
    raise ValueError("Optimizer must be of type SDCAOptimizer")

  # The SDCA loss is chosen from the head type.
  if isinstance(head, head_lib._BinarySvmHead):  # pylint: disable=protected-access
    loss_type = "hinge_loss"
  elif isinstance(head, head_lib._MultiClassHead):  # pylint: disable=protected-access
    loss_type = "logistic_loss"
  elif isinstance(head, head_lib._RegressionHead):  # pylint: disable=protected-access
    loss_type = "squared_loss"
  else:
    # BUG FIX: the original `return ValueError(...)` handed the exception
    # object back to the caller instead of raising it, silently continuing
    # with an invalid "result". Raise, as the docstring promises.
    raise ValueError("Unsupported head type: {}".format(head))

  parent_scope = "linear"

  with variable_scope.variable_op_scope(
      features.values(), parent_scope) as scope:
    logits, columns_to_variables, bias = (
        layers.weighted_sum_from_feature_columns(
            columns_to_tensors=features,
            feature_columns=feature_columns,
            num_outputs=1,
            scope=scope))

    # SDCA models the bias as one more (all-ones) feature column.
    _add_bias_column(feature_columns, features, bias, labels,
                     columns_to_variables)

  def _train_op_fn(unused_loss):
    global_step = contrib_variables.get_global_step()
    sdca_model, train_op = optimizer.get_train_step(columns_to_variables,
                                                    weight_column_name,
                                                    loss_type, features,
                                                    labels, global_step)
    if update_weights_hook is not None:
      # Let the hook build the shrink/update op once the graph is ready.
      update_weights_hook.set_parameters(sdca_model, train_op)
    return train_op

  return head.head_ops(features, labels, mode, _train_op_fn, logits)
# Ensures consistency with LinearComposableModel.
def _get_default_optimizer(feature_columns):
  """Default FTRL optimizer; learning rate shrinks with the column count."""
  scaled_rate = 1.0 / math.sqrt(len(feature_columns))
  return train.FtrlOptimizer(learning_rate=min(_LEARNING_RATE, scaled_rate))
class _SdcaUpdateWeightsHook(session_run_hook.SessionRunHook):
  """SessionRunHook to update and shrink SDCA model weights."""

  def __init__(self):
    pass

  def set_parameters(self, sdca_model, train_op):
    # Called from sdca_model_fn's train-op builder; stores the model and
    # train op that begin() needs to construct the update op.
    self._sdca_model = sdca_model
    self._train_op = train_op

  def begin(self):
    """Construct the update_weights op.

    The op is implicitly added to the default graph.
    """
    self._update_op = self._sdca_model.update_weights(self._train_op)

  def before_run(self, run_context):
    """Return the update_weights op so that it is executed during this run."""
    return session_run_hook.SessionRunArgs(self._update_op)
class LinearClassifier(evaluable.Evaluable, trainable.Trainable):
"""Linear classifier model.
Train a linear model to classify instances into one of multiple possible
classes. When number of possible classes is 2, this is binary classification.
Example:
```python
sparse_column_a = sparse_column_with_hash_bucket(...)
sparse_column_b = sparse_column_with_hash_bucket(...)
sparse_feature_a_x_sparse_feature_b = crossed_column(...)
# Estimator using the default optimizer.
estimator = LinearClassifier(
feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b])
# Or estimator using the FTRL optimizer with regularization.
estimator = LinearClassifier(
feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b],
optimizer=tf.train.FtrlOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
# Or estimator using the SDCAOptimizer.
estimator = LinearClassifier(
feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b],
optimizer=tf.contrib.linear_optimizer.SDCAOptimizer(
example_id_column='example_id',
num_loss_partitions=...,
symmetric_l2_regularization=2.0
))
# Input builders
def input_fn_train: # returns x, y
...
def input_fn_eval: # returns x, y
...
estimator.fit(input_fn=input_fn_train)
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column_name` is not `None`, a feature with
`key=weight_column_name` whose value is a `Tensor`.
* for each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `WeightedSparseColumn`, two features: the first with
`key` the id column name, the second with `key` the weight column name.
Both features' `value` must be a `SparseTensor`.
- if `column` is a `RealValuedColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
"""
def __init__(self, # _joint_weight pylint: disable=invalid-name
feature_columns,
model_dir=None,
n_classes=2,
weight_column_name=None,
optimizer=None,
gradient_clip_norm=None,
enable_centered_bias=False,
_joint_weight=False,
config=None,
feature_engineering_fn=None):
"""Construct a `LinearClassifier` estimator object.
Args:
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
model_dir: Directory to save model parameters, graph and etc. This can
also be used to load checkpoints from the directory into a estimator
to continue training a previously saved model.
n_classes: number of label classes. Default is binary classification.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
optimizer: The optimizer used to train the model. If specified, it should
be either an instance of `tf.Optimizer` or the SDCAOptimizer. If `None`,
the Ftrl optimizer will be used.
gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
to their global norm with this clipping ratio. See
`tf.clip_by_global_norm` for more details.
enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
_joint_weight: If True, the weights for all columns will be stored in a
single (possibly partitioned) variable. It's more efficient, but it's
incompatible with SDCAOptimizer, and requires all feature columns are
sparse and use the 'sum' combiner.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and
returns features and labels which will be fed
into the model.
Returns:
A `LinearClassifier` estimator.
Raises:
ValueError: if n_classes < 2.
"""
# TODO(zoy): Give an unsupported error if enable_centered_bias is
# requested for SDCA once its default changes to False.
self._feature_columns = feature_columns
assert self._feature_columns
self._optimizer = _get_default_optimizer(feature_columns)
if optimizer:
self._optimizer = _get_optimizer(optimizer)
chief_hook = None
if (isinstance(optimizer, sdca_optimizer.SDCAOptimizer) and
enable_centered_bias):
enable_centered_bias = False
logging.warning("centered_bias is not supported with SDCA, "
"please disable it explicitly.")
head = head_lib._multi_class_head( # pylint: disable=protected-access
n_classes,
weight_column_name=weight_column_name,
enable_centered_bias=enable_centered_bias)
params = {
"head": head,
"feature_columns": feature_columns,
"optimizer": self._optimizer,
}
if isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
assert not _joint_weight, ("_joint_weight is incompatible with the"
" SDCAOptimizer")
assert n_classes == 2, "SDCA only applies to binary classification."
model_fn = sdca_model_fn
# The model_fn passes the model parameters to the chief_hook. We then use
# the hook to update weights and shrink step only on the chief.
chief_hook = _SdcaUpdateWeightsHook()
params.update({
"weight_column_name": weight_column_name,
"update_weights_hook": chief_hook,
})
else:
model_fn = _linear_model_fn
params.update({
"gradient_clip_norm": gradient_clip_norm,
"num_ps_replicas": config.num_ps_replicas if config else 0,
"joint_weights": _joint_weight,
})
self._estimator = estimator.Estimator(
model_fn=model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
self._additional_run_hook = (chief_hook if self._estimator.config.is_chief
else None)
def get_estimator(self):
return self._estimator
def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
monitors=None, max_steps=None):
"""See trainable.Trainable."""
# TODO(roumposg): Remove when deprecated monitors are removed.
if monitors is None:
monitors = []
deprecated_monitors = [
m for m in monitors
if not isinstance(m, session_run_hook.SessionRunHook)
]
for monitor in deprecated_monitors:
monitor.set_estimator(self)
monitor._lock_estimator() # pylint: disable=protected-access
if self._additional_run_hook:
monitors.append(self._additional_run_hook)
result = self._estimator.fit(x=x, y=y, input_fn=input_fn, steps=steps,
batch_size=batch_size, monitors=monitors,
max_steps=max_steps)
for monitor in deprecated_monitors:
monitor._unlock_estimator() # pylint: disable=protected-access
return result
def evaluate(self, x=None, y=None, input_fn=None, feed_fn=None,
batch_size=None, steps=None, metrics=None, name=None):
"""See evaluable.Evaluable."""
return self._estimator.evaluate(x=x, y=y, input_fn=input_fn,
feed_fn=feed_fn, batch_size=batch_size,
steps=steps, metrics=metrics, name=name)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
"""Runs inference to determine the predicted class."""
key = prediction_key.PredictionKey.CLASSES
preds = self._estimator.predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=[key],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=key)
return preds[key]
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict_proba(self, x=None, input_fn=None, batch_size=None, outputs=None,
as_iterable=True):
"""Runs inference to determine the class probability predictions."""
key = prediction_key.PredictionKey.PROBABILITIES
preds = self._estimator.predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=[key],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=key)
return preds[key]
def get_variable_names(self):
return self._estimator.get_variable_names()
def get_variable_value(self, name):
return self._estimator.get_variable_value(name)
def export(self,
export_dir,
input_fn=None,
input_feature_key=None,
use_deprecated_input_fn=True,
signature_fn=None,
default_batch_size=1,
exports_to_keep=None):
"""See BaseEstimator.export."""
def default_input_fn(unused_estimator, examples):
return layers.parse_feature_columns_from_examples(
examples, self._feature_columns)
return self._estimator.export(
export_dir=export_dir,
input_fn=input_fn or default_input_fn,
input_feature_key=input_feature_key,
use_deprecated_input_fn=use_deprecated_input_fn,
signature_fn=(signature_fn or
export.classification_signature_fn_with_prob),
prediction_key=prediction_key.PredictionKey.PROBABILITIES,
default_batch_size=default_batch_size,
exports_to_keep=exports_to_keep)
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def weights_(self):
values = {}
optimizer_regex = r".*/"+self._optimizer.get_name() + r"(_\d)?$"
for name in self.get_variable_names():
if (name.startswith("linear/") and
name != "linear/bias_weight" and
not re.match(optimizer_regex, name)):
values[name] = self.get_variable_value(name)
if len(values) == 1:
return values[list(values.keys())[0]]
return values
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def bias_(self):
return self.get_variable_value("linear/bias_weight")
@property
def config(self):
return self._estimator.config
@property
def model_dir(self):
return self._estimator.model_dir
class LinearRegressor(evaluable.Evaluable, trainable.Trainable):
"""Linear regressor model.
Train a linear regression model to predict label value given observation of
feature values.
Example:
```python
sparse_column_a = sparse_column_with_hash_bucket(...)
sparse_column_b = sparse_column_with_hash_bucket(...)
sparse_feature_a_x_sparse_feature_b = crossed_column(...)
estimator = LinearRegressor(
feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b])
# Input builders
def input_fn_train: # returns x, y
...
def input_fn_eval: # returns x, y
...
estimator.fit(input_fn=input_fn_train)
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a KeyError:
* if `weight_column_name` is not `None`:
key=weight_column_name, value=a `Tensor`
* for column in `feature_columns`:
- if isinstance(column, `SparseColumn`):
key=column.name, value=a `SparseTensor`
- if isinstance(column, `WeightedSparseColumn`):
{key=id column name, value=a `SparseTensor`,
key=weight column name, value=a `SparseTensor`}
- if isinstance(column, `RealValuedColumn`):
key=column.name, value=a `Tensor`
"""
def __init__(self, # _joint_weights: pylint: disable=invalid-name
feature_columns,
model_dir=None,
weight_column_name=None,
optimizer=None,
gradient_clip_norm=None,
enable_centered_bias=False,
label_dimension=1,
_joint_weights=False,
config=None,
feature_engineering_fn=None):
"""Construct a `LinearRegressor` estimator object.
Args:
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
model_dir: Directory to save model parameters, graph, etc. This can
also be used to load checkpoints from the directory into a estimator
to continue training a previously saved model.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
optimizer: An instance of `tf.Optimizer` used to train the model. If
`None`, will use an Ftrl optimizer.
gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
to their global norm with this clipping ratio. See
`tf.clip_by_global_norm` for more details.
enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
label_dimension: dimension of the label for multilabels.
_joint_weights: If True use a single (possibly partitioned) variable to
store the weights. It's faster, but requires all feature columns are
sparse and have the 'sum' combiner. Incompatible with SDCAOptimizer.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and
returns features and labels which will be fed
into the model.
Returns:
A `LinearRegressor` estimator.
"""
self._feature_columns = feature_columns
assert self._feature_columns
self._optimizer = _get_default_optimizer(feature_columns)
if optimizer:
self._optimizer = _get_optimizer(optimizer)
chief_hook = None
if (isinstance(optimizer, sdca_optimizer.SDCAOptimizer) and
enable_centered_bias):
enable_centered_bias = False
logging.warning("centered_bias is not supported with SDCA, "
"please disable it explicitly.")
head = head_lib._regression_head( # pylint: disable=protected-access
weight_column_name=weight_column_name,
label_dimension=label_dimension,
enable_centered_bias=enable_centered_bias)
params = {
"head": head,
"feature_columns": feature_columns,
"optimizer": self._optimizer,
}
if isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
assert label_dimension == 1, "SDCA only applies for label_dimension=1."
assert not _joint_weights, ("_joint_weights is incompatible with"
" SDCAOptimizer.")
model_fn = sdca_model_fn
# The model_fn passes the model parameters to the chief_hook. We then use
# the hook to update weights and shrink step only on the chief.
chief_hook = _SdcaUpdateWeightsHook()
params.update({
"weight_column_name": weight_column_name,
"update_weights_hook": chief_hook,
})
else:
model_fn = _linear_model_fn
params.update({
"gradient_clip_norm": gradient_clip_norm,
"num_ps_replicas": config.num_ps_replicas if config else 0,
"joint_weights": _joint_weights,
})
self._estimator = estimator.Estimator(
model_fn=model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
self._additional_run_hook = (chief_hook if self._estimator.config.is_chief
else None)
def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
monitors=None, max_steps=None):
"""See trainable.Trainable."""
# TODO(roumposg): Remove when deprecated monitors are removed.
if monitors is None:
monitors = []
deprecated_monitors = [
m for m in monitors
if not isinstance(m, session_run_hook.SessionRunHook)
]
for monitor in deprecated_monitors:
monitor.set_estimator(self)
monitor._lock_estimator() # pylint: disable=protected-access
if self._additional_run_hook:
monitors.append(self._additional_run_hook)
result = self._estimator.fit(x=x, y=y, input_fn=input_fn, steps=steps,
batch_size=batch_size, monitors=monitors,
max_steps=max_steps)
for monitor in deprecated_monitors:
monitor._unlock_estimator() # pylint: disable=protected-access
return result
def evaluate(self, x=None, y=None, input_fn=None, feed_fn=None,
batch_size=None, steps=None, metrics=None, name=None):
"""See evaluable.Evaluable."""
return self._estimator.evaluate(x=x, y=y, input_fn=input_fn,
feed_fn=feed_fn, batch_size=batch_size,
steps=steps, metrics=metrics, name=name)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
"""Runs inference to determine the predicted class."""
key = prediction_key.PredictionKey.SCORES
preds = self._estimator.predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=[key],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=key)
return preds[key]
def get_variable_names(self):
return self._estimator.get_variable_names()
def get_variable_value(self, name):
return self._estimator.get_variable_value(name)
def export(self,
export_dir,
input_fn=None,
input_feature_key=None,
use_deprecated_input_fn=True,
signature_fn=None,
default_batch_size=1,
exports_to_keep=None):
"""See BaseEstimator.export."""
def default_input_fn(unused_estimator, examples):
return layers.parse_feature_columns_from_examples(
examples, self._feature_columns)
return self._estimator.export(
export_dir=export_dir,
input_fn=input_fn or default_input_fn,
input_feature_key=input_feature_key,
use_deprecated_input_fn=use_deprecated_input_fn,
signature_fn=(signature_fn or export.regression_signature_fn),
prediction_key=prediction_key.PredictionKey.SCORES,
default_batch_size=default_batch_size,
exports_to_keep=exports_to_keep)
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def weights_(self):
values = {}
optimizer_regex = r".*/"+self._optimizer.get_name() + r"(_\d)?$"
for name in self.get_variable_names():
if (name.startswith("linear/") and
name != "linear/bias_weight" and
not re.match(optimizer_regex, name)):
values[name] = self.get_variable_value(name)
if len(values) == 1:
return values[list(values.keys())[0]]
return values
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def bias_(self):
return self.get_variable_value("linear/bias_weight")
@property
def config(self):
return self._estimator.config
@property
def model_dir(self):
return self._estimator.model_dir
|
ianyh/heroku-buildpack-python-opencv | refs/heads/master | vendor/.heroku/lib/python2.7/test/test_binascii.py | 115 | """Test the binascii C module."""
from test import test_support
import unittest
import binascii
import array
# Note: "*_hex" functions are aliases for "(un)hexlify"
b2a_functions = ['b2a_base64', 'b2a_hex', 'b2a_hqx', 'b2a_qp', 'b2a_uu',
'hexlify', 'rlecode_hqx']
a2b_functions = ['a2b_base64', 'a2b_hex', 'a2b_hqx', 'a2b_qp', 'a2b_uu',
'unhexlify', 'rledecode_hqx']
all_functions = a2b_functions + b2a_functions + ['crc32', 'crc_hqx']
class BinASCIITest(unittest.TestCase):
type2test = str
# Create binary test data
rawdata = "The quick brown fox jumps over the lazy dog.\r\n"
# Be slow so we don't depend on other modules
rawdata += "".join(map(chr, xrange(256)))
rawdata += "\r\nHello world.\n"
def setUp(self):
self.data = self.type2test(self.rawdata)
def test_exceptions(self):
# Check module exceptions
self.assertTrue(issubclass(binascii.Error, Exception))
self.assertTrue(issubclass(binascii.Incomplete, Exception))
def test_functions(self):
# Check presence of all functions
for name in all_functions:
self.assertTrue(hasattr(getattr(binascii, name), '__call__'))
self.assertRaises(TypeError, getattr(binascii, name))
def test_returned_value(self):
# Limit to the minimum of all limits (b2a_uu)
MAX_ALL = 45
raw = self.rawdata[:MAX_ALL]
for fa, fb in zip(a2b_functions, b2a_functions):
a2b = getattr(binascii, fa)
b2a = getattr(binascii, fb)
try:
a = b2a(self.type2test(raw))
res = a2b(self.type2test(a))
except Exception, err:
self.fail("{}/{} conversion raises {!r}".format(fb, fa, err))
if fb == 'b2a_hqx':
# b2a_hqx returns a tuple
res, _ = res
self.assertEqual(res, raw, "{}/{} conversion: "
"{!r} != {!r}".format(fb, fa, res, raw))
self.assertIsInstance(res, str)
self.assertIsInstance(a, str)
self.assertLess(max(ord(c) for c in a), 128)
self.assertIsInstance(binascii.crc_hqx(raw, 0), int)
self.assertIsInstance(binascii.crc32(raw), int)
def test_base64valid(self):
# Test base64 with valid data
MAX_BASE64 = 57
lines = []
for i in range(0, len(self.rawdata), MAX_BASE64):
b = self.type2test(self.rawdata[i:i+MAX_BASE64])
a = binascii.b2a_base64(b)
lines.append(a)
res = ""
for line in lines:
a = self.type2test(line)
b = binascii.a2b_base64(a)
res = res + b
self.assertEqual(res, self.rawdata)
def test_base64invalid(self):
# Test base64 with random invalid characters sprinkled throughout
# (This requires a new version of binascii.)
MAX_BASE64 = 57
lines = []
for i in range(0, len(self.data), MAX_BASE64):
b = self.type2test(self.rawdata[i:i+MAX_BASE64])
a = binascii.b2a_base64(b)
lines.append(a)
fillers = ""
valid = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+/"
for i in xrange(256):
c = chr(i)
if c not in valid:
fillers += c
def addnoise(line):
noise = fillers
ratio = len(line) // len(noise)
res = ""
while line and noise:
if len(line) // len(noise) > ratio:
c, line = line[0], line[1:]
else:
c, noise = noise[0], noise[1:]
res += c
return res + noise + line
res = ""
for line in map(addnoise, lines):
a = self.type2test(line)
b = binascii.a2b_base64(a)
res += b
self.assertEqual(res, self.rawdata)
# Test base64 with just invalid characters, which should return
# empty strings. TBD: shouldn't it raise an exception instead ?
self.assertEqual(binascii.a2b_base64(self.type2test(fillers)), '')
def test_uu(self):
MAX_UU = 45
lines = []
for i in range(0, len(self.data), MAX_UU):
b = self.type2test(self.rawdata[i:i+MAX_UU])
a = binascii.b2a_uu(b)
lines.append(a)
res = ""
for line in lines:
a = self.type2test(line)
b = binascii.a2b_uu(a)
res += b
self.assertEqual(res, self.rawdata)
self.assertEqual(binascii.a2b_uu("\x7f"), "\x00"*31)
self.assertEqual(binascii.a2b_uu("\x80"), "\x00"*32)
self.assertEqual(binascii.a2b_uu("\xff"), "\x00"*31)
self.assertRaises(binascii.Error, binascii.a2b_uu, "\xff\x00")
self.assertRaises(binascii.Error, binascii.a2b_uu, "!!!!")
self.assertRaises(binascii.Error, binascii.b2a_uu, 46*"!")
# Issue #7701 (crash on a pydebug build)
self.assertEqual(binascii.b2a_uu('x'), '!> \n')
def test_crc32(self):
crc = binascii.crc32(self.type2test("Test the CRC-32 of"))
crc = binascii.crc32(self.type2test(" this string."), crc)
self.assertEqual(crc, 1571220330)
self.assertRaises(TypeError, binascii.crc32)
def test_hqx(self):
# Perform binhex4 style RLE-compression
# Then calculate the hexbin4 binary-to-ASCII translation
rle = binascii.rlecode_hqx(self.data)
a = binascii.b2a_hqx(self.type2test(rle))
b, _ = binascii.a2b_hqx(self.type2test(a))
res = binascii.rledecode_hqx(b)
self.assertEqual(res, self.rawdata)
def test_hex(self):
# test hexlification
s = '{s\005\000\000\000worldi\002\000\000\000s\005\000\000\000helloi\001\000\000\0000'
t = binascii.b2a_hex(self.type2test(s))
u = binascii.a2b_hex(self.type2test(t))
self.assertEqual(s, u)
self.assertRaises(TypeError, binascii.a2b_hex, t[:-1])
self.assertRaises(TypeError, binascii.a2b_hex, t[:-1] + 'q')
# Verify the treatment of Unicode strings
if test_support.have_unicode:
self.assertEqual(binascii.hexlify(unicode('a', 'ascii')), '61')
def test_qp(self):
# A test for SF bug 534347 (segfaults without the proper fix)
try:
binascii.a2b_qp("", **{1:1})
except TypeError:
pass
else:
self.fail("binascii.a2b_qp(**{1:1}) didn't raise TypeError")
self.assertEqual(binascii.a2b_qp("= "), "= ")
self.assertEqual(binascii.a2b_qp("=="), "=")
self.assertEqual(binascii.a2b_qp("=AX"), "=AX")
self.assertRaises(TypeError, binascii.b2a_qp, foo="bar")
self.assertEqual(binascii.a2b_qp("=00\r\n=00"), "\x00\r\n\x00")
self.assertEqual(
binascii.b2a_qp("\xff\r\n\xff\n\xff"),
"=FF\r\n=FF\r\n=FF"
)
self.assertEqual(
binascii.b2a_qp("0"*75+"\xff\r\n\xff\r\n\xff"),
"0"*75+"=\r\n=FF\r\n=FF\r\n=FF"
)
self.assertEqual(binascii.b2a_qp('\0\n'), '=00\n')
self.assertEqual(binascii.b2a_qp('\0\n', quotetabs=True), '=00\n')
self.assertEqual(binascii.b2a_qp('foo\tbar\t\n'), 'foo\tbar=09\n')
self.assertEqual(binascii.b2a_qp('foo\tbar\t\n', quotetabs=True), 'foo=09bar=09\n')
self.assertEqual(binascii.b2a_qp('.'), '=2E')
self.assertEqual(binascii.b2a_qp('.\n'), '=2E\n')
self.assertEqual(binascii.b2a_qp('a.\n'), 'a.\n')
def test_empty_string(self):
# A test for SF bug #1022953. Make sure SystemError is not raised.
empty = self.type2test('')
for func in all_functions:
if func == 'crc_hqx':
# crc_hqx needs 2 arguments
binascii.crc_hqx(empty, 0)
continue
f = getattr(binascii, func)
try:
f(empty)
except Exception, err:
self.fail("{}({!r}) raises {!r}".format(func, empty, err))
class ArrayBinASCIITest(BinASCIITest):
def type2test(self, s):
return array.array('c', s)
class BytearrayBinASCIITest(BinASCIITest):
type2test = bytearray
class MemoryviewBinASCIITest(BinASCIITest):
type2test = memoryview
def test_main():
test_support.run_unittest(BinASCIITest,
ArrayBinASCIITest,
BytearrayBinASCIITest,
MemoryviewBinASCIITest)
if __name__ == "__main__":
test_main()
|
sindrig/django-guardian | refs/heads/devel | guardian/templatetags/__init__.py | 363 | from __future__ import unicode_literals
|
Beanstream-DRWP/beanstream-python | refs/heads/master | beanstream/recurring_billing.py | 1 | '''
Copyright 2012 Upverter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import logging
import re
from beanstream import errors, process_transaction, transaction
log = logging.getLogger('beanstream.recurring_billing')
STATUS_DESCRIPTORS = {
'active' : 'A',
'closed' : 'C',
'on hold' : 'O'
}
STATUS_CODES = {
'A' : 'active',
'C' : 'closed',
'O' : 'on hold'
}
class CreateRecurringBillingAccount(process_transaction.Purchase):
""" Creating a recurring billing account is essentially doing a purchase
transaction with some options specifying recurring billing.
"""
def __init__(self, beanstream, amount, frequency_period,
frequency_increment):
""" Create a new recurring billing account creation transaction.
Arguments:
beanstream: gateway object
amount: the amount to charge on a recurring basis
frequency_period: one of DWMY; used in combination with
frequency_increment to set billing frequency
frequency_increment: numeric; used in combination with
frequency_period to set billing frequency
"""
super(CreateRecurringBillingAccount, self).__init__(beanstream, amount)
self.response_class = CreateRecurringBillingAccountResponse
self.params['trnRecurring'] = '1'
frequency_period = frequency_period.upper()
if frequency_period not in 'DWMY':
raise errors.ValidationException('invalid frequency period specified: %s (must be one of DWMY)' % frequency_period)
self.params['rbBillingPeriod'] = frequency_period
self.params['rbBillingIncrement'] = frequency_increment
def set_end_month(self, on):
if self.params['rbBillingPeriod'] != 'M':
log.warning('cannot set end_month attribute if billing period is not monthly')
return
self.params['rbEndMonth'] = '1' if on else '0'
def set_delay_charge(self, on):
self.params['rbCharge'] = '0' if on else '1'
def set_first_date(self, first_date):
self.params['rbFirstBilling'] = first_date.strftime('%m%d%Y')
def set_second_date(self, second_date):
self.params['rbSecondBilling'] = second_date.strftime('%m%d%Y')
def set_expiry(self, expiry):
self.params['rbExpiry'] = expiry.strftime('%m%d%Y')
def set_tax1(self, on):
self.params['rbApplyTax1'] = '1' if on else '0'
def set_tax2(self, on):
self.params['rbApplyTax2'] = '1' if on else '0'
def set_taxes(self, on):
self.set_tax1(on)
self.set_tax2(on)
class CreateRecurringBillingAccountResponse(process_transaction.PurchaseResponse):
def account_id(self):
''' The account id for the recurring billing account. '''
return self.resp.get('rbAccountId', [None])[0]
class ModifyRecurringBillingAccount(transaction.Transaction):
def __init__(self, beanstream, account_id):
super(ModifyRecurringBillingAccount, self).__init__(beanstream)
self.url = self.URLS['recurring_billing']
self.response_class = ModifyRecurringBillingAccountResponse
if not self.beanstream.recurring_billing_passcode:
raise errors.ConfigurationException('recurring billing passcode must be specified to modify recurring billing accounts')
self.params['merchantId'] = self.beanstream.merchant_id
self.params['serviceVersion'] = '1.0'
self.params['operationType'] = 'M'
self.params['passcode'] = self.beanstream.recurring_billing_passcode
self.params['responseFormat'] = 'QS'
self.params['rbAccountId'] = account_id
def parse_raw_response(self, body):
pattern = re.compile(r'^<\?xml version="1\.0".*>\s*<response>\s*<accountId>([^<]+)</accountId>\s*<code>(\d+)</code>\s*<message>(.*)</message>\s*</response>\s*$')
m = pattern.match(body)
if m:
account_id, response_code, message = m.groups()
return {
'accountId': [account_id],
'code': [response_code],
'message': [message]
}
else:
raise errors.ValidationException('unexpected message format received: %s' % body)
def set_amount(self, amount):
self.params['Amount'] = self._process_amount(amount)
def set_billing_state(self, billing_state):
billing_state = billing_state.lower()
if billing_state not in STATUS_DESCRIPTORS:
raise errors.ValidationException('invalid billing state option specified: %s' % billing_state)
self.params['rbBillingState'] = STATUS_DESCRIPTORS[billing_state]
def set_comments(self, comments):
self.params['trnComments'] = comments
def set_first_date(self, first_date):
self.params['rbFirstBilling'] = first_date.strftime('%m%d%Y')
def set_second_date(self, second_date):
self.params['rbSecondBilling'] = second_date.strftime('%m%d%Y')
def set_expiry(self, expiry):
self.params['rbExpiry'] = expiry.strftime('%m%d%Y')
def set_frequency_period(self, frequency_period):
frequency_period = frequency_period.upper()
if frequency_period not in 'DWMY':
raise errors.ValidationException('invalid frequency period specified: %s (must be one of DMWY)' % frequency_period)
self.params['rbBillingPeriod'] = frequency_period
def set_frequency_increment(self, frequency_increment):
self.params['rbBillingIncrement'] = frequency_increment
def set_tax1(self, on):
self.params['rbApplyTax1'] = '1' if on else '0'
def set_tax2(self, on):
self.params['rbApplyTax2'] = '1' if on else '0'
def set_taxes(self, on):
self.set_tax1(on)
self.set_tax2(on)
def set_end_month(self, on):
if self.params['rbBillingPeriod'] != 'M':
log.warning('cannot set end_month attribute if billing period is not monthly')
return
self.params['rbBillingEndMonth'] = '1' if on else '0'
def set_never_expires(self, on):
self.params['rbNeverExpires'] = '1' if on else '0'
def set_process_back_payments(self, on):
self.params['processBackPayments'] = '1' if on else '0'
class ModifyRecurringBillingAccountResponse(transaction.Response):
    """Response to a recurring-billing account modification request."""
    def approved(self):
        """True when the gateway returned response code '1'."""
        codes = self.resp.get('code', [0])
        return codes[0] == '1'
    def message(self):
        """Human-readable gateway message, or None if absent."""
        messages = self.resp.get('message', [None])
        return messages[0]
|
faridani/pyDoc | refs/heads/master | Unidecode/unidecode/x003.py | 246 | data = (
# Unidecode transliteration table for code points U+0300-U+03FF (combining
# diacritical marks, Greek and Coptic).  The tuple index equals the low byte
# of the code point: '' means "transliterate to nothing" (combining marks),
# '[?]' means "no ASCII transliteration known".
'', # 0x00
'', # 0x01
'', # 0x02
'', # 0x03
'', # 0x04
'', # 0x05
'', # 0x06
'', # 0x07
'', # 0x08
'', # 0x09
'', # 0x0a
'', # 0x0b
'', # 0x0c
'', # 0x0d
'', # 0x0e
'', # 0x0f
'', # 0x10
'', # 0x11
'', # 0x12
'', # 0x13
'', # 0x14
'', # 0x15
'', # 0x16
'', # 0x17
'', # 0x18
'', # 0x19
'', # 0x1a
'', # 0x1b
'', # 0x1c
'', # 0x1d
'', # 0x1e
'', # 0x1f
'', # 0x20
'', # 0x21
'', # 0x22
'', # 0x23
'', # 0x24
'', # 0x25
'', # 0x26
'', # 0x27
'', # 0x28
'', # 0x29
'', # 0x2a
'', # 0x2b
'', # 0x2c
'', # 0x2d
'', # 0x2e
'', # 0x2f
'', # 0x30
'', # 0x31
'', # 0x32
'', # 0x33
'', # 0x34
'', # 0x35
'', # 0x36
'', # 0x37
'', # 0x38
'', # 0x39
'', # 0x3a
'', # 0x3b
'', # 0x3c
'', # 0x3d
'', # 0x3e
'', # 0x3f
'', # 0x40
'', # 0x41
'', # 0x42
'', # 0x43
'', # 0x44
'', # 0x45
'', # 0x46
'', # 0x47
'', # 0x48
'', # 0x49
'', # 0x4a
'', # 0x4b
'', # 0x4c
'', # 0x4d
'', # 0x4e
'[?]', # 0x4f
'[?]', # 0x50
'[?]', # 0x51
'[?]', # 0x52
'[?]', # 0x53
'[?]', # 0x54
'[?]', # 0x55
'[?]', # 0x56
'[?]', # 0x57
'[?]', # 0x58
'[?]', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'', # 0x60
'', # 0x61
'', # 0x62
'a', # 0x63
'e', # 0x64
'i', # 0x65
'o', # 0x66
'u', # 0x67
'c', # 0x68
'd', # 0x69
'h', # 0x6a
'm', # 0x6b
'r', # 0x6c
't', # 0x6d
'v', # 0x6e
'x', # 0x6f
'[?]', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'\'', # 0x74
',', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'?', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'[?]', # 0x82
'[?]', # 0x83
'', # 0x84
'', # 0x85
'A', # 0x86
';', # 0x87
'E', # 0x88
'E', # 0x89
'I', # 0x8a
'[?]', # 0x8b
'O', # 0x8c
'[?]', # 0x8d
'U', # 0x8e
'O', # 0x8f
'I', # 0x90
'A', # 0x91
'B', # 0x92
'G', # 0x93
'D', # 0x94
'E', # 0x95
'Z', # 0x96
'E', # 0x97
'Th', # 0x98
'I', # 0x99
'K', # 0x9a
'L', # 0x9b
'M', # 0x9c
'N', # 0x9d
'Ks', # 0x9e
'O', # 0x9f
'P', # 0xa0
'R', # 0xa1
'[?]', # 0xa2
'S', # 0xa3
'T', # 0xa4
'U', # 0xa5
'Ph', # 0xa6
'Kh', # 0xa7
'Ps', # 0xa8
'O', # 0xa9
'I', # 0xaa
'U', # 0xab
'a', # 0xac
'e', # 0xad
'e', # 0xae
'i', # 0xaf
'u', # 0xb0
'a', # 0xb1
'b', # 0xb2
'g', # 0xb3
'd', # 0xb4
'e', # 0xb5
'z', # 0xb6
'e', # 0xb7
'th', # 0xb8
'i', # 0xb9
'k', # 0xba
'l', # 0xbb
'm', # 0xbc
'n', # 0xbd
'x', # 0xbe
'o', # 0xbf
'p', # 0xc0
'r', # 0xc1
's', # 0xc2
's', # 0xc3
't', # 0xc4
'u', # 0xc5
'ph', # 0xc6
'kh', # 0xc7
'ps', # 0xc8
'o', # 0xc9
'i', # 0xca
'u', # 0xcb
'o', # 0xcc
'u', # 0xcd
'o', # 0xce
'[?]', # 0xcf
'b', # 0xd0
'th', # 0xd1
'U', # 0xd2
'U', # 0xd3
'U', # 0xd4
'ph', # 0xd5
'p', # 0xd6
'&', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'St', # 0xda
'st', # 0xdb
'W', # 0xdc
'w', # 0xdd
'Q', # 0xde
'q', # 0xdf
'Sp', # 0xe0
'sp', # 0xe1
'Sh', # 0xe2
'sh', # 0xe3
'F', # 0xe4
'f', # 0xe5
'Kh', # 0xe6
'kh', # 0xe7
'H', # 0xe8
'h', # 0xe9
'G', # 0xea
'g', # 0xeb
'CH', # 0xec
'ch', # 0xed
'Ti', # 0xee
'ti', # 0xef
'k', # 0xf0
'r', # 0xf1
'c', # 0xf2
'j', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
|
mandx/django-cumulus | refs/heads/master | versioneer.py | 47 |
# Version: 0.12
"""
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy
[](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* run `versioneer-installer` in your source tree: this installs `versioneer.py`
* follow the instructions below (also in the `versioneer.py` docstring)
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example 'git describe --tags --dirty --always' reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time. However,
when you use "setup.py build" or "setup.py sdist", `_version.py` in the new
copy is replaced by a small static file that contains just the generated
version data.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the "git archive" command. As a result, generated tarballs will
contain enough information to get the proper version.
## Installation
First, decide on values for the following configuration variables:
* `VCS`: the version control system you use. Currently accepts "git".
* `versionfile_source`:
A project-relative pathname into which the generated version strings should
be written. This is usually a `_version.py` next to your project's main
`__init__.py` file, so it can be imported at runtime. If your project uses
`src/myproject/__init__.py`, this should be `src/myproject/_version.py`.
This file should be checked in to your VCS as usual: the copy created below
by `setup.py versioneer` will include code that parses expanded VCS
keywords in generated tarballs. The 'build' and 'sdist' commands will
replace it with a copy that has just the calculated version string.
This must be set even if your project does not have any modules (and will
therefore never import `_version.py`), since "setup.py sdist" -based trees
still need somewhere to record the pre-calculated version strings. Anywhere
in the source tree should do. If there is a `__init__.py` next to your
`_version.py`, the `setup.py versioneer` command (described below) will
append some `__version__`-setting assignments, if they aren't already
present.
* `versionfile_build`:
Like `versionfile_source`, but relative to the build directory instead of
the source directory. These will differ when your setup.py uses
'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
then you will probably have `versionfile_build='myproject/_version.py'` and
`versionfile_source='src/myproject/_version.py'`.
If this is set to None, then `setup.py build` will not attempt to rewrite
any `_version.py` in the built tree. If your project does not have any
libraries (e.g. if it only builds a script), then you should use
`versionfile_build = None` and override `distutils.command.build_scripts`
to explicitly insert a copy of `versioneer.get_version()` into your
generated script.
* `tag_prefix`:
a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
If your tags look like 'myproject-1.2.0', then you should use
tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
should be an empty string.
* `parentdir_prefix`:
a string, frequently the same as tag_prefix, which appears at the start of
all unpacked tarball filenames. If your tarball unpacks into
'myproject-1.2.0', this should be 'myproject-'.
This tool provides one script, named `versioneer-installer`. That script does
one thing: write a copy of `versioneer.py` into the current directory.
To versioneer-enable your project:
* 1: Run `versioneer-installer` to copy `versioneer.py` into the top of your
source tree.
* 2: add the following lines to the top of your `setup.py`, with the
configuration values you decided earlier:
import versioneer
versioneer.VCS = 'git'
versioneer.versionfile_source = 'src/myproject/_version.py'
versioneer.versionfile_build = 'myproject/_version.py'
versioneer.tag_prefix = '' # tags are like 1.2.0
versioneer.parentdir_prefix = 'myproject-' # dirname like 'myproject-1.2.0'
* 3: add the following arguments to the setup() call in your setup.py:
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
* 4: now run `setup.py versioneer`, which will create `_version.py`, and will
modify your `__init__.py` (if one exists next to `_version.py`) to define
`__version__` (by calling a function from `_version.py`). It will also
modify your `MANIFEST.in` to include both `versioneer.py` and the generated
`_version.py` in sdist tarballs.
* 5: commit these changes to your VCS. To make sure you won't forget,
`setup.py versioneer` will mark everything it touched for addition.
## Post-Installation Usage
Once established, all uses of your tree from a VCS checkout should get the
current version string. All generated tarballs should include an embedded
version string (so users who unpack them will not need a VCS tool installed).
If you distribute your project through PyPI, then the release process should
boil down to two steps:
* 1: git tag 1.0
* 2: python setup.py register sdist upload
If you distribute it through github (i.e. users use github to generate
tarballs with `git archive`), the process is:
* 1: git tag 1.0
* 2: git push; git push --tags
Currently, all version strings must be based upon a tag. Versioneer will
report "unknown" until your tree has at least one tag in its history. This
restriction will be fixed eventually (see issue #12).
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different keys for different flavors
of the version string:
* `['version']`: condensed tag+distance+shortid+dirty identifier. For git,
this uses the output of `git describe --tags --dirty --always` but strips
the tag_prefix. For example "0.11-2-g1076c97-dirty" indicates that the tree
is like the "1076c97" commit but has uncommitted changes ("-dirty"), and
that this commit is two revisions ("-2-") beyond the "0.11" tag. For
released software (exactly equal to a known tag), the identifier will only
contain the stripped tag, e.g. "0.11".
* `['full']`: detailed revision identifier. For Git, this is the full SHA1
commit id, followed by "-dirty" if the tree contains uncommitted changes,
e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac-dirty".
Some variants are more useful than others. Including `full` in a bug report
should allow developers to reconstruct the exact code being tested (or
indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
In the future, this will also include a
[PEP-0440](http://legacy.python.org/dev/peps/pep-0440/) -compatible flavor
(e.g. `1.2.post0.dev123`). This loses a lot of information (and has no room
for a hash-based revision id), but is safe to use in a `setup.py`
"`version=`" argument. It also enables tools like *pip* to compare version
strings and evaluate compatibility constraint declarations.
The `setup.py versioneer` command adds the following text to your
`__init__.py` to place a basic version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* re-run `versioneer-installer` in your source tree to replace your copy of
`versioneer.py`
* edit `setup.py`, if necessary, to include any new configuration settings
indicated by the release notes
* re-run `setup.py versioneer` to replace `SRC/_version.py`
* commit any changed files
### Upgrading from 0.10 to 0.11
You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running
`setup.py versioneer`. This will enable the use of additional version-control
systems (SVN, etc) in the future.
### Upgrading from 0.11 to 0.12
Nothing special.
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is hereby released into the
public domain. The `_version.py` that it creates is also in the public
domain.
"""
import os, sys, re, subprocess, errno
from distutils.core import Command
from distutils.command.sdist import sdist as _sdist
from distutils.command.build import build as _build
# these configuration settings will be overridden by setup.py after it
# imports us
versionfile_source = None   # project-relative path of the generated _version.py
versionfile_build = None    # same file, relative to the build dir (or None)
tag_prefix = None           # prefix stripped from VCS tags to get the version
parentdir_prefix = None     # prefix of unpacked-tarball directory names
VCS = None                  # name of the version-control system, e.g. "git"
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}        # VCS name -> template for the full _version.py
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Run the first launchable executable from *commands* with *args*.

    Returns the process's decoded, stripped stdout on success, or None when
    no candidate executable exists, launching fails, or the exit status is
    non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        try:
            # shell=False, so on Windows callers pass e.g. "git.cmd"
            process = subprocess.Popen(
                [candidate] + args, cwd=cwd, stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None))
            break
        except EnvironmentError:
            launch_error = sys.exc_info()[1]
            if launch_error.errno == errno.ENOENT:
                # this candidate isn't installed; try the next one
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(launch_error)
            return None
    else:
        # every candidate raised ENOENT
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = process.communicate()[0].strip()
    if sys.version >= '3':
        stdout = stdout.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.12 (https://github.com/warner/python-versioneer)
# these strings will be replaced by git during git-archive
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
# these strings are filled in when 'setup.py versioneer' creates _version.py
tag_prefix = "%(TAG_PREFIX)s"
parentdir_prefix = "%(PARENTDIR_PREFIX)s"
versionfile_source = "%(VERSIONFILE_SOURCE)s"
import os, sys, re, subprocess, errno
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
assert isinstance(commands, list)
p = None
for c in commands:
try:
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% args[0])
print(e)
return None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version >= '3':
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% args[0])
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%%s', but '%%s' doesn't start with prefix '%%s'" %%
(root, dirname, parentdir_prefix))
return None
return {"version": dirname[len(parentdir_prefix):], "full": ""}
def git_get_keywords(versionfile_abs):
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs,"r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
if not keywords:
return {} # keyword-finding function failed to find keywords
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
return {} # unexpanded, so not in an unpacked git-archive tarball
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs-tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return { "version": r,
"full": keywords["full"].strip() }
# no suitable tags, so we use the full revision id
if verbose:
print("no suitable tags, using full revision id")
return { "version": keywords["full"].strip(),
"full": keywords["full"].strip() }
def git_versions_from_vcs(tag_prefix, root, verbose=False):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
# meaning we're inside a checked out source tree.
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %%s" %% root)
return {}
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
cwd=root)
if stdout is None:
return {}
if not stdout.startswith(tag_prefix):
if verbose:
print("tag '%%s' doesn't start with prefix '%%s'" %% (stdout, tag_prefix))
return {}
tag = stdout[len(tag_prefix):]
stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if stdout is None:
return {}
full = stdout.strip()
if tag.endswith("-dirty"):
full += "-dirty"
return {"version": tag, "full": full}
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
keywords = { "refnames": git_refnames, "full": git_full }
ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
if ver:
return ver
try:
root = os.path.abspath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in range(len(versionfile_source.split(os.sep))):
root = os.path.dirname(root)
except NameError:
return default
return (git_versions_from_vcs(tag_prefix, root, verbose)
or versions_from_parentdir(parentdir_prefix, root, verbose)
or default)
'''
def git_get_keywords(versionfile_abs):
    """Textually extract the expanded git keyword strings from _version.py.

    The file is scanned with a regexp (never imported, so this is safe to
    call from setup.py) for the ``git_refnames`` / ``git_full`` assignments;
    their quoted values are returned under the "refnames" / "full" keys.
    A missing or unreadable file yields an empty dict.
    """
    keywords = {}
    try:
        with open(versionfile_abs, "r") as handle:
            for line in handle:
                stripped = line.strip()
                if stripped.startswith("git_refnames ="):
                    match = re.search(r'=\s*"(.*)"', line)
                    if match:
                        keywords["refnames"] = match.group(1)
                elif stripped.startswith("git_full ="):
                    match = re.search(r'=\s*"(.*)"', line)
                    if match:
                        keywords["full"] = match.group(1)
    except EnvironmentError:
        pass
    return keywords
def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
    """Derive version info from expanded git-archive keyword strings.

    Returns {"version": ..., "full": ...} when a tag matching *tag_prefix*
    is found in the refnames, {} when the keywords are absent or were never
    expanded, and falls back to the full revision id when no tag matches.
    """
    if not keywords:
        return {}  # the keyword-extraction step found nothing
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # keywords were never expanded: not an unpacked git-archive tarball
        if verbose:
            print("keywords are unexpanded, not using")
        return {}
    refs = set(part.strip() for part in refnames.strip("()").split(","))
    # git >= 1.8.3 labels tags as "tag: foo-1.0"; prefer those entries.
    tag_marker = "tag: "
    tags = set(ref[len(tag_marker):] for ref in refs if ref.startswith(tag_marker))
    if not tags:
        # Older git (or genuinely no tags).  Heuristic: version tags contain
        # a digit, which filters out branch names like "master", "release"
        # or "stabilization", plus "HEAD".
        tags = set(ref for ref in refs if re.search(r'\d', ref))
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs-tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    full = keywords["full"].strip()
    for candidate in sorted(tags):
        # lexicographic sort prefers e.g. "2.0" over "2.0rc1"
        if candidate.startswith(tag_prefix):
            version = candidate[len(tag_prefix):]
            if verbose:
                print("picking %s" % version)
            return {"version": version,
                    "full": full}
    # no suitable tags, so we use the full revision id
    if verbose:
        print("no suitable tags, using full revision id")
    return {"version": full,
            "full": full}
def git_versions_from_vcs(tag_prefix, root, verbose=False):
    """Ask git itself for version info from a checked-out source tree.

    Only reached when the archive keywords were not expanded and
    _version.py was not rewritten -- i.e. we are inside a real checkout.
    Returns {} if *root* has no .git directory, git cannot be run, or the
    description does not start with *tag_prefix*.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}
    git_commands = ["git"]
    if sys.platform == "win32":
        git_commands = ["git.cmd", "git.exe"]
    described = run_command(git_commands,
                            ["describe", "--tags", "--dirty", "--always"],
                            cwd=root)
    if described is None:
        return {}
    if not described.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'" % (described, tag_prefix))
        return {}
    tag = described[len(tag_prefix):]
    sha = run_command(git_commands, ["rev-parse", "HEAD"], cwd=root)
    if sha is None:
        return {}
    full = sha.strip()
    if tag.endswith("-dirty"):
        # propagate the dirty marker from the describe output onto the sha
        full += "-dirty"
    return {"version": tag, "full": full}
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """'git add' the files versioneer touches, and mark _version.py for
    keyword expansion via .gitattributes.

    Called from 'setup.py versioneer'.  Stages MANIFEST.in, the generated
    _version.py, the package __init__.py (if any), versioneer.py itself and
    (when newly written) .gitattributes.  Operates on the current working
    directory's .gitattributes.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        # stage this very file; map a .pyc/.pyo path back to the .py source
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # frozen/embedded interpreters may not define __file__
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # does .gitattributes already mark _version.py with export-subst?
        f = open(".gitattributes", "r")
        for line in f.readlines():
            if line.strip().startswith(versionfile_source):
                if "export-subst" in line.strip().split()[1:]:
                    present = True
        f.close()
    except EnvironmentError:
        pass
    if not present:
        f = open(".gitattributes", "a+")
        f.write("%s export-subst\n" % versionfile_source)
        f.close()
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    """Infer the version from the name of the unpacked source directory.

    Release tarballs conventionally unpack into "<prefix><version>".  If
    *root*'s basename starts with *parentdir_prefix*, the remainder is the
    version (no full revision id is available); otherwise returns None.
    """
    dirname = os.path.basename(root)
    if not dirname.startswith(parentdir_prefix):
        if verbose:
            print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
                  (root, dirname, parentdir_prefix))
        return None
    version = dirname[len(parentdir_prefix):]
    return {"version": version, "full": ""}
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.12) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
version_version = '%(version)s'
version_full = '%(full)s'
def get_versions(default={}, verbose=False):
return {'version': version_version, 'full': version_full}
"""
DEFAULT = {"version": "unknown", "full": "unknown"}
def versions_from_file(filename):
    """Parse a static (SHORT_VERSION_PY-style) _version.py file.

    Returns a dict with whichever of "version"/"full" could be read, or {}
    when the file is missing or unreadable.
    """
    found = {}
    try:
        with open(filename) as handle:
            lines = handle.readlines()
    except EnvironmentError:
        return {}
    for line in lines:
        version_match = re.match("version_version = '([^']+)'", line)
        if version_match:
            found["version"] = version_match.group(1)
        full_match = re.match("version_full = '([^']+)'", line)
        if full_match:
            found["full"] = full_match.group(1)
    return found
def write_to_version_file(filename, versions):
    """Render SHORT_VERSION_PY with *versions* and overwrite *filename*."""
    rendered = SHORT_VERSION_PY % versions
    with open(filename, "w") as handle:
        handle.write(rendered)
    print("set %s to '%s'" % (filename, versions["version"]))
def get_root():
    """Return the directory containing this versioneer.py.

    Falls back to the directory of sys.argv[0] (normally setup.py) when
    __file__ is undefined, e.g. under frozen/embedded interpreters.
    """
    try:
        here = os.path.abspath(__file__)
    except NameError:
        here = os.path.abspath(sys.argv[0])
    return os.path.dirname(here)
def vcs_function(vcs, suffix):
    """Look up a VCS-specific helper (e.g. "git_get_keywords") in this module.

    Returns None when no function named "<vcs>_<suffix>" exists.
    """
    name = '%s_%s' % (vcs, suffix)
    return getattr(sys.modules[__name__], name, None)
def get_versions(default=DEFAULT, verbose=False):
    """Compute the project version, trying each strategy in order.

    Order: expanded VCS keywords in _version.py, a static (already
    rewritten) _version.py, asking the VCS tool directly, the parent
    directory name, and finally *default*.  Requires that setup.py has
    already assigned versionfile_source, tag_prefix, parentdir_prefix
    and VCS on this module.
    """
    # returns dict with two keys: 'version' and 'full'
    assert versionfile_source is not None, "please set versioneer.versionfile_source"
    assert tag_prefix is not None, "please set versioneer.tag_prefix"
    assert parentdir_prefix is not None, "please set versioneer.parentdir_prefix"
    assert VCS is not None, "please set versioneer.VCS"
    # I am in versioneer.py, which must live at the top of the source tree,
    # which we use to compute the root directory. py2exe/bbfreeze/non-CPython
    # don't have __file__, in which case we fall back to sys.argv[0] (which
    # ought to be the setup.py script). We prefer __file__ since that's more
    # robust in cases where setup.py was invoked in some weird way (e.g. pip)
    root = get_root()
    versionfile_abs = os.path.join(root, versionfile_source)
    # extract version from first of _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    get_keywords_f = vcs_function(VCS, "get_keywords")
    versions_from_keywords_f = vcs_function(VCS, "versions_from_keywords")
    if get_keywords_f and versions_from_keywords_f:
        vcs_keywords = get_keywords_f(versionfile_abs)
        ver = versions_from_keywords_f(vcs_keywords, tag_prefix)
        if ver:
            if verbose: print("got version from expanded keyword %s" % ver)
            return ver
    ver = versions_from_file(versionfile_abs)
    if ver:
        if verbose: print("got version from file %s %s" % (versionfile_abs,ver))
        return ver
    versions_from_vcs_f = vcs_function(VCS, "versions_from_vcs")
    if versions_from_vcs_f:
        ver = versions_from_vcs_f(tag_prefix, root, verbose)
        if ver:
            if verbose: print("got version from VCS %s" % ver)
            return ver
    ver = versions_from_parentdir(parentdir_prefix, root, verbose)
    if ver:
        if verbose: print("got version from parentdir %s" % ver)
        return ver
    if verbose: print("got version from default %s" % default)
    return default
def get_version(verbose=False):
    """Convenience wrapper: just the 'version' flavor from get_versions()."""
    info = get_versions(verbose=verbose)
    return info["version"]
class cmd_version(Command):
    # "setup.py version": compute and print the version, building nothing.
    description = "report generated version string"
    user_options = []
    boolean_options = []
    def initialize_options(self):
        # no options to set up
        pass
    def finalize_options(self):
        pass
    def run(self):
        ver = get_version(verbose=True)
        print("Version is currently: %s" % ver)
class cmd_build(_build):
    # Standard build, then overwrite the built copy of _version.py with a
    # static file containing the version computed at build time.
    def run(self):
        versions = get_versions(verbose=True)
        _build.run(self)
        # now locate _version.py in the new build/ directory and replace it
        # with an updated value
        if versionfile_build:
            target_versionfile = os.path.join(self.build_lib, versionfile_build)
            print("UPDATING %s" % target_versionfile)
            os.unlink(target_versionfile)
            with open(target_versionfile, "w") as f:
                f.write(SHORT_VERSION_PY % versions)
if 'cx_Freeze' in sys.modules:  # cx_freeze enabled?
    from cx_Freeze.dist import build_exe as _build_exe
    class cmd_build_exe(_build_exe):
        # cx_Freeze variant: temporarily replace the source _version.py with
        # the static short form while freezing the exe, then restore the
        # full (VCS-aware) long form afterwards.
        def run(self):
            versions = get_versions(verbose=True)
            target_versionfile = versionfile_source
            print("UPDATING %s" % target_versionfile)
            os.unlink(target_versionfile)
            with open(target_versionfile, "w") as f:
                f.write(SHORT_VERSION_PY % versions)
            _build_exe.run(self)
            # restore the long, VCS-aware _version.py in the source tree
            os.unlink(target_versionfile)
            with open(versionfile_source, "w") as f:
                assert VCS is not None, "please set versioneer.VCS"
                LONG = LONG_VERSION_PY[VCS]
                f.write(LONG % {"DOLLAR": "$",
                                "TAG_PREFIX": tag_prefix,
                                "PARENTDIR_PREFIX": parentdir_prefix,
                                "VERSIONFILE_SOURCE": versionfile_source,
                                })
class cmd_sdist(_sdist):
    # sdist that freezes the computed version into the distribution tree.
    def run(self):
        versions = get_versions(verbose=True)
        # stash for make_release_tree, which runs later in the same command
        self._versioneer_generated_versions = versions
        # unless we update this, the command will keep using the old version
        self.distribution.metadata.version = versions["version"]
        return _sdist.run(self)
    def make_release_tree(self, base_dir, files):
        _sdist.make_release_tree(self, base_dir, files)
        # now locate _version.py in the new base_dir directory (remembering
        # that it may be a hardlink) and replace it with an updated value
        target_versionfile = os.path.join(base_dir, versionfile_source)
        print("UPDATING %s" % target_versionfile)
        os.unlink(target_versionfile)
        with open(target_versionfile, "w") as f:
            f.write(SHORT_VERSION_PY % self._versioneer_generated_versions)
# Snippet appended to the target package's __init__.py so that
# package.__version__ reflects the generated version at import time.
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
class cmd_update_files(Command):
    # Installs/refreshes the files Versioneer needs in the target project:
    # PKG/_version.py, the __init__.py snippet, and MANIFEST.in entries.
    description = "install/upgrade Versioneer files: __init__.py SRC/_version.py"
    user_options = []
    boolean_options = []

    def initialize_options(self):
        # No options to set up.
        pass

    def finalize_options(self):
        # No options to validate.
        pass

    def run(self):
        # Write the long, VCS-aware _version.py into the package.
        print(" creating %s" % versionfile_source)
        with open(versionfile_source, "w") as f:
            assert VCS is not None, "please set versioneer.VCS"
            LONG = LONG_VERSION_PY[VCS]
            f.write(LONG % {"DOLLAR": "$",
                            "TAG_PREFIX": tag_prefix,
                            "PARENTDIR_PREFIX": parentdir_prefix,
                            "VERSIONFILE_SOURCE": versionfile_source,
                            })
        # Ensure the package __init__.py exposes __version__ via the snippet.
        ipy = os.path.join(os.path.dirname(versionfile_source), "__init__.py")
        if os.path.exists(ipy):
            try:
                with open(ipy, "r") as f:
                    old = f.read()
            except EnvironmentError:
                old = ""
            if INIT_PY_SNIPPET not in old:
                print(" appending to %s" % ipy)
                with open(ipy, "a") as f:
                    f.write(INIT_PY_SNIPPET)
            else:
                print(" %s unmodified" % ipy)
        else:
            print(" %s doesn't exist, ok" % ipy)
            ipy = None
        # Make sure both the top-level "versioneer.py" and versionfile_source
        # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
        # they'll be copied into source distributions. Pip won't be able to
        # install the package without this.
        manifest_in = os.path.join(get_root(), "MANIFEST.in")
        simple_includes = set()
        try:
            with open(manifest_in, "r") as f:
                for line in f:
                    if line.startswith("include "):
                        for include in line.split()[1:]:
                            simple_includes.add(include)
        except EnvironmentError:
            pass
        # That doesn't cover everything MANIFEST.in can do
        # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
        # it might give some false negatives. Appending redundant 'include'
        # lines is safe, though.
        if "versioneer.py" not in simple_includes:
            print(" appending 'versioneer.py' to MANIFEST.in")
            with open(manifest_in, "a") as f:
                f.write("include versioneer.py\n")
        else:
            print(" 'versioneer.py' already in MANIFEST.in")
        if versionfile_source not in simple_includes:
            print(" appending versionfile_source ('%s') to MANIFEST.in" %
                  versionfile_source)
            with open(manifest_in, "a") as f:
                f.write("include %s\n" % versionfile_source)
        else:
            print(" versionfile_source already in MANIFEST.in")
        # Make VCS-specific changes. For git, this means creating/changing
        # .gitattributes to mark _version.py for export-time keyword
        # substitution.
        do_vcs_install(manifest_in, versionfile_source, ipy)
def get_cmdclass():
    """Return the distutils command-class mapping for setup(cmdclass=...)."""
    commands = {
        'version': cmd_version,
        'versioneer': cmd_update_files,
        'build': cmd_build,
        'sdist': cmd_sdist,
    }
    # Under cx_Freeze, 'build_exe' takes over the role of 'build'.
    if 'cx_Freeze' in sys.modules:
        commands['build_exe'] = cmd_build_exe
        del commands['build']
    return commands
|
molgun/ocl_web | refs/heads/master | ocl_web/apps/ocl_search/views.py | 5 | """Views for OCL Global search
Examples:
https://openconceptlab.org/search?q=malaria
https://openconceptlab.org/search?q=oncology&type=sources
"""
import logging
from django.views.generic import TemplateView
from django.http import Http404
from django.core.paginator import Paginator
from django.utils.http import urlencode
from apps.core.utils import SearchStringFormatter
from libs.ocl import (OclApi, OclSearch, OclConstants)
logger = logging.getLogger('oclweb')
class GlobalSearchView(TemplateView):
    """View for global OCL search across resource types.

    Performs the primary search for one resource type (e.g. concepts,
    sources) via the OCL API and also fetches result counts for the
    other searchable resource types.
    """
    template_name = "ocl_search/search.html"

    def get_context_data(self, *args, **kwargs):
        """Build the template context for a global OCL search.

        Raises Http404 when the API returns 404; other non-200 responses
        are re-raised via raise_for_status().
        """
        context = super(GlobalSearchView, self).get_context_data(*args, **kwargs)

        # Perform the primary search via the API
        original_search_string = self.request.GET.get('q', '')
        # NOTE(review): this mutates the request's 'q' parameter in place,
        # presumably appending a wildcard for partial matching — confirm in
        # SearchStringFormatter; the un-mutated string is kept for display.
        SearchStringFormatter.add_wildcard(self.request)
        searcher = OclSearch(params=self.request.GET)
        api = OclApi(
            self.request, debug=True,
            facets=OclConstants.resource_has_facets(searcher.search_type))
        search_response = api.get(searcher.search_type, params=searcher.search_params)
        if search_response.status_code == 404:
            raise Http404
        elif search_response.status_code != 200:
            search_response.raise_for_status()

        # Process the primary search results
        searcher.process_search_results(
            search_type=searcher.search_type,
            search_response=search_response,
            search_params=self.request.GET)

        # Setup paginator for primary search: paginate over a range() of the
        # total hit count since only the current page of results is held.
        search_paginator = Paginator(range(searcher.num_found), searcher.num_per_page)
        search_current_page = search_paginator.page(searcher.current_page)

        # Set context for primary search
        context['results'] = searcher.search_results
        context['page'] = search_current_page
        context['pagination_url'] = self.request.get_full_path()
        context['search_type'] = searcher.search_type
        context['search_type_name'] = OclConstants.resource_display_name(searcher.search_type)
        context['search_type_icon'] = OclConstants.resource_display_icon(searcher.search_type)
        context['search_sort_option_defs'] = searcher.get_sort_option_definitions()
        context['search_sort'] = searcher.get_sort()
        context['search_filters'] = searcher.search_filter_list
        context['search_query'] = original_search_string
        context['hide_nav_search'] = True
        # Collections are only offered for result types that can be added
        # to a collection, and only for signed-in users.
        if self.request.user.is_authenticated() and searcher.search_type in ['concepts', 'mappings']:
            context['all_collections'] = api.get_all_collections_for_user(self.request.user.username)

        # Build URL params for navigating to other resources
        other_resource_search_params = {}
        for param in OclSearch.TRANSFERRABLE_SEARCH_PARAMS:
            if param in self.request.GET:
                if param == 'q':
                    # Transfer the original query, not the wildcarded one.
                    other_resource_search_params[param] = original_search_string
                else:
                    other_resource_search_params[param] = self.request.GET.get(param)

        # Encode the search parameters into a single URL-encoded string so that it can
        # easily be appended onto URL links on the search page
        context['other_resource_search_params'] = ''
        if other_resource_search_params:
            context['other_resource_search_params'] = (
                '&' + urlencode(other_resource_search_params))

        # Perform the counter searches for the other resources
        resource_count = {}
        for resource_type in OclConstants.RESOURCE_TYPE_INFO:
            if resource_type == searcher.search_type:
                # Primary search has already been performed, so just set value from above
                resource_count[searcher.search_type] = searcher.num_found
            elif OclConstants.RESOURCE_TYPE_INFO[resource_type]['show_on_global_search']:
                # Get resource count applying transferrable search criteria
                # (HEAD request: only the num_found header is needed).
                count_response = api.head(resource_type, params=other_resource_search_params)
                if 'num_found' in count_response.headers:
                    resource_count[resource_type] = int(count_response.headers['num_found'])
                else:
                    resource_count[resource_type] = 0
        context['resource_count'] = resource_count

        # Set debug variables
        context['url_params'] = self.request.GET
        context['search_params'] = searcher.search_params
        context['search_response_headers'] = search_response.headers
        context['search_facets_json'] = searcher.search_facets
        context['search_filters_debug'] = str(searcher.search_filter_list)

        return context
|
wkschwartz/django | refs/heads/stable/3.2.x | tests/apps/two_default_configs_app/apps.py | 21 | from django.apps import AppConfig
class TwoConfig(AppConfig):
    # NOTE(review): this module declares two AppConfigs that both set
    # default = True; given the path (tests/apps/two_default_configs_app)
    # this looks deliberate, to exercise Django's duplicate-default
    # detection — do not "fix" by removing one.
    default = True
    name = 'apps.two_default_configs_app'
class TwoConfigBis(AppConfig):
    # Second config intentionally also marked default = True for the same
    # app — apparently a fixture for testing the "multiple default app
    # configs" error path; confirm against the test that loads this app.
    default = True
    name = 'apps.two_default_configs_app'
|
toblerone554/TutorialPyhton | refs/heads/master | Tema2/diccionario2.py | 1 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
diccionario = {'numero_1' : "valor_1", 'numero_2' : "valor_2"}
print diccionario['numero_1']
print "borramos numero_1"
del(diccionario['numero_1'])
print "ahora no debe existir"
|
unkyulee/elastic-cms | refs/heads/master | src/web/modules/post/controllers/field/default.py | 1 | from flask import request, render_template
import lib.es as es
def get(p):
    """Render the field-administration page for a post index.

    Ensures every property in the Elasticsearch 'post' mapping has a
    matching 'field' document (creating missing ones with a default
    order_key), attaches each field to p['field_list'] sorted by
    numeric order_key, and renders the template.

    p: request context dict; p['c'] holds the ES 'host' and 'index'.
       Mutated in place (p['field_list'] is set).
    Returns the rendered HTML response.
    """
    host = p['c']['host']
    index = p['c']['index']

    # The properties of the 'post' type mapping drive the field list.
    mapping = es.mapping(host, index, 'post')
    properties = mapping[index]['mappings']['post']['properties']

    p['field_list'] = []
    for prop in properties:
        field = es.get(host, index, 'field', prop)
        if not field:
            # No field document yet for this mapped property: create one
            # with a default sort position at the end of the list.
            field = {
                "type": properties[prop].get('type'),
                "name": prop,
                "order_key": '10000',
            }
            es.create(host, index, 'field', prop, field)
            es.flush(host, index)
        field['id'] = prop
        p['field_list'].append(field)

    # Sort by numeric order_key; empty/missing values sort as 10000.
    p['field_list'].sort(key=lambda field: int(field['order_key'] or 10000))
    return render_template("post/field/default.html", p=p)
|
jykntr/wishapp | refs/heads/master | config.py | 1 | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
SSL_DISABLE = True
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
SQLALCHEMY_RECORD_QUERIES = True
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
MAIL_SUBJECT_PREFIX = '[Wishlist]'
MAIL_SENDER = 'Wishlist Admin <someone@gmail.com>'
ADMIN_USER = os.environ.get('ADMIN_USER')
FLASKY_SLOW_DB_QUERY_TIME = 0.5
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
WTF_CSRF_ENABLED = False
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'data.sqlite')
class HerokuConfig(ProductionConfig):
SSL_DISABLE = bool(os.environ.get('SSL_DISABLE'))
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# handle proxy server headers
from werkzeug.contrib.fixers import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app)
# log to stderr
import logging
from logging import StreamHandler
file_handler = StreamHandler()
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'default': DevelopmentConfig
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.