repo_name | ref | path | copies | content
|---|---|---|---|---|
proxysh/Safejumper-for-Mac
|
refs/heads/master
|
buildlinux/env32/lib/python2.7/site-packages/cryptography/hazmat/primitives/constant_time.py
|
55
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import hmac
from cryptography.hazmat.bindings._constant_time import lib
if hasattr(hmac, "compare_digest"):
def bytes_eq(a, b):
if not isinstance(a, bytes) or not isinstance(b, bytes):
raise TypeError("a and b must be bytes.")
return hmac.compare_digest(a, b)
else:
def bytes_eq(a, b):
if not isinstance(a, bytes) or not isinstance(b, bytes):
raise TypeError("a and b must be bytes.")
return lib.Cryptography_constant_time_bytes_eq(
a, len(a), b, len(b)
) == 1
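# A minimal usage sketch with illustrative MAC values: both branches above
# compare in constant time, so the comparison's running time reveals nothing
# about how many leading bytes of the two inputs matched.
if __name__ == "__main__":
    assert bytes_eq(b"\x01\x02\x03", b"\x01\x02\x03")
    assert not bytes_eq(b"\x01\x02\x03", b"\x01\x02\x04")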
|
VTacius/MZBackup
|
refs/heads/master
|
mailbox.py
|
1
|
#!/usr/bin/python
# encoding: utf-8
# vim: tabstop=4 : shiftwidth=4 : expandtab
from modulos.backupeador import Respaldante, Enviante
from modulos.configuracion import configuracion
from modulos.listado import listar
from modulos.utilidades import abrir_listado, situar_directorio, titulador, situar_remoto
from argparse import ArgumentParser
from Queue import Queue
from shutil import copy
from threading import Semaphore
from os.path import dirname
# Read the application's configuration values from mzbackup.ini
s_mailbox = int(configuracion("s_mailbox"))
s_envio = int(configuracion("s_envio"))
if __name__ == "__main__":
    # Define the command-line arguments
    parser = ArgumentParser(description='Mailbox backup')
    parser.add_argument('-l', '--listado', help='dominio.lst file inside a {{dir-base}}/usuarios-{{fecha}} directory')
    parser.add_argument('-e', '--envio', action='store_true', help='Send the backup file to the remote server')
    parser.add_argument('-u', '--usuarios', help='List of users to back up')
    # Read the values of the options passed on the command line
args = parser.parse_args()
listado_dominios = args.listado
listado_usuarios = args.usuarios
ejecutar_envio = args.envio
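    # Typical invocations (hypothetical file names):
    #   ./mailbox.py -u usuarios.lst            back up an explicit list of users
    #   ./mailbox.py -l base/dominio.lst -e     per-domain user lists, then upload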
    # Move into the base working directory configured in mzbackup.ini
    titulador("Starting operations: moving into the base directory")
    situar_directorio("mailbox")
    # Create the remote directory where the files will be stored
    if ejecutar_envio:
        titulador("Creating the remote directory to send the data to")
        situar_remoto()
    # Honestly, other parts of this project would benefit from having this
    # functionality as an exportable method
    # Declare a dict whose keys are domains and whose values are lists of users
matriz_usuarios = {}
    if listado_usuarios:
        titulador("Getting the list of users")
        lista_usuarios = abrir_listado(listado_usuarios)
        titulador("Backing up each user")
        for correo in lista_usuarios:
            dominio = correo.split('@')[1]
            if dominio not in matriz_usuarios:
                matriz_usuarios[dominio] = []
            matriz_usuarios[dominio].append(correo)
    elif listado_dominios:
        titulador("Getting the list of domains")
        lista_dominios = abrir_listado(listado_dominios)
        copy(listado_dominios, '.')
        titulador("Getting the list of users for each domain")
        directorio = dirname(listado_dominios)
        for dominio in lista_dominios:
            lista_usuarios = "{0}/{1}.lst".format(directorio, dominio)
            copy(lista_usuarios, '.')
            matriz_usuarios[dominio] = abrir_listado(lista_usuarios)
    else:
        listador = listar()
        titulador("Getting the list of domains")
        listador.listar_dominios().almacenar()
        titulador("Getting the list of users for each domain")
        listador.listar_usuarios().almacenar()
        matriz_usuarios = listador.usuarios
    # Create the semaphores and queue used by the multi-threading classes
    cola = Queue() if ejecutar_envio else None
    # Number of threads to use, from the s_mailbox key in mzbackup.ini
    semaforo_respaldo = Semaphore(s_mailbox)
    # Number of threads to use, from the s_envio key in mzbackup.ini
    semaforo_envio = Semaphore(s_envio)
for dominio, lista_usuarios in matriz_usuarios.iteritems():
for usuario in lista_usuarios:
respaldo = Respaldante(semaforo_respaldo, cola, usuario)
respaldo.start()
if ejecutar_envio:
envio = Enviante(semaforo_envio, cola)
envio.setDaemon(True)
envio.start()
respaldo.join()
if ejecutar_envio:
cola.join()
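# A rough sketch of the concurrency pattern this script relies on. The helper
# below is illustrative only: it is never called, and the real work lives in
# the Respaldante/Enviante classes imported above.
def _demo_productor_consumidor(semaforo, cola, usuarios):
    """Worker threads acquire `semaforo` so only N backups run at once and
    push results onto `cola`; one daemonized consumer drains the queue."""
    from threading import Thread

    def respaldar(usuario):
        with semaforo:                       # limits backup concurrency
            cola.put("%s.tgz" % usuario)     # stand-in for the real backup step

    def enviar():
        while True:
            fichero = cola.get()             # blocks until a backup is ready
            # ... upload `fichero` to the remote server here ...
            cola.task_done()                 # lets cola.join() return below

    consumidor = Thread(target=enviar)
    consumidor.setDaemon(True)               # mirrors envio.setDaemon(True) above
    consumidor.start()
    hilos = [Thread(target=respaldar, args=(u,)) for u in usuarios]
    for hilo in hilos:
        hilo.start()
    for hilo in hilos:
        hilo.join()
    cola.join()                              # wait until every backup was sent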
|
balloob/home-assistant
|
refs/heads/dev
|
homeassistant/components/hisense_aehw4a1/config_flow.py
|
12
|
"""Config flow for Hisense AEH-W4A1 integration."""
from pyaehw4a1.aehw4a1 import AehW4a1
from homeassistant import config_entries
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
async def _async_has_devices(hass):
"""Return if there are devices that can be discovered."""
aehw4a1_ip_addresses = await AehW4a1().discovery()
return len(aehw4a1_ip_addresses) > 0
config_entry_flow.register_discovery_flow(
DOMAIN, "Hisense AEH-W4A1", _async_has_devices, config_entries.CONN_CLASS_LOCAL_POLL
)
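# register_discovery_flow generates the boilerplate config flow for DOMAIN:
# Home Assistant calls _async_has_devices when the flow is started and only
# proceeds with setting up the integration when it returns True.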
|
TRESCLOUD/odoopub
|
refs/heads/master
|
addons/base_action_rule/tests/base_action_rule_test.py
|
395
|
from openerp import SUPERUSER_ID
from openerp.tests import common
from .. import test_models
class base_action_rule_test(common.TransactionCase):
def setUp(self):
"""*****setUp*****"""
super(base_action_rule_test, self).setUp()
cr, uid = self.cr, self.uid
self.demo = self.registry('ir.model.data').get_object(cr, uid, 'base', 'user_demo').id
self.admin = SUPERUSER_ID
self.model = self.registry('base.action.rule.lead.test')
self.base_action_rule = self.registry('base.action.rule')
def create_filter_done(self, cr, uid, context=None):
filter_pool = self.registry('ir.filters')
return filter_pool.create(cr, uid, {
'name': "Lead is in done state",
'is_default': False,
'model_id': 'base.action.rule.lead.test',
'domain': "[('state','=','done')]",
}, context=context)
def create_filter_draft(self, cr, uid, context=None):
filter_pool = self.registry('ir.filters')
return filter_pool.create(cr, uid, {
'name': "Lead is in draft state",
'is_default': False,
'model_id': "base.action.rule.lead.test",
'domain' : "[('state','=','draft')]",
}, context=context)
def create_lead_test_1(self, cr, uid, context=None):
"""
Create a new lead_test
"""
return self.model.create(cr, uid, {
'name': "Lead Test 1",
'user_id': self.admin,
}, context=context)
def create_rule(self, cr, uid, kind, filter_id=False, filter_pre_id=False, context=None):
"""
        The "Rule 1" says that when a lead goes to the 'draft' state, the responsible user for that lead changes to user "demo".
"""
return self.base_action_rule.create(cr,uid,{
'name': "Rule 1",
'model_id': self.registry('ir.model').search(cr, uid, [('model','=','base.action.rule.lead.test')], context=context)[0],
'kind': kind,
'filter_pre_id': filter_pre_id,
'filter_id': filter_id,
'act_user_id': self.demo,
}, context=context)
def delete_rules(self, cr, uid, context=None):
""" delete all the rules on model 'base.action.rule.lead.test' """
action_ids = self.base_action_rule.search(cr, uid, [('model', '=', self.model._name)], context=context)
return self.base_action_rule.unlink(cr, uid, action_ids, context=context)
def test_00_check_to_state_draft_pre(self):
"""
        Check that a new record (with state = draft) doesn't change its responsible user when there is a precondition filter which checks that the state is draft.
"""
cr, uid = self.cr, self.uid
filter_draft = self.create_filter_draft(cr, uid)
self.create_rule(cr, uid, 'on_write', filter_pre_id=filter_draft)
new_lead_id = self.create_lead_test_1(cr, uid)
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'draft')
self.assertEquals(new_lead.user_id.id, self.admin)
self.delete_rules(cr, uid)
def test_01_check_to_state_draft_post(self):
"""
        Check that a new record changes its responsible user when there is a postcondition filter which checks that the state is draft.
"""
cr, uid = self.cr, self.uid
filter_draft = self.create_filter_draft(cr, uid)
self.create_rule(cr, uid, 'on_create')
new_lead_id = self.create_lead_test_1(cr, uid)
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'draft')
self.assertEquals(new_lead.user_id.id, self.demo)
self.delete_rules(cr, uid)
def test_02_check_from_draft_to_done_with_steps(self):
"""
        A new record is created and goes from the draft to the done state via the
        intermediate states (open, pending and cancel).
        We create a rule whose precondition filter requires the record to be in the
        "draft" state and whose postcondition filter requires it to be "done". The
        responsible user changes only when the state goes straight from 'draft' to
        'done'; otherwise it stays the same.
        In this test the responsible user never changes.
"""
cr, uid = self.cr, self.uid
filter_draft = self.create_filter_draft(cr, uid)
filter_done = self.create_filter_done(cr, uid)
self.create_rule(cr, uid, 'on_write', filter_pre_id=filter_draft, filter_id=filter_done)
new_lead_id = self.create_lead_test_1(cr, uid)
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'draft')
        self.assertEquals(new_lead.user_id.id, self.admin)
        # change the state of new_lead to open and check that the responsible doesn't change
new_lead.write({'state': 'open'})
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'open')
        self.assertEquals(new_lead.user_id.id, self.admin)
        # change the state of new_lead to pending and check that the responsible doesn't change
new_lead.write({'state': 'pending'})
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'pending')
        self.assertEquals(new_lead.user_id.id, self.admin)
        # change the state of new_lead to cancel and check that the responsible doesn't change
new_lead.write({'state': 'cancel'})
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'cancel')
        self.assertEquals(new_lead.user_id.id, self.admin)
        # change the state of new_lead to done and check that the responsible doesn't change
new_lead.write({'state': 'done'})
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'done')
self.assertEquals(new_lead.user_id.id, self.admin)
self.delete_rules(cr, uid)
def test_02_check_from_draft_to_done_without_steps(self):
"""
        A new record is created and goes from draft to done in a single operation.
        We create a rule whose precondition filter requires the record to be in the
        "draft" state and whose postcondition filter requires it to be "done". The
        responsible user changes only when the state goes straight from 'draft' to
        'done'; otherwise it stays the same.
        In this test the responsible user changes to user "demo".
"""
cr, uid = self.cr, self.uid
filter_draft = self.create_filter_draft(cr, uid)
filter_done = self.create_filter_done(cr, uid)
self.create_rule(cr, uid, 'on_write', filter_pre_id=filter_draft, filter_id=filter_done)
new_lead_id = self.create_lead_test_1(cr, uid)
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'draft')
        self.assertEquals(new_lead.user_id.id, self.admin)
        # change the state of new_lead to done and check that the responsible changes to the demo user
new_lead.write({'state': 'done'})
new_lead = self.model.browse(cr, uid, new_lead_id)
self.assertEquals(new_lead.state, 'done')
self.assertEquals(new_lead.user_id.id, self.demo)
self.delete_rules(cr, uid)
|
lucafavatella/intellij-community
|
refs/heads/cli-wip
|
python/lib/Lib/encodings/cp1253.py
|
593
|
""" Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1253',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u20ac' # 0x80 -> EURO SIGN
u'\ufffe' # 0x81 -> UNDEFINED
u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
u'\u2020' # 0x86 -> DAGGER
u'\u2021' # 0x87 -> DOUBLE DAGGER
u'\ufffe' # 0x88 -> UNDEFINED
u'\u2030' # 0x89 -> PER MILLE SIGN
u'\ufffe' # 0x8A -> UNDEFINED
u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x8C -> UNDEFINED
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
u'\u2022' # 0x95 -> BULLET
u'\u2013' # 0x96 -> EN DASH
u'\u2014' # 0x97 -> EM DASH
u'\ufffe' # 0x98 -> UNDEFINED
u'\u2122' # 0x99 -> TRADE MARK SIGN
u'\ufffe' # 0x9A -> UNDEFINED
u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x9C -> UNDEFINED
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\ufffe' # 0x9F -> UNDEFINED
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0385' # 0xA1 -> GREEK DIALYTIKA TONOS
u'\u0386' # 0xA2 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\ufffe' # 0xAA -> UNDEFINED
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\u2015' # 0xAF -> HORIZONTAL BAR
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\u0384' # 0xB4 -> GREEK TONOS
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
u'\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS
u'\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS
u'\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
u'\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
u'\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA
u'\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA
u'\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA
u'\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA
u'\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON
u'\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA
u'\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA
u'\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA
u'\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA
u'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA
u'\u039b' # 0xCB -> GREEK CAPITAL LETTER LAMDA
u'\u039c' # 0xCC -> GREEK CAPITAL LETTER MU
u'\u039d' # 0xCD -> GREEK CAPITAL LETTER NU
u'\u039e' # 0xCE -> GREEK CAPITAL LETTER XI
u'\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON
u'\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI
u'\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO
u'\ufffe' # 0xD2 -> UNDEFINED
u'\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA
u'\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU
u'\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON
u'\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI
u'\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI
u'\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI
u'\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA
u'\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
u'\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
u'\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS
u'\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS
u'\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS
u'\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS
u'\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
u'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
u'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
u'\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA
u'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
u'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
u'\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA
u'\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA
u'\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA
u'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
u'\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA
u'\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA
u'\u03bc' # 0xEC -> GREEK SMALL LETTER MU
u'\u03bd' # 0xED -> GREEK SMALL LETTER NU
u'\u03be' # 0xEE -> GREEK SMALL LETTER XI
u'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
u'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
u'\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO
u'\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA
u'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
u'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
u'\u03c5' # 0xF5 -> GREEK SMALL LETTER UPSILON
u'\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI
u'\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI
u'\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI
u'\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA
u'\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
u'\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
u'\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS
u'\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS
u'\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS
u'\ufffe' # 0xFF -> UNDEFINED
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
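# A small usage sketch: charmap_decode maps each raw byte through
# decoding_table, so the three cp1253 bytes below decode to Greek alpha,
# beta and gamma; charmap_encode performs the reverse lookup through
# encoding_table.
if __name__ == "__main__":
    decoded, consumed = codecs.charmap_decode(b'\xe1\xe2\xe3', 'strict', decoding_table)
    assert decoded == u'\u03b1\u03b2\u03b3' and consumed == 3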
|
lhopps/grit-i18n
|
refs/heads/master
|
grit/format/policy_templates/writers/admx_writer_unittest.py
|
41
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for grit.format.policy_templates.writers.admx_writer."""
import os
import sys
import unittest
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
from grit.format.policy_templates.writers import admx_writer
from grit.format.policy_templates.writers import xml_writer_base_unittest
from xml.dom import minidom
class AdmxWriterUnittest(xml_writer_base_unittest.XmlWriterBaseTest):
def _CreateDocumentElement(self):
dom_impl = minidom.getDOMImplementation('')
doc = dom_impl.createDocument(None, 'root', None)
return doc.documentElement
def setUp(self):
    # Writer configuration. This dictionary contains the parameters used by
    # the ADMX writer.
config = {
'win_group_policy_class': 'TestClass',
'win_supported_os': 'SUPPORTED_TESTOS',
'win_reg_mandatory_key_name': 'Software\\Policies\\Test',
'win_reg_recommended_key_name': 'Software\\Policies\\Test\\Recommended',
'win_mandatory_category_path': ['test_category'],
'win_recommended_category_path': ['test_recommended_category'],
'admx_namespace': 'ADMXWriter.Test.Namespace',
'admx_prefix': 'test_prefix',
'build': 'test_product',
}
self.writer = admx_writer.GetWriter(config)
self.writer.Init()
def _GetPoliciesElement(self, doc):
node_list = doc.getElementsByTagName('policies')
self.assertTrue(node_list.length == 1)
return node_list.item(0)
def _GetCategoriesElement(self, doc):
node_list = doc.getElementsByTagName('categories')
self.assertTrue(node_list.length == 1)
return node_list.item(0)
def testEmpty(self):
self.writer.BeginTemplate()
self.writer.EndTemplate()
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions revision="1.0" schemaVersion="1.0">\n'
' <policyNamespaces>\n'
' <target namespace="ADMXWriter.Test.Namespace"'
' prefix="test_prefix"/>\n'
' <using namespace="Microsoft.Policies.Windows" prefix="windows"/>\n'
' </policyNamespaces>\n'
' <resources minRequiredRevision="1.0"/>\n'
' <supportedOn>\n'
' <definitions>\n'
' <definition displayName="'
'$(string.SUPPORTED_TESTOS)" name="SUPPORTED_TESTOS"/>\n'
' </definitions>\n'
' </supportedOn>\n'
' <categories>\n'
' <category displayName="$(string.test_category)"'
' name="test_category"/>\n'
' <category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
' </categories>\n'
' <policies/>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
def testEmptyVersion(self):
self.writer.config['version'] = '39.0.0.0'
self.writer.BeginTemplate()
self.writer.EndTemplate()
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions revision="1.0" schemaVersion="1.0">\n'
' <!--test_product version: 39.0.0.0-->\n'
' <policyNamespaces>\n'
' <target namespace="ADMXWriter.Test.Namespace"'
' prefix="test_prefix"/>\n'
' <using namespace="Microsoft.Policies.Windows" prefix="windows"/>\n'
' </policyNamespaces>\n'
' <resources minRequiredRevision="1.0"/>\n'
' <supportedOn>\n'
' <definitions>\n'
' <definition displayName="'
'$(string.SUPPORTED_TESTOS)" name="SUPPORTED_TESTOS"/>\n'
' </definitions>\n'
' </supportedOn>\n'
' <categories>\n'
' <category displayName="$(string.test_category)"'
' name="test_category"/>\n'
' <category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
' </categories>\n'
' <policies/>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
def testEmptyPolicyGroup(self):
empty_policy_group = {
'name': 'PolicyGroup',
'policies': []
}
# Initialize writer to write a policy group.
self.writer.BeginTemplate()
# Write policy group
self.writer.BeginPolicyGroup(empty_policy_group)
self.writer.EndPolicyGroup()
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = ''
self.AssertXMLEquals(output, expected_output)
output = self.GetXMLOfChildren(
self._GetCategoriesElement(self.writer._doc))
expected_output = (
'<category displayName="$(string.test_category)"'
' name="test_category"/>\n'
'<category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
'<category displayName="$(string.PolicyGroup_group)"'
' name="PolicyGroup">\n'
' <parentCategory ref="test_category"/>\n'
'</category>')
self.AssertXMLEquals(output, expected_output)
def testPolicyGroup(self):
empty_policy_group = {
'name': 'PolicyGroup',
'policies': [
{'name': 'PolicyStub2',
'type': 'main'},
{'name': 'PolicyStub1',
'type': 'main'},
]
}
# Initialize writer to write a policy group.
self.writer.BeginTemplate()
# Write policy group
self.writer.BeginPolicyGroup(empty_policy_group)
self.writer.EndPolicyGroup()
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = ''
self.AssertXMLEquals(output, expected_output)
output = self.GetXMLOfChildren(
self._GetCategoriesElement(self.writer._doc))
expected_output = (
'<category displayName="$(string.test_category)"'
' name="test_category"/>\n'
'<category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
'<category displayName="$(string.PolicyGroup_group)"'
' name="PolicyGroup">\n'
' <parentCategory ref="test_category"/>\n'
'</category>')
self.AssertXMLEquals(output, expected_output)
def _initWriterForPolicy(self, writer, policy):
'''Initializes the writer to write the given policy next.
'''
policy_group = {
'name': 'PolicyGroup',
'policies': [policy]
}
writer.BeginTemplate()
writer.BeginPolicyGroup(policy_group)
def testMainPolicy(self):
main_policy = {
'name': 'DummyMainPolicy',
'type': 'main',
}
self._initWriterForPolicy(self.writer, main_policy)
self.writer.WritePolicy(main_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
' explainText="$(string.DummyMainPolicy_Explain)"'
' key="Software\\Policies\\Test" name="DummyMainPolicy"'
' presentation="$(presentation.DummyMainPolicy)"'
' valueName="DummyMainPolicy">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <enabledValue>\n'
' <decimal value="1"/>\n'
' </enabledValue>\n'
' <disabledValue>\n'
' <decimal value="0"/>\n'
' </disabledValue>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testRecommendedPolicy(self):
main_policy = {
'name': 'DummyMainPolicy',
'type': 'main',
}
policy_group = {
'name': 'PolicyGroup',
'policies': [main_policy],
}
self.writer.BeginTemplate()
self.writer.BeginRecommendedPolicyGroup(policy_group)
self.writer.WriteRecommendedPolicy(main_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
' explainText="$(string.DummyMainPolicy_Explain)"'
' key="Software\\Policies\\Test\\Recommended"'
' name="DummyMainPolicy_recommended"'
' presentation="$(presentation.DummyMainPolicy)"'
' valueName="DummyMainPolicy">\n'
' <parentCategory ref="PolicyGroup_recommended"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <enabledValue>\n'
' <decimal value="1"/>\n'
' </enabledValue>\n'
' <disabledValue>\n'
' <decimal value="0"/>\n'
' </disabledValue>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testRecommendedOnlyPolicy(self):
main_policy = {
'name': 'DummyMainPolicy',
'type': 'main',
'features': {
'can_be_recommended': True,
'can_be_mandatory': False,
}
}
policy_group = {
'name': 'PolicyGroup',
'policies': [main_policy],
}
self.writer.BeginTemplate()
self.writer.BeginRecommendedPolicyGroup(policy_group)
self.writer.WritePolicy(main_policy)
self.writer.WriteRecommendedPolicy(main_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
' explainText="$(string.DummyMainPolicy_Explain)"'
' key="Software\\Policies\\Test\\Recommended"'
' name="DummyMainPolicy_recommended"'
' presentation="$(presentation.DummyMainPolicy)"'
' valueName="DummyMainPolicy">\n'
' <parentCategory ref="PolicyGroup_recommended"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <enabledValue>\n'
' <decimal value="1"/>\n'
' </enabledValue>\n'
' <disabledValue>\n'
' <decimal value="0"/>\n'
' </disabledValue>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testStringPolicy(self):
string_policy = {
'name': 'SampleStringPolicy',
'type': 'string',
}
self._initWriterForPolicy(self.writer, string_policy)
self.writer.WritePolicy(string_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleStringPolicy)"'
' explainText="$(string.SampleStringPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleStringPolicy"'
' presentation="$(presentation.SampleStringPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <text id="SampleStringPolicy" maxLength="1000000"'
' valueName="SampleStringPolicy"/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testIntPolicy(self):
int_policy = {
'name': 'SampleIntPolicy',
'type': 'int',
}
self._initWriterForPolicy(self.writer, int_policy)
self.writer.WritePolicy(int_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleIntPolicy)"'
' explainText="$(string.SampleIntPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleIntPolicy"'
' presentation="$(presentation.SampleIntPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <decimal id="SampleIntPolicy" maxValue="2000000000" '
'valueName="SampleIntPolicy"/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testIntEnumPolicy(self):
enum_policy = {
'name': 'SampleEnumPolicy',
'type': 'int-enum',
'items': [
{'name': 'item_1', 'value': 0},
{'name': 'item_2', 'value': 1},
]
}
self._initWriterForPolicy(self.writer, enum_policy)
self.writer.WritePolicy(enum_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleEnumPolicy)"'
' explainText="$(string.SampleEnumPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy"'
' presentation="$(presentation.SampleEnumPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy" valueName="SampleEnumPolicy">\n'
' <item displayName="$(string.item_1)">\n'
' <value>\n'
' <decimal value="0"/>\n'
' </value>\n'
' </item>\n'
' <item displayName="$(string.item_2)">\n'
' <value>\n'
' <decimal value="1"/>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testStringEnumPolicy(self):
enum_policy = {
'name': 'SampleEnumPolicy',
'type': 'string-enum',
'items': [
{'name': 'item_1', 'value': 'one'},
{'name': 'item_2', 'value': 'two'},
]
}
    # This test differs from the others in that it also checks that
    # whitespace inside <string> nodes is handled correctly.
dom_impl = minidom.getDOMImplementation('')
self.writer._doc = dom_impl.createDocument(None, 'policyDefinitions', None)
self.writer._active_policies_elem = self.writer._doc.documentElement
self.writer._active_mandatory_policy_group_name = 'PolicyGroup'
self.writer.WritePolicy(enum_policy)
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions>\n'
' <policy class="TestClass" displayName="$(string.SampleEnumPolicy)"'
' explainText="$(string.SampleEnumPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy"'
' presentation="$(presentation.SampleEnumPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy" valueName="SampleEnumPolicy">\n'
' <item displayName="$(string.item_1)">\n'
' <value>\n'
' <string>one</string>\n'
' </value>\n'
' </item>\n'
' <item displayName="$(string.item_2)">\n'
' <value>\n'
' <string>two</string>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
' </policy>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
def testListPolicy(self):
list_policy = {
'name': 'SampleListPolicy',
'type': 'list',
}
self._initWriterForPolicy(self.writer, list_policy)
self.writer.WritePolicy(list_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleListPolicy)"'
' explainText="$(string.SampleListPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleListPolicy"'
' presentation="$(presentation.SampleListPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <list id="SampleListPolicyDesc"'
        ' key="Software\\Policies\\Test\\SampleListPolicy" valuePrefix=""/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testStringEnumListPolicy(self):
list_policy = {
'name': 'SampleListPolicy',
'type': 'string-enum-list',
'items': [
{'name': 'item_1', 'value': 'one'},
{'name': 'item_2', 'value': 'two'},
]
}
self._initWriterForPolicy(self.writer, list_policy)
self.writer.WritePolicy(list_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleListPolicy)"'
' explainText="$(string.SampleListPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleListPolicy"'
' presentation="$(presentation.SampleListPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <list id="SampleListPolicyDesc"'
        ' key="Software\\Policies\\Test\\SampleListPolicy" valuePrefix=""/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testDictionaryPolicy(self):
dict_policy = {
'name': 'SampleDictionaryPolicy',
'type': 'dict',
}
self._initWriterForPolicy(self.writer, dict_policy)
self.writer.WritePolicy(dict_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.'
'SampleDictionaryPolicy)"'
' explainText="$(string.SampleDictionaryPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleDictionaryPolicy"'
' presentation="$(presentation.SampleDictionaryPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <text id="SampleDictionaryPolicy" maxLength="1000000"'
' valueName="SampleDictionaryPolicy"/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testPlatform(self):
# Test that the writer correctly chooses policies of platform Windows.
self.assertTrue(self.writer.IsPolicySupported({
'supported_on': [
{'platforms': ['win', 'zzz']}, {'platforms': ['aaa']}
]
}))
self.assertFalse(self.writer.IsPolicySupported({
'supported_on': [
{'platforms': ['mac', 'linux']}, {'platforms': ['aaa']}
]
}))
def testStringEncodings(self):
enum_policy_a = {
'name': 'SampleEnumPolicy.A',
'type': 'string-enum',
'items': [
{'name': 'tls1.2', 'value': 'tls1.2'}
]
}
enum_policy_b = {
'name': 'SampleEnumPolicy.B',
'type': 'string-enum',
'items': [
{'name': 'tls1.2', 'value': 'tls1.2'}
]
}
dom_impl = minidom.getDOMImplementation('')
self.writer._doc = dom_impl.createDocument(None, 'policyDefinitions', None)
self.writer._active_policies_elem = self.writer._doc.documentElement
self.writer._active_mandatory_policy_group_name = 'PolicyGroup'
self.writer.WritePolicy(enum_policy_a)
self.writer.WritePolicy(enum_policy_b)
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions>\n'
' <policy class="TestClass" displayName="$(string.SampleEnumPolicy_A)"'
' explainText="$(string.SampleEnumPolicy_A_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy.A"'
' presentation="$(presentation.SampleEnumPolicy.A)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy.A" valueName="SampleEnumPolicy.A">\n'
' <item displayName="$(string.tls1_2)">\n'
' <value>\n'
' <string>tls1.2</string>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
' </policy>\n'
' <policy class="TestClass" displayName="$(string.SampleEnumPolicy_B)"'
' explainText="$(string.SampleEnumPolicy_B_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy.B"'
' presentation="$(presentation.SampleEnumPolicy.B)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy.B" valueName="SampleEnumPolicy.B">\n'
' <item displayName="$(string.tls1_2)">\n'
' <value>\n'
' <string>tls1.2</string>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
' </policy>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
if __name__ == '__main__':
unittest.main()
|
kbehafarin/newfies-dialer
|
refs/heads/master
|
newfies/dialer_campaign/views.py
|
3
|
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2013 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required, \
permission_required
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.core.urlresolvers import reverse
from django.core.mail import mail_admins
from django.conf import settings
from django.template.context import RequestContext
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType
from django.db.models import get_model
from dialer_contact.models import Phonebook
from dialer_campaign.models import Campaign
from dialer_campaign.forms import CampaignForm, DuplicateCampaignForm
from dialer_campaign.constants import CAMPAIGN_STATUS, CAMPAIGN_COLUMN_NAME
from dialer_campaign.function_def import check_dialer_setting, dialer_setting_limit, \
user_dialer_setting, user_dialer_setting_msg
from dialer_campaign.tasks import collect_subscriber
from survey.models import Section, Branching, Survey_template
from user_profile.constants import NOTIFICATION_NAME
from frontend_notification.views import frontend_send_notification
from common.common_functions import current_view, get_pagination_vars
from dateutil.relativedelta import relativedelta
from datetime import datetime
import re
@login_required
def update_campaign_status_admin(request, pk, status):
"""Campaign Status (e.g. start|stop|pause|abort) can be changed from
admin interface (via campaign list)"""
obj_campaign = Campaign.objects.get(id=pk)
obj_campaign.status = status
obj_campaign.save()
recipient = request.user
frontend_send_notification(request, status, recipient)
return HttpResponseRedirect(
reverse("admin:dialer_campaign_campaign_changelist"))
@login_required
def update_campaign_status_cust(request, pk, status):
"""Campaign Status (e.g. start|stop|pause|abort) can be changed from
customer interface (via dialer_campaign/campaign list)"""
obj_campaign = Campaign.objects.get(id=pk)
obj_campaign.status = status
obj_campaign.save()
pagination_path = '/campaign/'
if request.session.get('pagination_path'):
pagination_path = request.session.get('pagination_path')
#Check if no phonebook attached
if int(status) == CAMPAIGN_STATUS.START and obj_campaign.phonebook.all().count() == 0:
request.session['error_msg'] = _('error : you have to assign a phonebook to your campaign before starting it')
else:
recipient = request.user
frontend_send_notification(request, status, recipient)
# Notify user while campaign Start
if int(status) == CAMPAIGN_STATUS.START and not obj_campaign.has_been_started:
request.session['info_msg'] = \
_('the campaign global settings cannot be edited when the campaign is started')
# change has_been_started flag
obj_campaign.has_been_started = True
obj_campaign.save()
if obj_campaign.content_type.model == 'survey_template':
# Copy survey
survey_template = Survey_template.objects.get(user=request.user, pk=obj_campaign.object_id)
survey_template.copy_survey_template(obj_campaign.id)
collect_subscriber.delay(obj_campaign.id)
return HttpResponseRedirect(pagination_path)
@login_required
def notify_admin(request):
    """Notify the administrators by email when a system user's dialer
    settings are not properly configured
    """
# Get all the admin users - admin superuser
all_admin_user = User.objects.filter(is_superuser=True)
for user in all_admin_user:
recipient = user
if not request.session['has_notified']:
frontend_send_notification(
request, NOTIFICATION_NAME.dialer_setting_configuration, recipient)
# Send mail to ADMINS
subject = _('dialer setting configuration').title()
message = _('Notification - User Dialer Setting. The user "%(user)s" - "%(user_id)s" is not properly configured to use the system, please configure their dialer settings.') %\
{'user': request.user, 'user_id': request.user.id}
# mail_admins() is a shortcut for sending an email to the site admins,
# as defined in the ADMINS setting
mail_admins(subject, message)
request.session['has_notified'] = True
return HttpResponseRedirect('/dashboard/')
def tpl_control_icon(icon):
"""
    Produce the inline HTML style attribute for a control icon
"""
return 'style="text-decoration:none;background-image:url(%snewfies/icons/%s);"' % (settings.STATIC_URL, icon)
def get_url_campaign_status(id, status):
"""
Helper to display campaign status button on the grid
"""
#Store html for campaign control button
control_play_style = tpl_control_icon('control_play_blue.png')
control_pause_style = tpl_control_icon('control_pause_blue.png')
control_abort_style = tpl_control_icon('control_abort_blue.png')
control_stop_style = tpl_control_icon('control_stop_blue.png')
#set different url for the campaign status
url_cpg_status = 'update_campaign_status_cust/%s' % str(id)
url_cpg_start = '%s/%s/' % (url_cpg_status, CAMPAIGN_STATUS.START)
url_cpg_pause = '%s/%s/' % (url_cpg_status, CAMPAIGN_STATUS.PAUSE)
url_cpg_abort = '%s/%s/' % (url_cpg_status, CAMPAIGN_STATUS.ABORT)
url_cpg_stop = '%s/%s/' % (url_cpg_status, CAMPAIGN_STATUS.END)
#according to the current status, disable link and change the button color
if status == CAMPAIGN_STATUS.START:
url_cpg_start = '#'
control_play_style = tpl_control_icon('control_play.png')
elif status == CAMPAIGN_STATUS.PAUSE:
url_cpg_pause = '#'
control_pause_style = tpl_control_icon('control_pause.png')
elif status == CAMPAIGN_STATUS.ABORT:
url_cpg_abort = '#'
control_abort_style = tpl_control_icon('control_abort.png')
elif status == CAMPAIGN_STATUS.END:
url_cpg_stop = '#'
control_stop_style = tpl_control_icon('control_stop.png')
    # Return all the HTML buttons for campaign status management
return "<a href='%s' class='icon' title='%s' %s></a> <a href='%s' class='icon' title='%s' %s></a> <a href='%s' class='icon' title='%s' %s></a> <a href='%s' class='icon' title='%s' %s></a>" % \
(url_cpg_start, _("start").capitalize(), control_play_style,
url_cpg_pause, _("pause").capitalize(), control_pause_style,
url_cpg_abort, _("abort").capitalize(), control_abort_style,
url_cpg_stop, _("stop").capitalize(), control_stop_style)
def get_app_name(app_label, model_name, object_id):
    """Return the object identified by app_label, model_name & object_id"""
try:
return get_model(app_label, model_name).objects.get(pk=object_id)
except:
return '-'
def _return_link(app_name, obj_id):
"""
Return link on campaign listing view
"""
link = ''
# Object view links
if app_name == 'survey':
link = '<a href="/survey_view/%s/" target="_blank" class="icon" title="%s" %s></a>' % \
(obj_id, _('survey').title(), tpl_control_icon('zoom.png'))
# Object edit links
if app_name == 'survey_template':
link = '<a href="/survey/%s/" target="_blank" class="icon" title="%s" %s></a>' %\
(obj_id, _('edit survey').title(), tpl_control_icon('zoom.png'))
return link
def get_campaign_survey_view(campaign_object):
"""display view button on campaign list"""
link = ''
if campaign_object.status and int(campaign_object.status) == CAMPAIGN_STATUS.START:
if campaign_object.content_type.model == 'survey':
link = _return_link('survey', campaign_object.object_id)
if campaign_object.status and int(campaign_object.status) != CAMPAIGN_STATUS.START:
if campaign_object.content_type.model == 'survey_template':
link = _return_link('survey_template', campaign_object.object_id)
if campaign_object.content_type.model == 'survey':
link = _return_link('survey', campaign_object.object_id)
return link
def make_duplicate_campaign(campaign_object_id):
"""Create link to make duplicate campaign"""
link = '<a href="#campaign-duplicate" url="/campaign_duplicate/%s/" class="campaign-duplicate icon" data-toggle="modal" data-controls-modal="campaign-duplicate" title="%s" %s></a>'\
% (campaign_object_id, _('duplicate this campaign').capitalize(),
tpl_control_icon('layers.png'))
return link
@permission_required('dialer_campaign.view_campaign', login_url='/')
@login_required
def campaign_list(request):
"""List all campaigns for the logged in user
**Attributes**:
* ``template`` - frontend/campaign/list.html
**Logic Description**:
* List all campaigns belonging to the logged in user
"""
request.session['pagination_path'] = request.META['PATH_INFO'] + '?' + request.META['QUERY_STRING']
sort_col_field_list = ['id', 'name', 'startingdate', 'status', 'totalcontact']
default_sort_field = 'id'
pagination_data =\
get_pagination_vars(request, sort_col_field_list, default_sort_field)
PAGE_SIZE = pagination_data['PAGE_SIZE']
sort_order = pagination_data['sort_order']
campaign_list = Campaign.objects.filter(user=request.user).order_by(sort_order)
template = 'frontend/campaign/list.html'
data = {
'module': current_view(request),
'campaign_list': campaign_list,
'total_campaign': campaign_list.count(),
'PAGE_SIZE': PAGE_SIZE,
'CAMPAIGN_COLUMN_NAME': CAMPAIGN_COLUMN_NAME,
'col_name_with_order': pagination_data['col_name_with_order'],
'msg': request.session.get('msg'),
'error_msg': request.session.get('error_msg'),
'info_msg': request.session.get('info_msg'),
'dialer_setting_msg': user_dialer_setting_msg(request.user),
}
request.session['msg'] = ''
request.session['error_msg'] = ''
request.session['info_msg'] = ''
return render_to_response(template, data,
context_instance=RequestContext(request))
def get_content_type(object_string):
"""
Retrieve ContentType and Object ID from a string
It is used by campaign_add & campaign_change
#get_content_type("type:38-id:1")
#{'object_type': <ContentType: Phonebook>, 'object_id': '1'}
"""
contenttype = {}
    matches = re.match(r"type:(\d+)-id:(\d+)", object_string).groups()
object_type_id = matches[0] # get 45 from "type:45-id:38"
contenttype['object_id'] = matches[1] # get 38 from "type:45-id:38"
contenttype['object_type'] = ContentType.objects.get(id=object_type_id)
return contenttype
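# For example, get_content_type("type:45-id:38") looks up the ContentType row
# with id 45 and returns {'object_type': <that ContentType>, 'object_id': '38'};
# note that object_id comes back as a string.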
@permission_required('dialer_campaign.add_campaign', login_url='/')
@login_required
def campaign_add(request):
"""Add a new campaign for the logged in user
**Attributes**:
* ``form`` - CampaignForm
* ``template`` - frontend/campaign/change.html
**Logic Description**:
* Before adding a campaign, check dialer setting limit if
applicable to the user.
* Add the new campaign which will belong to the logged in user
via CampaignForm & get redirected to campaign list
"""
# If dialer setting is not attached with user, redirect to campaign list
if not user_dialer_setting(request.user):
request.session['error_msg'] = \
_("in order to add a campaign, you need to have your settings configured properly, please contact the admin.")
return HttpResponseRedirect("/campaign/")
# Check dialer setting limit
if request.user and request.method != 'POST':
# check Max Number of running campaign
if check_dialer_setting(request, check_for="campaign"):
msg = _("you have too many campaigns. Max allowed %(limit)s") \
% {'limit': dialer_setting_limit(request, limit_for="campaign")}
request.session['msg'] = msg
# campaign limit reached
frontend_send_notification(request, NOTIFICATION_NAME.campaign_limit_reached)
return HttpResponseRedirect("/campaign/")
form = CampaignForm(request.user)
# Add campaign
if request.method == 'POST':
form = CampaignForm(request.user, request.POST)
if form.is_valid():
obj = form.save(commit=False)
contenttype = get_content_type(form.cleaned_data['content_object'])
obj.content_type = contenttype['object_type']
obj.object_id = contenttype['object_id']
obj.user = request.user
obj.save()
form.save_m2m()
request.session["msg"] = _('"%(name)s" added.') %\
{'name': request.POST['name']}
return HttpResponseRedirect('/campaign/')
template = 'frontend/campaign/change.html'
data = {
'module': current_view(request),
'form': form,
'action': 'add',
'AMD': settings.AMD,
}
return render_to_response(template, data,
context_instance=RequestContext(request))
@permission_required('dialer_campaign.delete_campaign', login_url='/')
@login_required
def campaign_del(request, object_id):
"""Delete/Stop campaign for the logged in user
**Attributes**:
* ``object_id`` - Selected campaign object
* ``object_list`` - Selected campaign objects
**Logic Description**:
* Delete/Stop the selected campaign from the campaign list
"""
stop_campaign = request.GET.get('stop_campaign', False)
if int(object_id) != 0:
# When object_id is not 0
campaign = get_object_or_404(Campaign, pk=object_id, user=request.user)
if stop_campaign:
campaign.status = CAMPAIGN_STATUS.END
campaign.save()
request.session["msg"] = _('"%(name)s" is stopped.')\
% {'name': campaign.name}
else:
request.session["msg"] = _('"%(name)s" is deleted.')\
% {'name': campaign.name}
campaign.delete()
else:
# When object_id is 0 (Multiple records delete)
values = request.POST.getlist('select')
values = ", ".join(["%s" % el for el in values])
try:
campaign_list = Campaign.objects\
.filter(user=request.user)\
.extra(where=['id IN (%s)' % values])
if campaign_list:
if stop_campaign:
campaign_list.update(status=CAMPAIGN_STATUS.END)
request.session["msg"] = _('%(count)s campaign(s) are stopped.')\
% {'count': campaign_list.count()}
else:
request.session["msg"] = _('%(count)s campaign(s) are deleted.')\
% {'count': campaign_list.count()}
campaign_list.delete()
except:
raise Http404
return HttpResponseRedirect('/campaign/')
@permission_required('dialer_campaign.change_campaign', login_url='/')
@login_required
def campaign_change(request, object_id):
"""Update/Delete campaign for the logged in user
**Attributes**:
* ``object_id`` - Selected campaign object
* ``form`` - CampaignForm
* ``template`` - frontend/campaign/change.html
**Logic Description**:
* Update/delete selected campaign from the campaign list
via CampaignForm & get redirected to the campaign list
"""
# If dialer setting is not attached with user, redirect to campaign list
if not user_dialer_setting(request.user):
return HttpResponseRedirect("/campaign/")
campaign = get_object_or_404(Campaign, pk=object_id, user=request.user)
content_object = "type:%s-id:%s" % \
(campaign.content_type_id, campaign.object_id)
form = CampaignForm(request.user,
instance=campaign,
initial={'content_object': content_object})
if campaign.status == CAMPAIGN_STATUS.START:
request.session['info_msg'] =\
_('the campaign is started, you can only edit Dialer settings and Campaign schedule')
if request.method == 'POST':
# Delete campaign
if request.POST.get('delete'):
return HttpResponseRedirect('/campaign/del/%s/' % object_id)
else:
# Update campaign
form = CampaignForm(request.user, request.POST, instance=campaign)
if form.is_valid():
form.save()
obj = form.save(commit=False)
selected_content_object = form.cleaned_data['content_object']
if not selected_content_object:
selected_content_object = form.cleaned_data['selected_content_object']
# while campaign status is running
if campaign.status == CAMPAIGN_STATUS.START:
if request.POST.get('selected_phonebook'):
selected_phonebook = str(request.POST.get('selected_phonebook'))\
.split(',')
obj.phonebook = Phonebook.objects\
.filter(id__in=selected_phonebook)
contenttype = get_content_type(selected_content_object)
obj.content_type = contenttype['object_type']
obj.object_id = contenttype['object_id']
obj.save()
request.session["msg"] = _('"%(name)s" is updated.') \
% {'name': request.POST['name']}
request.session['error_msg'] = ''
return HttpResponseRedirect('/campaign/')
template = 'frontend/campaign/change.html'
data = {
'module': current_view(request),
'form': form,
'action': 'update',
'error_msg': request.session.get('error_msg'),
'info_msg': request.session.get('info_msg'),
'AMD': settings.AMD,
}
request.session['error_msg'] = ''
request.session['info_msg'] = ''
return render_to_response(template, data,
context_instance=RequestContext(request))
def make_duplicate_survey(campaign_obj, new_campaign):
"""Make duplicate survey with section & branching
& return new survey object id
"""
survey_obj = campaign_obj.content_type.model_class().objects.get(pk=campaign_obj.object_id)
original_survey_id = survey_obj.id
# make clone of survey
survey_obj.pk = None
survey_obj.campaign = new_campaign
survey_obj.save()
old_new_section_dict = {}
section_objs = Section.objects.filter(survey_id=original_survey_id).order_by('order')
for section_obj in section_objs:
old_section_id = section_obj.id
# make clone of section
section_obj.pk = None
section_obj.survey = survey_obj
section_obj.save()
old_new_section_dict[old_section_id] = section_obj.id
for old_section_id, new_section_id in old_new_section_dict.iteritems():
branching_objs = Branching.objects.filter(section_id=old_section_id)
for branching_obj in branching_objs:
new_goto_id = None
if branching_obj.goto_id is not None:
new_goto_id = old_new_section_dict[branching_obj.goto_id]
branching_obj.pk = None
branching_obj.section_id = new_section_id
branching_obj.goto_id = new_goto_id
branching_obj.save()
return survey_obj.id
@login_required
def campaign_duplicate(request, id):
"""
Duplicate campaign via DuplicateCampaignForm
**Attributes**:
* ``id`` - Selected campaign object
* ``form`` - DuplicateCampaignForm
* ``template`` - frontend/campaign/campaign_duplicate.html
"""
form = DuplicateCampaignForm(request.user)
request.session['error_msg'] = ''
if request.method == 'POST':
form = DuplicateCampaignForm(request.user, request.POST)
if form.is_valid():
original_camp = campaign_obj = Campaign.objects.get(pk=id)
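            # NOTE: original_camp and campaign_obj bind the same instance; its
            # original object_id remains readable below until it is overwritten.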
            # Remember the original survey id; the survey itself is duplicated below
new_survey_id = campaign_obj.object_id
campaign_obj.pk = None
campaign_obj.campaign_code = request.POST.get('campaign_code')
campaign_obj.name = request.POST.get('name')
campaign_obj.status = CAMPAIGN_STATUS.PAUSE
campaign_obj.totalcontact = 0
campaign_obj.completed = 0
campaign_obj.startingdate = datetime.now()
campaign_obj.expirationdate = datetime.now() + relativedelta(days=+1)
campaign_obj.imported_phonebook = ''
campaign_obj.has_been_started = False
campaign_obj.has_been_duplicated = True
campaign_obj.save()
if campaign_obj.content_type.model == 'survey':
new_survey_id = make_duplicate_survey(original_camp, campaign_obj)
campaign_obj.object_id = new_survey_id
campaign_obj.save()
# Many to many field
for pb in request.POST.getlist('phonebook'):
campaign_obj.phonebook.add(pb)
return HttpResponseRedirect('/campaign/')
else:
request.session['error_msg'] = True
else:
request.session['error_msg'] = ''
template = 'frontend/campaign/campaign_duplicate.html'
data = {
'module': current_view(request),
'campaign_id': id,
'form': form,
'err_msg': request.session.get('error_msg'),
}
request.session['error_msg'] = ''
return render_to_response(template, data,
context_instance=RequestContext(request))
|
consulo/consulo-python
|
refs/heads/master
|
plugin/src/test/resources/inspections/PyMethodOverridingInspection/ArgAndKwargs.py
|
83
|
class B:
def foo(self, **kwargs):
pass
class C(B):
def foo(self, arg1=None, **kwargs): # pass
pass
|
codepython/Diamond
|
refs/heads/master
|
src/collectors/nagiosperfdata/nagiosperfdata.py
|
29
|
# coding=utf-8
"""
The NagiosPerfdataCollector parses Nagios performance data in the
PNP4Nagios/Graphios/Metricinga key-value format.
#### Dependencies
* Nagios configured to periodically dump performance data files in
PNP4Nagios format
Configuring Nagios/Icinga
-------------------------
If you're already using Graphios, you're already set up to send metrics through
Metricinga, and you can skip to the next section! If not, read on.
### Modifying the daemon configuration
The default performance data output format used by Nagios and Icinga can't be
easily extended to contain new attributes, so we're going to replace it with
one that prints key-value pairs instead. This will allow us to add in whatever
kind of bookkeeping attributes we want! We need these to do things like override
the display name of a service with a metric name more meaningful to Graphite.
We'll need to edit one of the following files:
* **For Nagios:** /etc/nagios/nagios.cfg
* **For Icinga:** /etc/icinga/icinga.cfg
Make sure that the following configuration keys are set to something like the
values below:
process_performance_data=1
host_perfdata_file=/var/spool/nagios/host-perfdata
host_perfdata_file_mode=a
host_perfdata_file_processing_command=process-host-perfdata-file
host_perfdata_file_processing_interval=60
host_perfdata_file_template=DATATYPE::HOSTPERFDATA\tTIMET::$TIMET$\tHOSTNAME::$HOSTNAME$\tHOSTPERFDATA::$HOSTPERFDATA$\tHOSTCHECKCOMMAND::$HOSTCHECKCOMMAND$\tHOSTSTATE::$HOSTSTATE$\tHOSTSTATETYPE::$HOSTSTATETYPE$\tGRAPHITEPREFIX::$_HOSTGRAPHITEPREFIX$\tGRAPHITEPOSTFIX::$_HOSTGRAPHITEPOSTFIX$ # NOQA
service_perfdata_file=/var/spool/nagios/service-perfdata
service_perfdata_file_mode=a
service_perfdata_file_processing_command=process-service-perfdata-file
service_perfdata_file_processing_interval=60
service_perfdata_file_template=DATATYPE::SERVICEPERFDATA\tTIMET::$TIMET$\tHOSTNAME::$HOSTNAME$\tSERVICEDESC::$SERVICEDESC$\tSERVICEPERFDATA::$SERVICEPERFDATA$\tSERVICECHECKCOMMAND::$SERVICECHECKCOMMAND$\tHOSTSTATE::$HOSTSTATE$\tHOSTSTATETYPE::$HOSTSTATETYPE$\tSERVICESTATE::$SERVICESTATE$\tSERVICESTATETYPE::$SERVICESTATETYPE$\tGRAPHITEPREFIX::$_SERVICEGRAPHITEPREFIX$\tGRAPHITEPOSTFIX::$_SERVICEGRAPHITEPOSTFIX$ # NOQA
Note that you will most likely wish to change $_SERVICEGRAPHITEPREFIX$,
$_HOSTGRAPHITEPREFIX$, $_SERVICEGRAPHITEPOSTFIX$, and $_HOSTGRAPHITEPOSTFIX$
to suit your Graphite naming scheme.
### Configuring file rotation
Next, the rotation commands need to be configured so the performance data files
are periodically moved into the Diamond spool directory. Depending on your
system configuration, these commands may be located in
`/etc/nagios/objects/commands.d`:
define command {
command_name process-host-perfdata-file
command_line /bin/mv /var/spool/nagios/host-perfdata /var/spool/diamond/host-perfdata.$TIMET$ # NOQA
}
define command {
command_name process-service-perfdata-file
command_line /bin/mv /var/spool/nagios/service-perfdata /var/spool/diamond/service-perfdata.$TIMET$ # NOQA
}
"""
import os
import re
import diamond.collector
class NagiosPerfdataCollector(diamond.collector.Collector):
"""Diamond collector for Nagios performance data
"""
GENERIC_FIELDS = ['DATATYPE', 'HOSTNAME', 'TIMET']
HOST_FIELDS = ['HOSTPERFDATA']
SERVICE_FIELDS = ['SERVICEDESC', 'SERVICEPERFDATA']
TOKENIZER_RE = (
r"([^\s]+|'[^']+')=([-.\d]+)(c|s|ms|us|B|KB|MB|GB|TB|%)?"
+ r"(?:;([-.\d]+))?(?:;([-.\d]+))?(?:;([-.\d]+))?(?:;([-.\d]+))?")
def get_default_config_help(self):
config_help = super(NagiosPerfdataCollector,
self).get_default_config_help()
config_help.update({
'perfdata_dir': 'The directory containing Nagios perfdata files'
})
return config_help
def get_default_config(self):
config = super(NagiosPerfdataCollector, self).get_default_config()
config.update({
'path': 'nagiosperfdata',
'perfdata_dir': '/var/spool/diamond/nagiosperfdata',
})
return config
def collect(self):
"""Collect statistics from a Nagios perfdata directory.
"""
perfdata_dir = self.config['perfdata_dir']
try:
filenames = os.listdir(perfdata_dir)
except OSError:
self.log.error("Cannot read directory `{dir}'".format(
dir=perfdata_dir))
return
for filename in filenames:
self._process_file(os.path.join(perfdata_dir, filename))
def _extract_fields(self, line):
"""Extract the key/value fields from a line of performance data
"""
acc = {}
field_tokens = line.split("\t")
for field_token in field_tokens:
kv_tokens = field_token.split('::')
if len(kv_tokens) == 2:
(key, value) = kv_tokens
acc[key] = value
return acc
def _fields_valid(self, d):
"""Verify that all necessary fields are present
Determine whether the fields parsed represent a host or
service perfdata. If the perfdata is unknown, return False.
If the perfdata does not contain all fields required for that
type, return False. Otherwise, return True.
"""
if 'DATATYPE' not in d:
return False
datatype = d['DATATYPE']
if datatype == 'HOSTPERFDATA':
fields = self.GENERIC_FIELDS + self.HOST_FIELDS
elif datatype == 'SERVICEPERFDATA':
fields = self.GENERIC_FIELDS + self.SERVICE_FIELDS
else:
return False
for field in fields:
if field not in d:
return False
return True
def _normalize_to_unit(self, value, unit):
"""Normalize the value to the unit returned.
We use base-1000 for second-based units, and base-1024 for
byte-based units. Sadly, the Nagios-Plugins specification doesn't
disambiguate base-1000 (KB) and base-1024 (KiB).
"""
if unit == 'ms':
return value / 1000.0
if unit == 'us':
return value / 1000000.0
if unit == 'KB':
return value * 1024.0
if unit == 'MB':
return value * 1024768.0
if unit == 'GB':
return value * 1073741824.0
if unit == 'TB':
return value * 1099511627776.0
return value
def _parse_perfdata(self, s):
"""Parse performance data from a perfdata string
"""
metrics = []
counters = re.findall(self.TOKENIZER_RE, s)
        if not counters:
self.log.warning("Failed to parse performance data: {s}".format(
s=s))
return metrics
for (key, value, uom, warn, crit, min, max) in counters:
try:
norm_value = self._normalize_to_unit(float(value), uom)
metrics.append((key, norm_value))
except ValueError:
self.log.warning(
"Couldn't convert value '{value}' to float".format(
value=value))
return metrics
def _process_file(self, path):
"""Parse and submit the metrics from a file
"""
        try:
            f = open(path)
            for line in f:
                self._process_line(line)
            f.close()
            os.remove(path)
except IOError, ex:
self.log.error("Could not open file `{path}': {error}".format(
path=path, error=ex.strerror))
def _process_line(self, line):
"""Parse and submit the metrics from a line of perfdata output
"""
fields = self._extract_fields(line)
        if not self._fields_valid(fields):
            self.log.warning("Missing required fields for line: {line}".format(
                line=line))
            return
metric_path_base = []
graphite_prefix = fields.get('GRAPHITEPREFIX')
graphite_postfix = fields.get('GRAPHITEPOSTFIX')
if graphite_prefix:
metric_path_base.append(graphite_prefix)
hostname = fields['HOSTNAME'].lower()
metric_path_base.append(hostname)
datatype = fields['DATATYPE']
if datatype == 'HOSTPERFDATA':
metric_path_base.append('host')
elif datatype == 'SERVICEPERFDATA':
service_desc = fields.get('SERVICEDESC')
graphite_postfix = fields.get('GRAPHITEPOSTFIX')
if graphite_postfix:
metric_path_base.append(graphite_postfix)
else:
metric_path_base.append(service_desc)
perfdata = fields[datatype]
counters = self._parse_perfdata(perfdata)
for (counter, value) in counters:
metric_path = metric_path_base + [counter]
metric_path = [self._sanitize(x) for x in metric_path]
metric_name = '.'.join(metric_path)
self.publish(metric_name, value)
def _sanitize(self, s):
"""Sanitize the name of a metric to remove unwanted chars
"""
        return re.sub(r"[^\w-]", "_", s)
|
nomnombtc/bitcoin
|
refs/heads/master
|
qa/rpc-tests/rpcbind_test.py
|
17
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test for -rpcbind, as well as -rpcallowip and -rpcconnect
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.netutil import *
class RPCBindTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self):
pass
def setup_nodes(self):
pass
def run_bind_test(self, allow_ips, connect_to, addresses, expected):
'''
Start a node with requested rpcallowip and rpcbind parameters,
then try to connect, and check if the set of bound addresses
matches the expected set.
'''
expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
base_args = ['-disablewallet', '-nolisten']
if allow_ips:
base_args += ['-rpcallowip=' + x for x in allow_ips]
binds = ['-rpcbind='+addr for addr in addresses]
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [base_args + binds], connect_to)
try:
pid = bitcoind_processes[0].pid
assert_equal(set(get_bind_addrs(pid)), set(expected))
finally:
stop_nodes(self.nodes)
def run_allowip_test(self, allow_ips, rpchost, rpcport):
'''
        Start a node with the given -rpcallowip list, and request getnetworkinfo
at a non-localhost IP.
'''
base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [base_args])
try:
# connect to node through non-loopback interface
node = get_rpc_proxy(rpc_url(0, "%s:%d" % (rpchost, rpcport)), 0)
node.getnetworkinfo()
finally:
node = None # make sure connection will be garbage collected and closed
stop_nodes(self.nodes)
def run_test(self):
# due to OS-specific network stats queries, this test works only on Linux
assert(sys.platform.startswith('linux'))
# find the first non-loopback interface for testing
non_loopback_ip = None
for name,ip in all_interfaces():
if ip != '127.0.0.1':
non_loopback_ip = ip
break
if non_loopback_ip is None:
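            # assert on the negation of a non-empty string always fails, so the
            # string below doubles as the failure reason shown in the traceback.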
assert(not 'This test requires at least one non-loopback IPv4 interface')
print("Using interface %s for testing" % non_loopback_ip)
defaultport = rpc_port(0)
# check default without rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(None, '127.0.0.1', [],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check default with rpcallowip (IPv6 any)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
[('::0', defaultport)])
# check only IPv4 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
[('127.0.0.1', defaultport)])
# check only IPv4 localhost (explicit) with alternative port
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
[('127.0.0.1', 32171)])
# check only IPv4 localhost (explicit) with multiple alternative ports on same host
self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
[('127.0.0.1', 32171), ('127.0.0.1', 32172)])
# check only IPv6 localhost (explicit)
self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
[('::1', defaultport)])
# check both IPv4 and IPv6 localhost (explicit)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check only non-loopback interface
self.run_bind_test([non_loopback_ip], non_loopback_ip, [non_loopback_ip],
[(non_loopback_ip, defaultport)])
# Check that with invalid rpcallowip, we are denied
self.run_allowip_test([non_loopback_ip], non_loopback_ip, defaultport)
try:
self.run_allowip_test(['1.1.1.1'], non_loopback_ip, defaultport)
assert(not 'Connection not denied by rpcallowip as expected')
except JSONRPCException:
pass
if __name__ == '__main__':
RPCBindTest().main()
|
yakky/django-cms
|
refs/heads/develop
|
cms/tests/test_wizards.py
|
3
|
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.forms.models import ModelForm
from django.template import TemplateSyntaxError
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.encoding import smart_text
from django.utils.translation import ugettext as _
from cms.api import create_page, publish_page
from cms.cms_wizards import CMSPageWizard
from cms.constants import TEMPLATE_INHERITANCE_MAGIC
from cms.forms.wizards import CreateCMSPageForm, CreateCMSSubPageForm
from cms.models import Page, PageType, UserSettings
from cms.test_utils.testcases import CMSTestCase, TransactionCMSTestCase
from cms.utils import get_current_site
from cms.utils.conf import get_cms_setting
from cms.wizards.forms import step2_form_factory, WizardStep2BaseForm
from cms.wizards.wizard_base import Wizard
from cms.wizards.wizard_pool import wizard_pool, AlreadyRegisteredException
CreateCMSPageForm = step2_form_factory(
mixin_cls=WizardStep2BaseForm,
entry_form_class=CreateCMSPageForm,
)
CreateCMSSubPageForm = step2_form_factory(
mixin_cls=WizardStep2BaseForm,
entry_form_class=CreateCMSSubPageForm,
)
class WizardForm(forms.Form):
pass
class ModelWizardForm(ModelForm):
class Meta:
model = UserSettings
exclude = []
class BadModelForm(ModelForm):
class Meta:
pass
class WizardTestMixin(object):
page_wizard = None
title_wizard = None
def assertSequencesEqual(self, seq_a, seq_b):
seq_a = list(seq_a)
seq_b = list(seq_b)
zipped = list(zip(seq_a, seq_b))
if len(zipped) < len(seq_a) or len(zipped) < len(seq_b):
self.fail("Sequence lengths are not the same.")
for idx, (a, b) in enumerate(zipped):
if a != b:
self.fail("Sequences differ at index {0}".format(idx))
@classmethod
def setUpClass(cls):
super(WizardTestMixin, cls).setUpClass()
# This prevents auto-discovery, which would otherwise occur as soon as
# tests start, creating unexpected starting conditions.
wizard_pool._discovered = True
class PageWizard(Wizard):
pass
# This is a basic Wizard
cls.page_wizard = PageWizard(
title=_(u"Page"),
weight=100,
form=WizardForm,
model=Page,
template_name='my_template.html', # This doesn't exist anywhere
)
class SettingsWizard(Wizard):
pass
# This is a Wizard that uses a ModelForm to define the model
cls.user_settings_wizard = SettingsWizard(
title=_(u"UserSettings"),
weight=200,
form=ModelWizardForm,
)
class TitleWizard(Wizard):
pass
# This is a bad wizard definition as it neither defines a model, nor
# uses a ModelForm that has model defined in Meta
cls.title_wizard = TitleWizard(
title=_(u"Page"),
weight=100,
form=BadModelForm,
template_name='my_template.html', # This doesn't exist anywhere
)
class TestWizardBase(WizardTestMixin, TransactionCMSTestCase):
def test_user_has_add_permission(self):
# Test does not have permission
user = self.get_staff_user_with_no_permissions()
self.assertFalse(self.page_wizard.user_has_add_permission(user))
# Test has permission
user = self.get_superuser()
self.assertTrue(self.page_wizard.user_has_add_permission(user))
def test_get_success_url(self):
user = self.get_superuser()
page = create_page(
title="Sample Page",
template=TEMPLATE_INHERITANCE_MAGIC,
language="en",
created_by=smart_text(user),
parent=None,
in_navigation=True,
published=False
)
url = "{0}?edit".format(page.get_absolute_url(language="en"))
self.assertEqual(self.page_wizard.get_success_url(
page, language="en"), url)
# Now again without a language code
url = "{0}?edit".format(page.get_absolute_url())
self.assertEqual(self.page_wizard.get_success_url(page), url)
def test_get_model(self):
self.assertEqual(self.page_wizard.get_model(), Page)
self.assertEqual(self.user_settings_wizard.get_model(), UserSettings)
with self.assertRaises(ImproperlyConfigured):
self.title_wizard.get_model()
def test_endpoint_auth_required(self):
endpoint = reverse('cms_wizard_create')
staff_active = self._create_user("staff-active", is_staff=True, is_superuser=False, is_active=True)
response = self.client.get(endpoint)
self.assertEqual(response.status_code, 403)
with self.login_user_context(staff_active):
response = self.client.get(endpoint)
self.assertEqual(response.status_code, 200)
class TestWizardPool(WizardTestMixin, CMSTestCase):
def test_discover(self):
wizard_pool._reset()
self.assertFalse(wizard_pool._discovered)
self.assertEqual(len(wizard_pool._entries), 0)
wizard_pool._discover()
self.assertTrue(wizard_pool._discovered)
def test_register_unregister_isregistered(self):
wizard_pool._clear()
self.assertEqual(len(wizard_pool._entries), 0)
wizard_pool.register(self.page_wizard)
# Now, try to register the same thing
with self.assertRaises(AlreadyRegisteredException):
wizard_pool.register(self.page_wizard)
self.assertEqual(len(wizard_pool._entries), 1)
self.assertTrue(wizard_pool.is_registered(self.page_wizard))
self.assertTrue(wizard_pool.unregister(self.page_wizard))
self.assertEqual(len(wizard_pool._entries), 0)
# Now, try to unregister something that is not registered
self.assertFalse(wizard_pool.unregister(self.user_settings_wizard))
def test_get_entry(self):
wizard_pool._clear()
wizard_pool.register(self.page_wizard)
entry = wizard_pool.get_entry(self.page_wizard)
self.assertEqual(entry, self.page_wizard)
def test_get_entries(self):
"""
Test that the registered entries are returned in weight-order, no matter
which order they were added.
"""
wizard_pool._clear()
wizard_pool.register(self.page_wizard)
wizard_pool.register(self.user_settings_wizard)
wizards = [self.page_wizard, self.user_settings_wizard]
wizards = sorted(wizards, key=lambda e: getattr(e, 'weight'))
entries = wizard_pool.get_entries()
self.assertSequencesEqual(entries, wizards)
wizard_pool._clear()
wizard_pool.register(self.user_settings_wizard)
wizard_pool.register(self.page_wizard)
wizards = [self.page_wizard, self.user_settings_wizard]
wizards = sorted(wizards, key=lambda e: getattr(e, 'weight'))
entries = wizard_pool.get_entries()
self.assertSequencesEqual(entries, wizards)
class TestPageWizard(WizardTestMixin, CMSTestCase):
def test_str(self):
page_wizard = [
entry for entry in wizard_pool.get_entries()
if isinstance(entry, CMSPageWizard)
][0]
self.assertEqual(str(page_wizard), page_wizard.title)
def test_repr(self):
page_wizard = [
entry for entry in wizard_pool.get_entries()
if isinstance(entry, CMSPageWizard)
][0]
self.assertIn("cms.cms_wizards.CMSPageWizard", repr(page_wizard))
self.assertIn("id={}".format(page_wizard.id), repr(page_wizard))
self.assertIn(hex(id(page_wizard)), repr(page_wizard))
def test_wizard_first_page_published(self):
superuser = self.get_superuser()
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
}
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
page = form.save()
self.assertTrue(page.is_published('en'))
with self.login_user_context(superuser):
url = page.get_absolute_url('en')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_wizard_create_child_page(self):
superuser = self.get_superuser()
parent_page = create_page(
title="Parent",
template=TEMPLATE_INHERITANCE_MAGIC,
language="en",
)
data = {
'title': 'Child',
'slug': 'child',
'page_type': None,
}
form = CreateCMSSubPageForm(
data=data,
wizard_page=parent_page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
child_page = form.save()
self.assertEqual(child_page.node.depth, 2)
self.assertEqual(child_page.parent_page, parent_page)
self.assertEqual(child_page.get_title('en'), 'Child')
self.assertEqual(child_page.get_path('en'), 'parent/child')
def test_wizard_create_child_page_under_page_type(self):
"""
When a user creates a child page through the wizard,
if the parent page is a page-type, the child page should
also be a page-type.
"""
site = get_current_site()
superuser = self.get_superuser()
source_page = create_page(
title="Source",
template=TEMPLATE_INHERITANCE_MAGIC,
language="en",
)
with self.login_user_context(superuser):
self.client.post(
self.get_admin_url(PageType, 'add'),
data={'source': source_page.pk, 'title': 'type1', 'slug': 'type1', '_save': 1},
)
types_root = PageType.get_root_page(site)
parent_page = types_root.get_child_pages()[0]
data = {
'title': 'page-type-child',
'slug': 'page-type-child',
'page_type': None,
}
form = CreateCMSSubPageForm(
data=data,
wizard_page=parent_page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
child_page = form.save()
self.assertTrue(child_page.is_page_type)
self.assertFalse(child_page.in_navigation)
self.assertEqual(child_page.node.depth, 3)
self.assertEqual(child_page.parent_page, parent_page)
self.assertEqual(child_page.get_title('en'), 'page-type-child')
self.assertEqual(child_page.get_path('en'), 'page_types/type1/page-type-child')
def test_wizard_create_atomic(self):
# Ref: https://github.com/divio/django-cms/issues/5652
# We'll simulate a scenario where a user creates a page with an
# invalid template which causes Django to throw an error when the
# template is scanned for placeholders and thus short circuits the
# creation mechanism.
superuser = self.get_superuser()
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
}
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertFalse(Page.objects.filter(template=TEMPLATE_INHERITANCE_MAGIC).exists())
with self.settings(CMS_TEMPLATES=[("col_invalid.html", "notvalid")]):
self.assertRaises(TemplateSyntaxError, form.save)
# The template raised an exception which should cause the database to roll back
# instead of committing a page in a partial state.
self.assertFalse(Page.objects.filter(template=TEMPLATE_INHERITANCE_MAGIC).exists())
def test_wizard_content_placeholder_setting(self):
"""
Tests that the PageWizard respects the
CMS_PAGE_WIZARD_CONTENT_PLACEHOLDER setting.
"""
templates = get_cms_setting('TEMPLATES')
# NOTE, there are 4 placeholders on this template, defined in this
# order: 'header', 'content', 'sub-content', 'footer'.
# 'footer' is a static-placeholder.
templates.append(('page_wizard.html', 'page_wizard.html', ))
settings = {
'CMS_TEMPLATES': templates,
'CMS_PAGE_WIZARD_DEFAULT_TEMPLATE': 'page_wizard.html',
'CMS_PAGE_WIZARD_CONTENT_PLACEHOLDER': 'sub-content',
}
with override_settings(**settings):
superuser = self.get_superuser()
page = create_page("wizard home", "page_wizard.html", "en")
publish_page(page, superuser, "en")
content = '<p>sub-content content.</p>'
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
'content': content,
}
form = CreateCMSPageForm(
data=data,
wizard_page=page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
page = form.save()
page.publish('en')
with self.login_user_context(superuser):
url = page.get_absolute_url('en')
expected = '<div class="sub-content">{0}</div>'.format(content)
unexpected = '<div class="content">{0}</div>'.format(content)
response = self.client.get(url)
self.assertContains(response, expected, status_code=200)
self.assertNotContains(response, unexpected, status_code=200)
def test_wizard_content_placeholder_bad_setting(self):
"""
Tests that the PageWizard won't respect a 'bad' setting such as
targeting a static-placeholder. In this case, will just fail to
add the content (without error).
"""
templates = get_cms_setting('TEMPLATES')
# NOTE, there are 4 placeholders on this template, defined in this
# order: 'header', 'content', 'sub-content', 'footer'.
# 'footer' is a static-placeholder.
templates.append(('page_wizard.html', 'page_wizard.html', ))
settings = {
'CMS_TEMPLATES': templates,
'CMS_PAGE_WIZARD_DEFAULT_TEMPLATE': 'page_wizard.html',
# This is a bad setting.
'CMS_PAGE_WIZARD_CONTENT_PLACEHOLDER': 'footer',
}
with override_settings(**settings):
superuser = self.get_superuser()
page = create_page("wizard home", "page_wizard.html", "en")
publish_page(page, superuser, "en")
content = '<p>footer content.</p>'
data = {
'title': 'page 1',
'slug': 'page_1',
'page_type': None,
'content': content,
}
form = CreateCMSPageForm(
data=data,
wizard_page=page,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
page = form.save()
page.publish('en')
with self.login_user_context(superuser):
url = page.get_absolute_url('en')
response = self.client.get(url)
self.assertNotContains(response, content, status_code=200)
def test_create_page_with_empty_fields(self):
superuser = self.get_superuser()
data = {
'title': '',
'slug': '',
'page_type': None,
}
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertFalse(form.is_valid())
def test_create_page_with_existing_slug(self):
superuser = self.get_superuser()
data = {
'title': 'page',
'slug': 'page',
'page_type': None,
}
create_page(
'page',
'nav_playground.html',
language='en',
published=True,
slug='page'
)
# slug -> page-1
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-2'))
# slug -> page-2
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-3'))
# Now explicitly request the page-2 slug
data['slug'] = 'page-2'
# slug -> page-2-2
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-2-2'))
# slug -> page-2-3
form = CreateCMSPageForm(
data=data,
wizard_page=None,
wizard_user=superuser,
wizard_language='en',
)
self.assertTrue(form.is_valid())
self.assertTrue(form.save().title_set.filter(slug='page-2-3'))
|
Bashar/django
|
refs/heads/master
|
django/contrib/flatpages/views.py
|
105
|
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.shortcuts import get_current_site
from django.http import Http404, HttpResponse, HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.template import loader, RequestContext
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect
DEFAULT_TEMPLATE = 'flatpages/default.html'
# This view is called from FlatpageFallbackMiddleware.process_response
# when a 404 is raised, which often means CsrfViewMiddleware.process_view
# has not been called even if CsrfViewMiddleware is installed. So we need
# to use @csrf_protect, in case the template needs {% csrf_token %}.
# However, we can't just wrap this view; if no matching flatpage exists,
# or a redirect is required for authentication, the 404 needs to be returned
# without any CSRF checks. Therefore, we only
# CSRF protect the internal implementation.
def flatpage(request, url):
"""
Public interface to the flat page view.
Models: `flatpages.flatpages`
Templates: Uses the template defined by the ``template_name`` field,
or :template:`flatpages/default.html` if template_name is not defined.
Context:
flatpage
`flatpages.flatpages` object
"""
if not url.startswith('/'):
url = '/' + url
site_id = get_current_site(request).id
try:
f = get_object_or_404(FlatPage,
url=url, sites=site_id)
except Http404:
if not url.endswith('/') and settings.APPEND_SLASH:
url += '/'
f = get_object_or_404(FlatPage,
url=url, sites=site_id)
return HttpResponsePermanentRedirect('%s/' % request.path)
else:
raise
return render_flatpage(request, f)
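# A minimal URLconf hook (a sketch; the exact pattern is project-specific):
#   url(r'^(?P<url>.*/)$', flatpage),
# which routes any remaining trailing-slash path into this view.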
@csrf_protect
def render_flatpage(request, f):
"""
Internal interface to the flat page view.
"""
# If registration is required for accessing this page, and the user isn't
# logged in, redirect to the login page.
if f.registration_required and not request.user.is_authenticated():
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(request.path)
if f.template_name:
t = loader.select_template((f.template_name, DEFAULT_TEMPLATE))
else:
t = loader.get_template(DEFAULT_TEMPLATE)
# To avoid having to always use the "|safe" filter in flatpage templates,
# mark the title and content as already safe (since they are raw HTML
# content in the first place).
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
c = RequestContext(request, {
'flatpage': f,
})
response = HttpResponse(t.render(c))
return response
|
davidegalletti/koa-proof-of-concept
|
refs/heads/master
|
kag/taxonomy/migrations/0001_initial.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('entity', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Taxonomy',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100L)),
('description', models.CharField(max_length=2000L, blank=True)),
],
),
migrations.CreateModel(
name='TaxonomyLevel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('table_name', models.CharField(max_length=255L, blank=True)),
('description_field', models.CharField(max_length=255L, db_column=b'descriptionField', blank=True)),
('id_field', models.CharField(max_length=255L, db_column=b'idField', blank=True)),
('name_field', models.CharField(max_length=255L, db_column=b'nameField', blank=True)),
('label', models.CharField(max_length=255L, blank=True)),
('default', models.BooleanField(default=True)),
('dataset_structure', models.ForeignKey(to='entity.Entity', blank=True)),
],
),
migrations.CreateModel(
name='TaxonomyLevelGraph',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('foreign_key_upper', models.CharField(max_length=255L)),
('lower', models.ForeignKey(related_name='lower', to='taxonomy.TaxonomyLevel')),
('upper', models.ForeignKey(related_name='upper', to='taxonomy.TaxonomyLevel')),
],
),
migrations.AddField(
model_name='taxonomylevel',
name='upper_levels',
field=models.ManyToManyField(related_name='lower_levels', through='taxonomy.TaxonomyLevelGraph', to='taxonomy.TaxonomyLevel'),
),
migrations.AddField(
model_name='taxonomy',
name='first_level',
field=models.ForeignKey(to='taxonomy.TaxonomyLevel'),
),
]
|
aparna29/Implementation-of-Random-Exponential-Marking-REM-in-ns-3
|
refs/heads/master
|
bindings/python/ns3modulegen.py
|
199
|
LOCAL_MODULES = [
#'my_extra_api_definitions',
]
import sys
import os
sys.path.insert(0, sys.argv[2])
from pybindgen import FileCodeSink, write_preamble
from pybindgen.module import MultiSectionFactory
import pybindgen.settings
pybindgen.settings.deprecated_virtuals = False
from ns3modulegen_generated import module_init, register_types, register_methods, register_functions
import ns3modulegen_core_customizations
import callbacks_list
import traceback
this_script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
print >> sys.stderr
print >> sys.stderr, "---- location:"
traceback.print_stack()
print >> sys.stderr, "---- error:"
traceback.print_tb(traceback_)
try:
stack = wrapper.stack_where_defined
except AttributeError:
print >> sys.stderr, "??:??: %s / %r" % (wrapper, exception)
else:
stack = list(stack)
stack.reverse()
for (filename, line_number, function_name, text) in stack:
file_dir = os.path.dirname(os.path.abspath(filename))
if file_dir.startswith(this_script_dir):
print >> sys.stderr, "%s:%i: %r" % (os.path.join("..", "bindings", "python", os.path.basename(filename)),
line_number, exception)
break
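        # Returning True tells pybindgen the error has been handled, so code
        # generation continues and the offending wrapper is simply skipped.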
return True
pybindgen.settings.error_handler = ErrorHandler()
pybindgen.settings.wrapper_registry = pybindgen.settings.StdMapWrapperRegistry
class MyMultiSectionFactory(MultiSectionFactory):
def __init__(self, main_file_name, modules):
super(MyMultiSectionFactory, self).__init__()
self.main_file_name = main_file_name
self.main_sink = FileCodeSink(open(main_file_name, "wt"))
self.header_name = "ns3module.h"
header_file_name = os.path.join(os.path.dirname(self.main_file_name), 'pch', self.header_name)
self.header_sink = FileCodeSink(open(header_file_name, "wt"))
self.section_sinks = {'__main__': self.main_sink}
for module in modules:
section_name = 'ns3_module_%s' % module.replace('-', '_')
file_name = os.path.join(os.path.dirname(self.main_file_name), "%s.cc" % section_name)
sink = FileCodeSink(open(file_name, "wt"))
self.section_sinks[section_name] = sink
def get_section_code_sink(self, section_name):
return self.section_sinks[section_name]
def get_main_code_sink(self):
return self.main_sink
def get_common_header_code_sink(self):
return self.header_sink
def get_common_header_include(self):
return '"%s"' % self.header_name
def close(self):
self.header_sink.file.close()
self.main_sink.file.close()
for sink in self.section_sinks.itervalues():
sink.file.close()
def main():
out = MyMultiSectionFactory(sys.argv[1], sys.argv[3:])
root_module = module_init()
root_module.add_include('"everything.h"')
register_types(root_module)
ns3modulegen_core_customizations.Simulator_customizations(root_module)
ns3modulegen_core_customizations.CommandLine_customizations(root_module)
ns3modulegen_core_customizations.TypeId_customizations(root_module)
ns3modulegen_core_customizations.add_std_ofstream(root_module)
ns3modulegen_core_customizations.add_ipv4_address_tp_hash(root_module)
for local_module in LOCAL_MODULES:
mod = __import__(local_module)
mod.register_types(root_module)
ns3modulegen_core_customizations.generate_callback_classes(root_module.after_forward_declarations,
callbacks_list.callback_classes)
register_methods(root_module)
for local_module in LOCAL_MODULES:
mod = __import__(local_module)
mod.register_methods(root_module)
ns3modulegen_core_customizations.Object_customizations(root_module)
ns3modulegen_core_customizations.Attribute_customizations(root_module)
register_functions(root_module)
for local_module in LOCAL_MODULES:
mod = __import__(local_module)
mod.register_functions(root_module)
enabled_features = os.environ['NS3_ENABLED_FEATURES'].split(',')
# if GtkConfigStore support is disabled, disable the class wrapper
if 'GtkConfigStore' not in enabled_features:
try:
root_module.classes.remove(root_module['ns3::GtkConfigStore'])
except KeyError:
pass
# if no sqlite, the class SqliteDataOutput is disabled
if 'SqliteDataOutput' not in enabled_features:
try:
root_module.classes.remove(root_module['ns3::SqliteDataOutput'])
except KeyError:
pass
if 'Threading' not in enabled_features:
for clsname in ['SystemThread', 'SystemMutex', 'SystemCondition', 'CriticalSection',
'SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >']:
root_module.classes.remove(root_module['ns3::%s' % clsname])
if 'EmuNetDevice' not in enabled_features:
for clsname in ['EmuNetDevice', 'EmuHelper']:
root_module.classes.remove(root_module['ns3::%s' % clsname])
root_module.enums.remove(root_module['ns3::EmuNetDevice::EncapsulationMode'])
if 'RealTime' not in enabled_features:
for clsname in ['WallClockSynchronizer', 'RealtimeSimulatorImpl']:
root_module.classes.remove(root_module['ns3::%s' % clsname])
root_module.enums.remove(root_module['ns3::RealtimeSimulatorImpl::SynchronizationMode'])
if 'TapBridge' not in enabled_features:
for clsname in ['TapBridge', 'TapBridgeHelper', 'TapBridgeFdReader']:
root_module.classes.remove(root_module['ns3::%s' % clsname])
root_module.enums.remove(root_module['ns3::TapBridge::Mode'])
root_module.generate(out, '_ns3')
out.close()
if __name__ == '__main__':
if 0:
try:
import cProfile as profile
except ImportError:
main()
else:
print >> sys.stderr, "** running under profiler"
profile.run('main()', 'ns3modulegen.pstat')
else:
main()
|
dbremner/bite-project
|
refs/heads/master
|
deps/gdata-python-client/src/gdata/youtube/__init__.py
|
297
|
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = ('api.stephaniel@gmail.com (Stephanie Liu)'
', api.jhartmann@gmail.com (Jochen Hartmann)')
import atom
import gdata
import gdata.media as Media
import gdata.geo as Geo
YOUTUBE_NAMESPACE = 'http://gdata.youtube.com/schemas/2007'
YOUTUBE_FORMAT = '{http://gdata.youtube.com/schemas/2007}format'
YOUTUBE_DEVELOPER_TAG_SCHEME = '%s/%s' % (YOUTUBE_NAMESPACE,
'developertags.cat')
YOUTUBE_SUBSCRIPTION_TYPE_SCHEME = '%s/%s' % (YOUTUBE_NAMESPACE,
'subscriptiontypes.cat')
class Username(atom.AtomBase):
"""The YouTube Username element"""
_tag = 'username'
_namespace = YOUTUBE_NAMESPACE
class QueryString(atom.AtomBase):
"""The YouTube QueryString element"""
_tag = 'queryString'
_namespace = YOUTUBE_NAMESPACE
class FirstName(atom.AtomBase):
"""The YouTube FirstName element"""
_tag = 'firstName'
_namespace = YOUTUBE_NAMESPACE
class LastName(atom.AtomBase):
"""The YouTube LastName element"""
_tag = 'lastName'
_namespace = YOUTUBE_NAMESPACE
class Age(atom.AtomBase):
"""The YouTube Age element"""
_tag = 'age'
_namespace = YOUTUBE_NAMESPACE
class Books(atom.AtomBase):
"""The YouTube Books element"""
_tag = 'books'
_namespace = YOUTUBE_NAMESPACE
class Gender(atom.AtomBase):
"""The YouTube Gender element"""
_tag = 'gender'
_namespace = YOUTUBE_NAMESPACE
class Company(atom.AtomBase):
"""The YouTube Company element"""
_tag = 'company'
_namespace = YOUTUBE_NAMESPACE
class Hobbies(atom.AtomBase):
"""The YouTube Hobbies element"""
_tag = 'hobbies'
_namespace = YOUTUBE_NAMESPACE
class Hometown(atom.AtomBase):
"""The YouTube Hometown element"""
_tag = 'hometown'
_namespace = YOUTUBE_NAMESPACE
class Location(atom.AtomBase):
"""The YouTube Location element"""
_tag = 'location'
_namespace = YOUTUBE_NAMESPACE
class Movies(atom.AtomBase):
"""The YouTube Movies element"""
_tag = 'movies'
_namespace = YOUTUBE_NAMESPACE
class Music(atom.AtomBase):
"""The YouTube Music element"""
_tag = 'music'
_namespace = YOUTUBE_NAMESPACE
class Occupation(atom.AtomBase):
"""The YouTube Occupation element"""
_tag = 'occupation'
_namespace = YOUTUBE_NAMESPACE
class School(atom.AtomBase):
"""The YouTube School element"""
_tag = 'school'
_namespace = YOUTUBE_NAMESPACE
class Relationship(atom.AtomBase):
"""The YouTube Relationship element"""
_tag = 'relationship'
_namespace = YOUTUBE_NAMESPACE
class Recorded(atom.AtomBase):
"""The YouTube Recorded element"""
_tag = 'recorded'
_namespace = YOUTUBE_NAMESPACE
class Statistics(atom.AtomBase):
"""The YouTube Statistics element."""
_tag = 'statistics'
_namespace = YOUTUBE_NAMESPACE
_attributes = atom.AtomBase._attributes.copy()
_attributes['viewCount'] = 'view_count'
_attributes['videoWatchCount'] = 'video_watch_count'
_attributes['subscriberCount'] = 'subscriber_count'
_attributes['lastWebAccess'] = 'last_web_access'
_attributes['favoriteCount'] = 'favorite_count'
def __init__(self, view_count=None, video_watch_count=None,
favorite_count=None, subscriber_count=None, last_web_access=None,
extension_elements=None, extension_attributes=None, text=None):
self.view_count = view_count
self.video_watch_count = video_watch_count
self.subscriber_count = subscriber_count
self.last_web_access = last_web_access
self.favorite_count = favorite_count
atom.AtomBase.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes, text=text)
class Status(atom.AtomBase):
"""The YouTube Status element"""
_tag = 'status'
_namespace = YOUTUBE_NAMESPACE
class Position(atom.AtomBase):
"""The YouTube Position element. The position in a playlist feed."""
_tag = 'position'
_namespace = YOUTUBE_NAMESPACE
class Racy(atom.AtomBase):
"""The YouTube Racy element."""
_tag = 'racy'
_namespace = YOUTUBE_NAMESPACE
class Description(atom.AtomBase):
"""The YouTube Description element."""
_tag = 'description'
_namespace = YOUTUBE_NAMESPACE
class Private(atom.AtomBase):
"""The YouTube Private element."""
_tag = 'private'
_namespace = YOUTUBE_NAMESPACE
class NoEmbed(atom.AtomBase):
"""The YouTube VideoShare element. Whether a video can be embedded or not."""
_tag = 'noembed'
_namespace = YOUTUBE_NAMESPACE
class Comments(atom.AtomBase):
"""The GData Comments element"""
_tag = 'comments'
_namespace = gdata.GDATA_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
[gdata.FeedLink])
def __init__(self, feed_link=None, extension_elements=None,
extension_attributes=None, text=None):
self.feed_link = feed_link
atom.AtomBase.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes, text=text)
class Rating(atom.AtomBase):
"""The GData Rating element"""
_tag = 'rating'
_namespace = gdata.GDATA_NAMESPACE
_attributes = atom.AtomBase._attributes.copy()
_attributes['min'] = 'min'
_attributes['max'] = 'max'
_attributes['numRaters'] = 'num_raters'
_attributes['average'] = 'average'
def __init__(self, min=None, max=None,
num_raters=None, average=None, extension_elements=None,
extension_attributes=None, text=None):
self.min = min
self.max = max
self.num_raters = num_raters
self.average = average
atom.AtomBase.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes, text=text)
class YouTubePlaylistVideoEntry(gdata.GDataEntry):
"""Represents a YouTubeVideoEntry on a YouTubePlaylist."""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
[gdata.FeedLink])
_children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
Description)
_children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
_children['{%s}comments' % gdata.GDATA_NAMESPACE] = ('comments', Comments)
_children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
_children['{%s}location' % YOUTUBE_NAMESPACE] = ('location', Location)
_children['{%s}position' % YOUTUBE_NAMESPACE] = ('position', Position)
_children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None, title=None,
updated=None, feed_link=None, description=None,
rating=None, comments=None, statistics=None,
location=None, position=None, media=None,
extension_elements=None, extension_attributes=None):
self.feed_link = feed_link
self.description = description
self.rating = rating
self.comments = comments
self.statistics = statistics
self.location = location
self.position = position
self.media = media
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id,
link=link, published=published, title=title,
updated=updated,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
class YouTubeVideoCommentEntry(gdata.GDataEntry):
"""Represents a comment on YouTube."""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
class YouTubeSubscriptionEntry(gdata.GDataEntry):
"""Represents a subscription entry on YouTube."""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
_children['{%s}queryString' % YOUTUBE_NAMESPACE] = (
'query_string', QueryString)
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
[gdata.FeedLink])
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None, title=None,
updated=None, username=None, query_string=None, feed_link=None,
extension_elements=None, extension_attributes=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id, link=link,
published=published, title=title, updated=updated)
self.username = username
self.query_string = query_string
self.feed_link = feed_link
def GetSubscriptionType(self):
"""Retrieve the type of this subscription.
Returns:
      A string that is either 'channel', 'query' or 'favorites'
"""
for category in self.category:
if category.scheme == YOUTUBE_SUBSCRIPTION_TYPE_SCHEME:
return category.term
class YouTubeVideoResponseEntry(gdata.GDataEntry):
"""Represents a video response. """
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
_children['{%s}noembed' % YOUTUBE_NAMESPACE] = ('noembed', NoEmbed)
_children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
_children['{%s}racy' % YOUTUBE_NAMESPACE] = ('racy', Racy)
_children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)
def __init__(self, author=None, category=None, content=None, atom_id=None,
link=None, published=None, title=None, updated=None, rating=None,
noembed=None, statistics=None, racy=None, media=None,
extension_elements=None, extension_attributes=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id, link=link,
published=published, title=title, updated=updated)
self.rating = rating
self.noembed = noembed
self.statistics = statistics
self.racy = racy
self.media = media or Media.Group()
class YouTubeContactEntry(gdata.GDataEntry):
"""Represents a contact entry."""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
_children['{%s}status' % YOUTUBE_NAMESPACE] = ('status', Status)
def __init__(self, author=None, category=None, content=None, atom_id=None,
link=None, published=None, title=None, updated=None,
username=None, status=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id, link=link,
published=published, title=title, updated=updated)
self.username = username
self.status = status
class YouTubeVideoEntry(gdata.GDataEntry):
"""Represents a video on YouTube."""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}rating' % gdata.GDATA_NAMESPACE] = ('rating', Rating)
_children['{%s}comments' % gdata.GDATA_NAMESPACE] = ('comments', Comments)
_children['{%s}noembed' % YOUTUBE_NAMESPACE] = ('noembed', NoEmbed)
_children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
_children['{%s}recorded' % YOUTUBE_NAMESPACE] = ('recorded', Recorded)
_children['{%s}racy' % YOUTUBE_NAMESPACE] = ('racy', Racy)
_children['{%s}group' % gdata.media.MEDIA_NAMESPACE] = ('media', Media.Group)
_children['{%s}where' % gdata.geo.GEORSS_NAMESPACE] = ('geo', Geo.Where)
def __init__(self, author=None, category=None, content=None, atom_id=None,
link=None, published=None, title=None, updated=None, rating=None,
noembed=None, statistics=None, racy=None, media=None, geo=None,
recorded=None, comments=None, extension_elements=None,
extension_attributes=None):
self.rating = rating
self.noembed = noembed
self.statistics = statistics
self.racy = racy
self.comments = comments
self.media = media or Media.Group()
self.geo = geo
self.recorded = recorded
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id, link=link,
published=published, title=title, updated=updated,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
def GetSwfUrl(self):
"""Return the URL for the embeddable Video
Returns:
URL of the embeddable video
"""
if self.media.content:
for content in self.media.content:
if content.extension_attributes[YOUTUBE_FORMAT] == '5':
return content.url
else:
return None
def AddDeveloperTags(self, developer_tags):
"""Add a developer tag for this entry.
Developer tags can only be set during the initial upload.
Arguments:
developer_tags: A list of developer tags as strings.
Returns:
A list of all developer tags for this video entry.
"""
for tag_text in developer_tags:
self.media.category.append(gdata.media.Category(
text=tag_text, label=tag_text, scheme=YOUTUBE_DEVELOPER_TAG_SCHEME))
return self.GetDeveloperTags()
def GetDeveloperTags(self):
"""Retrieve developer tags for this video entry."""
developer_tags = []
for category in self.media.category:
if category.scheme == YOUTUBE_DEVELOPER_TAG_SCHEME:
developer_tags.append(category)
if len(developer_tags) > 0:
return developer_tags
def GetYouTubeCategoryAsString(self):
"""Convenience method to return the YouTube category as string.
YouTubeVideoEntries can contain multiple Category objects with differing
schemes. This method returns only the category with the correct
scheme, ignoring developer tags.
"""
for category in self.media.category:
if category.scheme != YOUTUBE_DEVELOPER_TAG_SCHEME:
return category.text
class YouTubeUserEntry(gdata.GDataEntry):
"""Represents a user on YouTube."""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}username' % YOUTUBE_NAMESPACE] = ('username', Username)
_children['{%s}firstName' % YOUTUBE_NAMESPACE] = ('first_name', FirstName)
_children['{%s}lastName' % YOUTUBE_NAMESPACE] = ('last_name', LastName)
_children['{%s}age' % YOUTUBE_NAMESPACE] = ('age', Age)
_children['{%s}books' % YOUTUBE_NAMESPACE] = ('books', Books)
_children['{%s}gender' % YOUTUBE_NAMESPACE] = ('gender', Gender)
_children['{%s}company' % YOUTUBE_NAMESPACE] = ('company', Company)
_children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
Description)
_children['{%s}hobbies' % YOUTUBE_NAMESPACE] = ('hobbies', Hobbies)
_children['{%s}hometown' % YOUTUBE_NAMESPACE] = ('hometown', Hometown)
_children['{%s}location' % YOUTUBE_NAMESPACE] = ('location', Location)
_children['{%s}movies' % YOUTUBE_NAMESPACE] = ('movies', Movies)
_children['{%s}music' % YOUTUBE_NAMESPACE] = ('music', Music)
_children['{%s}occupation' % YOUTUBE_NAMESPACE] = ('occupation', Occupation)
_children['{%s}school' % YOUTUBE_NAMESPACE] = ('school', School)
_children['{%s}relationship' % YOUTUBE_NAMESPACE] = ('relationship',
Relationship)
_children['{%s}statistics' % YOUTUBE_NAMESPACE] = ('statistics', Statistics)
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
[gdata.FeedLink])
_children['{%s}thumbnail' % gdata.media.MEDIA_NAMESPACE] = ('thumbnail',
Media.Thumbnail)
def __init__(self, author=None, category=None, content=None, atom_id=None,
link=None, published=None, title=None, updated=None,
username=None, first_name=None, last_name=None, age=None,
books=None, gender=None, company=None, description=None,
hobbies=None, hometown=None, location=None, movies=None,
music=None, occupation=None, school=None, relationship=None,
statistics=None, feed_link=None, extension_elements=None,
extension_attributes=None, text=None):
self.username = username
self.first_name = first_name
self.last_name = last_name
self.age = age
self.books = books
self.gender = gender
self.company = company
self.description = description
self.hobbies = hobbies
self.hometown = hometown
self.location = location
self.movies = movies
self.music = music
self.occupation = occupation
self.school = school
self.relationship = relationship
self.statistics = statistics
self.feed_link = feed_link
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id,
link=link, published=published,
title=title, updated=updated,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
class YouTubeVideoFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a video feed on YouTube."""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [YouTubeVideoEntry])
class YouTubePlaylistEntry(gdata.GDataEntry):
"""Represents a playlist in YouTube."""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}description' % YOUTUBE_NAMESPACE] = ('description',
Description)
_children['{%s}private' % YOUTUBE_NAMESPACE] = ('private',
Private)
_children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feed_link',
[gdata.FeedLink])
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None, title=None,
updated=None, private=None, feed_link=None,
description=None, extension_elements=None,
extension_attributes=None):
self.description = description
self.private = private
self.feed_link = feed_link
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id,
link=link, published=published, title=title,
updated=updated,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
class YouTubePlaylistFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a feed of a user's playlists """
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[YouTubePlaylistEntry])
class YouTubePlaylistVideoFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a feed of video entry on a playlist."""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[YouTubePlaylistVideoEntry])
class YouTubeContactFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a feed of a users contacts."""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[YouTubeContactEntry])
class YouTubeSubscriptionFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a feed of a users subscriptions."""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[YouTubeSubscriptionEntry])
class YouTubeVideoCommentFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a feed of comments for a video."""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[YouTubeVideoCommentEntry])
class YouTubeVideoResponseFeed(gdata.GDataFeed, gdata.LinkFinder):
"""Represents a feed of video responses."""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
[YouTubeVideoResponseEntry])
def YouTubeVideoFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeVideoFeed, xml_string)
def YouTubeVideoEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeVideoEntry, xml_string)
def YouTubeContactFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeContactFeed, xml_string)
def YouTubeContactEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeContactEntry, xml_string)
def YouTubeVideoCommentFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeVideoCommentFeed, xml_string)
def YouTubeVideoCommentEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeVideoCommentEntry, xml_string)
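# Note: this apparently mirrors the upstream gdata library, where a user's
# uploads feed is itself a feed of videos and therefore parses into
# YouTubeVideoFeed rather than a dedicated user-feed class.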
def YouTubeUserFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeVideoFeed, xml_string)
def YouTubeUserEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeUserEntry, xml_string)
def YouTubePlaylistFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubePlaylistFeed, xml_string)
def YouTubePlaylistVideoFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubePlaylistVideoFeed, xml_string)
def YouTubePlaylistEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubePlaylistEntry, xml_string)
def YouTubePlaylistVideoEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubePlaylistVideoEntry, xml_string)
def YouTubeSubscriptionFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeSubscriptionFeed, xml_string)
def YouTubeSubscriptionEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeSubscriptionEntry, xml_string)
def YouTubeVideoResponseFeedFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeVideoResponseFeed, xml_string)
def YouTubeVideoResponseEntryFromString(xml_string):
return atom.CreateClassFromXMLString(YouTubeVideoResponseEntry, xml_string)
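# Editor's note: a minimal usage sketch, not part of the original module.
# The XML below is a hypothetical, bare-bones Atom document -- just enough
# for atom.CreateClassFromXMLString to parse into a typed feed object.
_EXAMPLE_VIDEO_FEED_XML = (
    "<?xml version='1.0' encoding='UTF-8'?>"
    "<feed xmlns='http://www.w3.org/2005/Atom'>"
    "<title>Example videos</title>"
    "</feed>")

def _ExampleParseVideoFeed():
  """Parses the hypothetical XML above and returns the entry titles."""
  feed = YouTubeVideoFeedFromString(_EXAMPLE_VIDEO_FEED_XML)
  # Each <entry> element would become a YouTubeVideoEntry in feed.entry.
  return [entry.title.text for entry in feed.entry]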
|
lz1988/django-web2015
|
refs/heads/master
|
build/lib/django/forms/widgets.py
|
98
|
"""
HTML Widget classes
"""
from __future__ import absolute_import, unicode_literals
import copy
import datetime
from itertools import chain
try:
from urllib.parse import urljoin
except ImportError: # Python 2
from urlparse import urljoin
from django.conf import settings
from django.forms.util import flatatt, to_current_timezone
from django.utils.datastructures import MultiValueDict, MergeDict
from django.utils.html import conditional_escape, format_html, format_html_join
from django.utils.translation import ugettext, ugettext_lazy
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.safestring import mark_safe
from django.utils import datetime_safe, formats, six
__all__ = (
'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'PasswordInput',
'HiddenInput', 'MultipleHiddenInput', 'ClearableFileInput',
'FileInput', 'DateInput', 'DateTimeInput', 'TimeInput', 'Textarea', 'CheckboxInput',
'Select', 'NullBooleanSelect', 'SelectMultiple', 'RadioSelect',
'CheckboxSelectMultiple', 'MultiWidget',
'SplitDateTimeWidget',
)
MEDIA_TYPES = ('css','js')
@python_2_unicode_compatible
class Media(object):
def __init__(self, media=None, **kwargs):
if media:
media_attrs = media.__dict__
else:
media_attrs = kwargs
self._css = {}
self._js = []
for name in MEDIA_TYPES:
getattr(self, 'add_' + name)(media_attrs.get(name, None))
# Any leftover attributes must be invalid.
# if media_attrs != {}:
# raise TypeError("'class Media' has invalid attribute(s): %s" % ','.join(media_attrs.keys()))
def __str__(self):
return self.render()
def render(self):
return mark_safe('\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES])))
def render_js(self):
return [format_html('<script type="text/javascript" src="{0}"></script>', self.absolute_path(path)) for path in self._js]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = sorted(self._css.keys())
return chain(*[
[format_html('<link href="{0}" type="text/css" media="{1}" rel="stylesheet" />', self.absolute_path(path), medium)
for path in self._css[medium]]
for medium in media])
def absolute_path(self, path, prefix=None):
if path.startswith(('http://', 'https://', '/')):
return path
if prefix is None:
if settings.STATIC_URL is None:
# backwards compatibility
prefix = settings.MEDIA_URL
else:
prefix = settings.STATIC_URL
return urljoin(prefix, path)
def __getitem__(self, name):
"Returns a Media object that only contains media of the given type"
if name in MEDIA_TYPES:
return Media(**{str(name): getattr(self, '_' + name)})
raise KeyError('Unknown media type "%s"' % name)
def add_js(self, data):
if data:
for path in data:
if path not in self._js:
self._js.append(path)
def add_css(self, data):
if data:
for medium, paths in data.items():
for path in paths:
if not self._css.get(medium) or path not in self._css[medium]:
self._css.setdefault(medium, []).append(path)
def __add__(self, other):
combined = Media()
for name in MEDIA_TYPES:
getattr(combined, 'add_' + name)(getattr(self, '_' + name, None))
getattr(combined, 'add_' + name)(getattr(other, '_' + name, None))
return combined
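# Editor's note: a minimal sketch, not part of the original module, showing
# how Media objects combine. The asset URLs are hypothetical; fully
# qualified URLs are used so absolute_path() does not consult settings.
def _example_combine_media():
    base = Media(css={'all': ('http://example.com/base.css',)},
                 js=('http://example.com/base.js',))
    extra = Media(js=('http://example.com/base.js',
                      'http://example.com/widget.js'))
    # __add__ merges both definitions; add_js()/add_css() drop duplicates.
    return (base + extra).render()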
def media_property(cls):
def _media(self):
# Get the media property of the superclass, if it exists
sup_cls = super(cls, self)
try:
base = sup_cls.media
except AttributeError:
base = Media()
# Get the media definition for this class
definition = getattr(cls, 'Media', None)
if definition:
extend = getattr(definition, 'extend', True)
if extend:
if extend == True:
m = base
else:
m = Media()
for medium in extend:
m = m + base[medium]
return m + Media(definition)
else:
return Media(definition)
else:
return base
return property(_media)
class MediaDefiningClass(type):
"Metaclass for classes that can have media definitions"
def __new__(cls, name, bases, attrs):
new_class = super(MediaDefiningClass, cls).__new__(cls, name, bases,
attrs)
if 'media' not in attrs:
new_class.media = media_property(new_class)
return new_class
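# Editor's note: a minimal sketch, not part of the original module. A class
# built with MediaDefiningClass gains a `media` property that combines its
# inner Media definition with those of its bases; the URLs are hypothetical.
class _ExampleMediaHolder(six.with_metaclass(MediaDefiningClass)):
    class Media:
        css = {'all': ('http://example.com/example.css',)}
        js = ('http://example.com/example.js',)
# _ExampleMediaHolder().media renders both assets; setting `extend = False`
# inside the inner Media class would skip media inherited from base classes.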
@python_2_unicode_compatible
class SubWidget(object):
"""
Some widgets are made of multiple HTML elements -- namely, RadioSelect.
This is a class that represents the "inner" HTML element of a widget.
"""
def __init__(self, parent_widget, name, value, attrs, choices):
self.parent_widget = parent_widget
self.name, self.value = name, value
self.attrs, self.choices = attrs, choices
def __str__(self):
args = [self.name, self.value, self.attrs]
if self.choices:
args.append(self.choices)
return self.parent_widget.render(*args)
class Widget(six.with_metaclass(MediaDefiningClass)):
is_hidden = False # Determines whether this corresponds to an <input type="hidden">.
    needs_multipart_form = False # Determines whether this widget needs a multipart form
is_localized = False
is_required = False
def __init__(self, attrs=None):
if attrs is not None:
self.attrs = attrs.copy()
else:
self.attrs = {}
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
memo[id(self)] = obj
return obj
def subwidgets(self, name, value, attrs=None, choices=()):
"""
Yields all "subwidgets" of this widget. Used only by RadioSelect to
allow template access to individual <input type="radio"> buttons.
Arguments are the same as for render().
"""
yield SubWidget(self, name, value, attrs, choices)
def render(self, name, value, attrs=None):
"""
Returns this Widget rendered as HTML, as a Unicode string.
The 'value' given is not guaranteed to be valid input, so subclass
implementations should program defensively.
"""
raise NotImplementedError
def build_attrs(self, extra_attrs=None, **kwargs):
"Helper function for building an attribute dictionary."
attrs = dict(self.attrs, **kwargs)
if extra_attrs:
attrs.update(extra_attrs)
return attrs
def value_from_datadict(self, data, files, name):
"""
Given a dictionary of data and this widget's name, returns the value
of this widget. Returns None if it's not provided.
"""
return data.get(name, None)
def _has_changed(self, initial, data):
"""
Return True if data differs from initial.
"""
        # For purposes of seeing whether something has changed, None is
        # the same as an empty string; if the data or initial value we get
        # is None, replace it with ''.
if data is None:
data_value = ''
else:
data_value = data
if initial is None:
initial_value = ''
else:
initial_value = initial
if force_text(initial_value) != force_text(data_value):
return True
return False
def id_for_label(self, id_):
"""
Returns the HTML ID attribute of this Widget for use by a <label>,
given the ID of the field. Returns None if no ID is available.
This hook is necessary because some widgets have multiple HTML
elements and, thus, multiple IDs. In that case, this method should
return an ID value that corresponds to the first ID in the widget's
tags.
"""
return id_
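# Editor's note: a minimal sketch, not part of the original module, of the
# Widget contract: a subclass only has to implement render().
class _ExampleStaticWidget(Widget):
    def render(self, name, value, attrs=None):
        # build_attrs() merges self.attrs with per-render attrs and kwargs.
        final_attrs = self.build_attrs(attrs, name=name)
        if value is None:
            value = ''
        return format_html('<span{0}>{1}</span>',
                           flatatt(final_attrs), force_text(value))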
class Input(Widget):
"""
Base class for all <input> widgets (except type='checkbox' and
type='radio', which are special).
"""
input_type = None # Subclasses must define this.
def _format_value(self, value):
if self.is_localized:
return formats.localize_input(value)
return value
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_text(self._format_value(value))
return format_html('<input{0} />', flatatt(final_attrs))
class TextInput(Input):
input_type = 'text'
def __init__(self, attrs=None):
if attrs is not None:
self.input_type = attrs.pop('type', self.input_type)
super(TextInput, self).__init__(attrs)
class PasswordInput(TextInput):
input_type = 'password'
def __init__(self, attrs=None, render_value=False):
super(PasswordInput, self).__init__(attrs)
self.render_value = render_value
def render(self, name, value, attrs=None):
if not self.render_value: value=None
return super(PasswordInput, self).render(name, value, attrs)
class HiddenInput(Input):
input_type = 'hidden'
is_hidden = True
class MultipleHiddenInput(HiddenInput):
"""
A widget that handles <input type="hidden"> for fields that have a list
of values.
"""
def __init__(self, attrs=None, choices=()):
super(MultipleHiddenInput, self).__init__(attrs)
# choices can be any iterable
self.choices = choices
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
id_ = final_attrs.get('id', None)
inputs = []
for i, v in enumerate(value):
input_attrs = dict(value=force_text(v), **final_attrs)
if id_:
# An ID attribute was given. Add a numeric index as a suffix
# so that the inputs don't all have the same ID attribute.
input_attrs['id'] = '%s_%s' % (id_, i)
inputs.append(format_html('<input{0} />', flatatt(input_attrs)))
return mark_safe('\n'.join(inputs))
def value_from_datadict(self, data, files, name):
if isinstance(data, (MultiValueDict, MergeDict)):
return data.getlist(name)
return data.get(name, None)
class FileInput(Input):
input_type = 'file'
needs_multipart_form = True
def render(self, name, value, attrs=None):
return super(FileInput, self).render(name, None, attrs=attrs)
def value_from_datadict(self, data, files, name):
"File widgets take data from FILES, not POST"
return files.get(name, None)
def _has_changed(self, initial, data):
if data is None:
return False
return True
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
initial_text = ugettext_lazy('Currently')
input_text = ugettext_lazy('Change')
clear_checkbox_label = ugettext_lazy('Clear')
template_with_initial = '%(initial_text)s: %(initial)s %(clear_template)s<br />%(input_text)s: %(input)s'
template_with_clear = '%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>'
def clear_checkbox_name(self, name):
"""
Given the name of the file input, return the name of the clear checkbox
input.
"""
return name + '-clear'
def clear_checkbox_id(self, name):
"""
Given the name of the clear checkbox input, return the HTML id for it.
"""
return name + '_id'
def render(self, name, value, attrs=None):
substitutions = {
'initial_text': self.initial_text,
'input_text': self.input_text,
'clear_template': '',
'clear_checkbox_label': self.clear_checkbox_label,
}
template = '%(input)s'
substitutions['input'] = super(ClearableFileInput, self).render(name, value, attrs)
if value and hasattr(value, "url"):
template = self.template_with_initial
substitutions['initial'] = format_html('<a href="{0}">{1}</a>',
value.url,
force_text(value))
if not self.is_required:
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
substitutions['clear_checkbox_name'] = conditional_escape(checkbox_name)
substitutions['clear_checkbox_id'] = conditional_escape(checkbox_id)
substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id})
substitutions['clear_template'] = self.template_with_clear % substitutions
return mark_safe(template % substitutions)
def value_from_datadict(self, data, files, name):
upload = super(ClearableFileInput, self).value_from_datadict(data, files, name)
if not self.is_required and CheckboxInput().value_from_datadict(
data, files, self.clear_checkbox_name(name)):
if upload:
# If the user contradicts themselves (uploads a new file AND
# checks the "clear" checkbox), we return a unique marker
# object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
# False signals to clear any existing value, as opposed to just None
return False
return upload
class Textarea(Widget):
def __init__(self, attrs=None):
# The 'rows' and 'cols' attributes are required for HTML correctness.
default_attrs = {'cols': '40', 'rows': '10'}
if attrs:
default_attrs.update(attrs)
super(Textarea, self).__init__(default_attrs)
def render(self, name, value, attrs=None):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, name=name)
return format_html('<textarea{0}>\r\n{1}</textarea>',
flatatt(final_attrs),
force_text(value))
class DateInput(TextInput):
def __init__(self, attrs=None, format=None):
super(DateInput, self).__init__(attrs)
if format:
self.format = format
self.manual_format = True
else:
self.format = formats.get_format('DATE_INPUT_FORMATS')[0]
self.manual_format = False
def _format_value(self, value):
if self.is_localized and not self.manual_format:
return formats.localize_input(value)
elif hasattr(value, 'strftime'):
value = datetime_safe.new_date(value)
return value.strftime(self.format)
return value
def _has_changed(self, initial, data):
# If our field has show_hidden_initial=True, initial will be a string
# formatted by HiddenInput using formats.localize_input, which is not
# necessarily the format used for this widget. Attempt to convert it.
try:
input_format = formats.get_format('DATE_INPUT_FORMATS')[0]
initial = datetime.datetime.strptime(initial, input_format).date()
except (TypeError, ValueError):
pass
return super(DateInput, self)._has_changed(self._format_value(initial), data)
class DateTimeInput(TextInput):
def __init__(self, attrs=None, format=None):
super(DateTimeInput, self).__init__(attrs)
if format:
self.format = format
self.manual_format = True
else:
self.format = formats.get_format('DATETIME_INPUT_FORMATS')[0]
self.manual_format = False
def _format_value(self, value):
if self.is_localized and not self.manual_format:
return formats.localize_input(value)
elif hasattr(value, 'strftime'):
value = datetime_safe.new_datetime(value)
return value.strftime(self.format)
return value
def _has_changed(self, initial, data):
# If our field has show_hidden_initial=True, initial will be a string
# formatted by HiddenInput using formats.localize_input, which is not
# necessarily the format used for this widget. Attempt to convert it.
try:
input_format = formats.get_format('DATETIME_INPUT_FORMATS')[0]
initial = datetime.datetime.strptime(initial, input_format)
except (TypeError, ValueError):
pass
return super(DateTimeInput, self)._has_changed(self._format_value(initial), data)
class TimeInput(TextInput):
def __init__(self, attrs=None, format=None):
super(TimeInput, self).__init__(attrs)
if format:
self.format = format
self.manual_format = True
else:
self.format = formats.get_format('TIME_INPUT_FORMATS')[0]
self.manual_format = False
def _format_value(self, value):
if self.is_localized and not self.manual_format:
return formats.localize_input(value)
elif hasattr(value, 'strftime'):
return value.strftime(self.format)
return value
def _has_changed(self, initial, data):
# If our field has show_hidden_initial=True, initial will be a string
# formatted by HiddenInput using formats.localize_input, which is not
# necessarily the format used for this widget. Attempt to convert it.
try:
input_format = formats.get_format('TIME_INPUT_FORMATS')[0]
initial = datetime.datetime.strptime(initial, input_format).time()
except (TypeError, ValueError):
pass
return super(TimeInput, self)._has_changed(self._format_value(initial), data)
# Defined at module level so that CheckboxInput is picklable (#17976)
def boolean_check(v):
return not (v is False or v is None or v == '')
class CheckboxInput(Widget):
def __init__(self, attrs=None, check_test=None):
super(CheckboxInput, self).__init__(attrs)
# check_test is a callable that takes a value and returns True
# if the checkbox should be checked for that value.
self.check_test = boolean_check if check_test is None else check_test
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
if self.check_test(value):
final_attrs['checked'] = 'checked'
if not (value is True or value is False or value is None or value == ''):
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_text(value)
return format_html('<input{0} />', flatatt(final_attrs))
def value_from_datadict(self, data, files, name):
if name not in data:
# A missing value means False because HTML form submission does not
# send results for unselected checkboxes.
return False
value = data.get(name)
# Translate true and false strings to boolean values.
values = {'true': True, 'false': False}
if isinstance(value, six.string_types):
value = values.get(value.lower(), value)
return bool(value)
def _has_changed(self, initial, data):
# Sometimes data or initial could be None or '' which should be the
# same thing as False.
if initial == 'False':
# show_hidden_initial may have transformed False to 'False'
initial = False
return bool(initial) != bool(data)
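# Editor's note: a minimal sketch, not part of the original module, of the
# check_test hook: the box renders checked whenever the callable returns
# True for the value bound to the widget.
def _example_checkbox_render():
    widget = CheckboxInput(check_test=lambda v: v == 'on')
    # The rendered <input type="checkbox"> carries checked="checked".
    return widget.render('newsletter', 'on')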
class Select(Widget):
allow_multiple_selected = False
def __init__(self, attrs=None, choices=()):
super(Select, self).__init__(attrs)
# choices can be any iterable, but we may need to render this widget
# multiple times. Thus, collapse it into a list so it can be consumed
# more than once.
self.choices = list(choices)
def render(self, name, value, attrs=None, choices=()):
if value is None: value = ''
final_attrs = self.build_attrs(attrs, name=name)
output = [format_html('<select{0}>', flatatt(final_attrs))]
options = self.render_options(choices, [value])
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
def render_option(self, selected_choices, option_value, option_label):
option_value = force_text(option_value)
if option_value in selected_choices:
selected_html = mark_safe(' selected="selected"')
if not self.allow_multiple_selected:
# Only allow for a single selection.
selected_choices.remove(option_value)
else:
selected_html = ''
return format_html('<option value="{0}"{1}>{2}</option>',
option_value,
selected_html,
force_text(option_label))
def render_options(self, choices, selected_choices):
# Normalize to strings.
selected_choices = set(force_text(v) for v in selected_choices)
output = []
for option_value, option_label in chain(self.choices, choices):
if isinstance(option_label, (list, tuple)):
output.append(format_html('<optgroup label="{0}">', force_text(option_value)))
for option in option_label:
output.append(self.render_option(selected_choices, *option))
output.append('</optgroup>')
else:
output.append(self.render_option(selected_choices, option_value, option_label))
return '\n'.join(output)
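# Editor's note: a minimal sketch, not part of the original module. Grouped
# choices render as an <optgroup>; plain pairs render as single <option>s.
def _example_select_render():
    widget = Select(choices=[
        ('', '---------'),
        ('Audio', (('vinyl', 'Vinyl'), ('cd', 'CD'))),
    ])
    # The 'cd' option is rendered with selected="selected".
    return widget.render('medium', 'cd')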
class NullBooleanSelect(Select):
"""
A Select Widget intended to be used with NullBooleanField.
"""
def __init__(self, attrs=None):
choices = (('1', ugettext_lazy('Unknown')),
('2', ugettext_lazy('Yes')),
('3', ugettext_lazy('No')))
super(NullBooleanSelect, self).__init__(attrs, choices)
def render(self, name, value, attrs=None, choices=()):
try:
value = {True: '2', False: '3', '2': '2', '3': '3'}[value]
except KeyError:
value = '1'
return super(NullBooleanSelect, self).render(name, value, attrs, choices)
def value_from_datadict(self, data, files, name):
value = data.get(name, None)
return {'2': True,
True: True,
'True': True,
'3': False,
'False': False,
False: False}.get(value, None)
def _has_changed(self, initial, data):
# For a NullBooleanSelect, None (unknown) and False (No)
# are not the same
if initial is not None:
initial = bool(initial)
if data is not None:
data = bool(data)
return initial != data
class SelectMultiple(Select):
allow_multiple_selected = True
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
final_attrs = self.build_attrs(attrs, name=name)
output = [format_html('<select multiple="multiple"{0}>', flatatt(final_attrs))]
options = self.render_options(choices, value)
if options:
output.append(options)
output.append('</select>')
return mark_safe('\n'.join(output))
def value_from_datadict(self, data, files, name):
if isinstance(data, (MultiValueDict, MergeDict)):
return data.getlist(name)
return data.get(name, None)
def _has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set([force_text(value) for value in initial])
data_set = set([force_text(value) for value in data])
return data_set != initial_set
@python_2_unicode_compatible
class RadioInput(SubWidget):
"""
An object used by RadioFieldRenderer that represents a single
<input type='radio'>.
"""
def __init__(self, name, value, attrs, choice, index):
self.name, self.value = name, value
self.attrs = attrs
self.choice_value = force_text(choice[0])
self.choice_label = force_text(choice[1])
self.index = index
def __str__(self):
return self.render()
def render(self, name=None, value=None, attrs=None, choices=()):
name = name or self.name
value = value or self.value
attrs = attrs or self.attrs
if 'id' in self.attrs:
label_for = format_html(' for="{0}_{1}"', self.attrs['id'], self.index)
else:
label_for = ''
choice_label = force_text(self.choice_label)
return format_html('<label{0}>{1} {2}</label>', label_for, self.tag(), choice_label)
def is_checked(self):
return self.value == self.choice_value
def tag(self):
if 'id' in self.attrs:
self.attrs['id'] = '%s_%s' % (self.attrs['id'], self.index)
final_attrs = dict(self.attrs, type='radio', name=self.name, value=self.choice_value)
if self.is_checked():
final_attrs['checked'] = 'checked'
return format_html('<input{0} />', flatatt(final_attrs))
@python_2_unicode_compatible
class RadioFieldRenderer(object):
"""
An object used by RadioSelect to enable customization of radio widgets.
"""
def __init__(self, name, value, attrs, choices):
self.name, self.value, self.attrs = name, value, attrs
self.choices = choices
def __iter__(self):
for i, choice in enumerate(self.choices):
yield RadioInput(self.name, self.value, self.attrs.copy(), choice, i)
def __getitem__(self, idx):
        choice = self.choices[idx] # Let the IndexError propagate
return RadioInput(self.name, self.value, self.attrs.copy(), choice, idx)
def __str__(self):
return self.render()
def render(self):
"""Outputs a <ul> for this set of radio fields."""
return format_html('<ul>\n{0}\n</ul>',
format_html_join('\n', '<li>{0}</li>',
[(force_text(w),) for w in self]
))
class RadioSelect(Select):
renderer = RadioFieldRenderer
def __init__(self, *args, **kwargs):
# Override the default renderer if we were passed one.
renderer = kwargs.pop('renderer', None)
if renderer:
self.renderer = renderer
super(RadioSelect, self).__init__(*args, **kwargs)
def subwidgets(self, name, value, attrs=None, choices=()):
for widget in self.get_renderer(name, value, attrs, choices):
yield widget
def get_renderer(self, name, value, attrs=None, choices=()):
"""Returns an instance of the renderer."""
if value is None: value = ''
str_value = force_text(value) # Normalize to string.
final_attrs = self.build_attrs(attrs)
choices = list(chain(self.choices, choices))
return self.renderer(name, str_value, final_attrs, choices)
def render(self, name, value, attrs=None, choices=()):
return self.get_renderer(name, value, attrs, choices).render()
def id_for_label(self, id_):
# RadioSelect is represented by multiple <input type="radio"> fields,
# each of which has a distinct ID. The IDs are made distinct by a "_X"
# suffix, where X is the zero-based index of the radio field. Thus,
# the label for a RadioSelect should reference the first one ('_0').
if id_:
id_ += '_0'
return id_
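# Editor's note: a minimal sketch, not part of the original module. The
# renderer exposes each <input type="radio"> so templates can lay the
# buttons out individually.
def _example_radio_subwidgets():
    widget = RadioSelect(choices=[('s', 'Small'), ('l', 'Large')])
    # Each item is a full <label><input type="radio" ...> ...</label>.
    return [force_text(radio) for radio in widget.subwidgets('size', 'l')]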
class CheckboxSelectMultiple(SelectMultiple):
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
output = ['<ul>']
# Normalize to strings
str_values = set([force_text(v) for v in value])
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = format_html(' for="{0}"', final_attrs['id'])
else:
label_for = ''
cb = CheckboxInput(final_attrs, check_test=lambda value: value in str_values)
option_value = force_text(option_value)
rendered_cb = cb.render(name, option_value)
option_label = force_text(option_label)
output.append(format_html('<li><label{0}>{1} {2}</label></li>',
label_for, rendered_cb, option_label))
output.append('</ul>')
return mark_safe('\n'.join(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
class MultiWidget(Widget):
"""
A widget that is composed of multiple widgets.
Its render() method is different than other widgets', because it has to
figure out how to split a single value for display in multiple widgets.
The ``value`` argument can be one of two things:
* A list.
* A normal value (e.g., a string) that has been "compressed" from
a list of values.
In the second case -- i.e., if the value is NOT a list -- render() will
first "decompress" the value into a list before rendering it. It does so by
calling the decompress() method, which MultiWidget subclasses must
implement. This method takes a single "compressed" value and returns a
list.
When render() does its HTML rendering, each value in the list is rendered
with the corresponding widget -- the first value is rendered in the first
widget, the second value is rendered in the second widget, etc.
Subclasses may implement format_output(), which takes the list of rendered
widgets and returns a string of HTML that formats them any way you'd like.
You'll probably want to use this class with MultiValueField.
"""
def __init__(self, widgets, attrs=None):
self.widgets = [isinstance(w, type) and w() or w for w in widgets]
super(MultiWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
# value is a list of values, each corresponding to a widget
# in self.widgets.
if not isinstance(value, list):
value = self.decompress(value)
output = []
final_attrs = self.build_attrs(attrs)
id_ = final_attrs.get('id', None)
for i, widget in enumerate(self.widgets):
try:
widget_value = value[i]
except IndexError:
widget_value = None
if id_:
final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
output.append(widget.render(name + '_%s' % i, widget_value, final_attrs))
return mark_safe(self.format_output(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
def value_from_datadict(self, data, files, name):
return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)]
def _has_changed(self, initial, data):
if initial is None:
initial = ['' for x in range(0, len(data))]
else:
if not isinstance(initial, list):
initial = self.decompress(initial)
for widget, initial, data in zip(self.widgets, initial, data):
if widget._has_changed(initial, data):
return True
return False
def format_output(self, rendered_widgets):
"""
Given a list of rendered widgets (as strings), returns a Unicode string
representing the HTML for the whole lot.
This hook allows you to format the HTML design of the widgets, if
needed.
"""
return ''.join(rendered_widgets)
def decompress(self, value):
"""
Returns a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
raise NotImplementedError('Subclasses must implement this method.')
def _get_media(self):
"Media for a multiwidget is the combination of all media of the subwidgets"
media = Media()
for w in self.widgets:
media = media + w.media
return media
media = property(_get_media)
def __deepcopy__(self, memo):
obj = super(MultiWidget, self).__deepcopy__(memo)
obj.widgets = copy.deepcopy(self.widgets)
return obj
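# Editor's note: a minimal sketch, not part of the original module, of the
# decompress() contract; SplitDateTimeWidget below is the canonical example.
class _ExamplePhoneWidget(MultiWidget):
    """Splits an 'AAA-BBBB' phone string across two text inputs."""
    def __init__(self, attrs=None):
        widgets = (TextInput(attrs=attrs), TextInput(attrs=attrs))
        super(_ExamplePhoneWidget, self).__init__(widgets, attrs)
    def decompress(self, value):
        if value:
            return value.split('-', 1)
        return [None, None]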
class SplitDateTimeWidget(MultiWidget):
"""
A Widget that splits datetime input into two <input type="text"> boxes.
"""
def __init__(self, attrs=None, date_format=None, time_format=None):
widgets = (DateInput(attrs=attrs, format=date_format),
TimeInput(attrs=attrs, format=time_format))
super(SplitDateTimeWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
value = to_current_timezone(value)
return [value.date(), value.time().replace(microsecond=0)]
return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
"""
A Widget that splits datetime input into two <input type="hidden"> inputs.
"""
is_hidden = True
def __init__(self, attrs=None, date_format=None, time_format=None):
super(SplitHiddenDateTimeWidget, self).__init__(attrs, date_format, time_format)
for widget in self.widgets:
widget.input_type = 'hidden'
widget.is_hidden = True
|
jmancewicz/DIGITS
|
refs/heads/master
|
digits/frameworks/__init__.py
|
5
|
# Copyright (c) 2015-2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from .caffe_framework import CaffeFramework
from .framework import Framework
from .torch_framework import TorchFramework
from digits.config import config_value
#
# create framework instances
#
# torch is optional
torch = TorchFramework() if config_value('torch_root') else None
# caffe is mandatory
caffe = CaffeFramework()
#
# utility functions
#
def get_frameworks():
"""
return list of all available framework instances
there may be more than one instance per framework class
"""
frameworks = [caffe]
if torch:
frameworks.append(torch)
return frameworks
def get_framework_by_id(framework_id):
"""
return framework instance associated with given id
"""
for fw in get_frameworks():
if fw.get_id() == framework_id:
return fw
return None
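# Editor's note: a minimal usage sketch, not part of the original module.
# The framework id below is hypothetical; real ids come from each
# Framework subclass's get_id().
def _example_framework_lookup():
    available = [fw.get_id() for fw in get_frameworks()]
    # get_framework_by_id returns None when no framework matches the id.
    return get_framework_by_id('caffe'), available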
|
pavelchristof/gomoku-ai
|
refs/heads/master
|
tensorflow/tools/compatibility/ast_edits.py
|
47
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Upgrader for Python scripts according to an API change specification."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ast
import collections
import os
import shutil
import sys
import tempfile
import traceback
class APIChangeSpec(object):
"""This class defines the transformations that need to happen.
This class must provide the following fields:
* `function_keyword_renames`: maps function names to a map of old -> new
argument names
* `function_renames`: maps function names to new function names
* `change_to_function`: a set of function names that have changed (for
notifications)
* `function_reorders`: maps functions whose argument order has changed to the
list of arguments in the new order
* `function_handle`: maps function names to custom handlers for the function
For an example, see `TFAPIChangeSpec`.
"""
class _FileEditTuple(collections.namedtuple(
"_FileEditTuple", ["comment", "line", "start", "old", "new"])):
"""Each edit that is recorded by a _FileEditRecorder.
Fields:
comment: A description of the edit and why it was made.
line: The line number in the file where the edit occurs (1-indexed).
    start: The column offset in the line where the edit begins (0-indexed).
old: text string to remove (this must match what was in file).
new: text string to add in place of `old`.
"""
__slots__ = ()
class _FileEditRecorder(object):
"""Record changes that need to be done to the file."""
def __init__(self, filename):
# all edits are lists of chars
self._filename = filename
self._line_to_edit = collections.defaultdict(list)
self._errors = []
def process(self, text):
"""Process a list of strings, each corresponding to the recorded changes.
Args:
text: A list of lines of text (assumed to contain newlines)
Returns:
A tuple of the modified text and a textual description of what is done.
Raises:
ValueError: if substitution source location does not have expected text.
"""
change_report = ""
    # Iterate over each line.
for line, edits in self._line_to_edit.items():
offset = 0
      # Sort by column so that edits are processed in order, making the
      # indexing adjustments cumulative for edits that change the string
      # length.
edits.sort(key=lambda x: x.start)
# Extract each line to a list of characters, because mutable lists
# are editable, unlike immutable strings.
char_array = list(text[line - 1])
# Record a description of the change
change_report += "%r Line %d\n" % (self._filename, line)
change_report += "-" * 80 + "\n\n"
for e in edits:
change_report += "%s\n" % e.comment
change_report += "\n Old: %s" % (text[line - 1])
      # Make underline buffers marking where in the line the edits were made.
change_list = [" "] * len(text[line - 1])
change_list_new = [" "] * len(text[line - 1])
# Iterate for each edit
for e in edits:
# Create effective start, end by accounting for change in length due
# to previous edits
start_eff = e.start + offset
end_eff = start_eff + len(e.old)
# Make sure the edit is changing what it should be changing
old_actual = "".join(char_array[start_eff:end_eff])
if old_actual != e.old:
raise ValueError("Expected text %r but got %r" %
("".join(e.old), "".join(old_actual)))
# Make the edit
char_array[start_eff:end_eff] = list(e.new)
# Create the underline highlighting of the before and after
change_list[e.start:e.start + len(e.old)] = "~" * len(e.old)
change_list_new[start_eff:end_eff] = "~" * len(e.new)
# Keep track of how to generate effective ranges
offset += len(e.new) - len(e.old)
# Finish the report comment
change_report += " %s\n" % "".join(change_list)
text[line - 1] = "".join(char_array)
change_report += " New: %s" % (text[line - 1])
change_report += " %s\n\n" % "".join(change_list_new)
return "".join(text), change_report, self._errors
def add(self, comment, line, start, old, new, error=None):
"""Add a new change that is needed.
Args:
comment: A description of what was changed
line: Line number (1 indexed)
start: Column offset (0 indexed)
old: old text
new: new text
error: this "edit" is something that cannot be fixed automatically
Returns:
None
"""
self._line_to_edit[line].append(
_FileEditTuple(comment, line, start, old, new))
if error:
self._errors.append("%s:%d: %s" % (self._filename, line, error))
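# Editor's note: a minimal sketch, not part of the original module, of the
# recorder's lifecycle: record edits against 1-indexed lines and 0-indexed
# columns, then apply them to the file's text with process().
def _example_record_and_process():
  recorder = _FileEditRecorder("example.py")
  recorder.add("Renamed f to g", line=1, start=0, old="f", new="g")
  new_text, report, errors = recorder.process(["f(1)\n"])
  return new_text  # "g(1)\n"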
class _ASTCallVisitor(ast.NodeVisitor):
"""AST Visitor that processes function calls.
Updates function calls from old API version to new API version using a given
change spec.
"""
def __init__(self, filename, lines, api_change_spec):
self._filename = filename
self._file_edit = _FileEditRecorder(filename)
self._lines = lines
self._api_change_spec = api_change_spec
def process(self, lines):
return self._file_edit.process(lines)
def generic_visit(self, node):
ast.NodeVisitor.generic_visit(self, node)
def _rename_functions(self, node, full_name):
function_renames = self._api_change_spec.function_renames
try:
new_name = function_renames[full_name]
self._file_edit.add("Renamed function %r to %r" % (full_name,
new_name),
node.lineno, node.col_offset, full_name, new_name)
except KeyError:
pass
def _get_attribute_full_path(self, node):
"""Traverse an attribute to generate a full name e.g. tf.foo.bar.
Args:
node: A Node of type Attribute.
Returns:
a '.'-delimited full-name or None if the tree was not a simple form.
      i.e. `(foo()+b).bar` returns None, while `a.b.c` would return "a.b.c".
"""
curr = node
items = []
while not isinstance(curr, ast.Name):
if not isinstance(curr, ast.Attribute):
return None
items.append(curr.attr)
curr = curr.value
items.append(curr.id)
return ".".join(reversed(items))
def _find_true_position(self, node):
"""Return correct line number and column offset for a given node.
    This is necessary mainly because ListComp reports its location as that
    of the first token after the '[' opening the list comprehension.
Args:
node: Node for which we wish to know the lineno and col_offset
"""
import re
    find_open = re.compile(r"^\s*(\[).*$")
    find_string_chars = re.compile(r"['\"]")
if isinstance(node, ast.ListComp):
# Strangely, ast.ListComp returns the col_offset of the first token
# after the '[' token which appears to be a bug. Workaround by
# explicitly finding the real start of the list comprehension.
line = node.lineno
col = node.col_offset
# loop over lines
while 1:
        # Reverse the preceding text so it can be searched with a regular
        # expression.
text = self._lines[line-1]
reversed_preceding_text = text[:col][::-1]
# First find if a [ can be found with only whitespace between it and
# col.
m = find_open.match(reversed_preceding_text)
if m:
new_col_offset = col - m.start(1) - 1
return line, new_col_offset
else:
if (reversed_preceding_text=="" or
reversed_preceding_text.isspace()):
line = line - 1
prev_line = self._lines[line - 1]
# TODO(aselle):
# this is poor comment detection, but it is good enough for
# cases where the comment does not contain string literal starting/
# ending characters. If ast gave us start and end locations of the
# ast nodes rather than just start, we could use string literal
# node ranges to filter out spurious #'s that appear in string
# literals.
comment_start = prev_line.find("#")
if comment_start == -1:
col = len(prev_line) -1
elif find_string_chars.search(prev_line[comment_start:]) is None:
col = comment_start
else:
return None, None
else:
return None, None
    # Most other node types report proper locations, so fall back to the
    # node's own line number and column offset.
return node.lineno, node.col_offset
def visit_Call(self, node): # pylint: disable=invalid-name
"""Handle visiting a call node in the AST.
Args:
node: Current Node
"""
# Find a simple attribute name path e.g. "tf.foo.bar"
full_name = self._get_attribute_full_path(node.func)
# Make sure the func is marked as being part of a call
node.func.is_function_for_call = True
if full_name:
# Call special handlers
function_handles = self._api_change_spec.function_handle
if full_name in function_handles:
function_handles[full_name](self._file_edit, node)
# Examine any non-keyword argument and make it into a keyword argument
      # if reordering is required.
function_reorders = self._api_change_spec.function_reorders
function_keyword_renames = (
self._api_change_spec.function_keyword_renames)
if full_name in function_reorders:
reordered = function_reorders[full_name]
for idx, arg in enumerate(node.args):
lineno, col_offset = self._find_true_position(arg)
if lineno is None or col_offset is None:
self._file_edit.add(
"Failed to add keyword %r to reordered function %r"
% (reordered[idx], full_name), arg.lineno, arg.col_offset,
"", "",
error="A necessary keyword argument failed to be inserted.")
else:
keyword_arg = reordered[idx]
if (full_name in function_keyword_renames and
keyword_arg in function_keyword_renames[full_name]):
keyword_arg = function_keyword_renames[full_name][keyword_arg]
self._file_edit.add("Added keyword %r to reordered function %r"
% (reordered[idx], full_name), lineno,
col_offset, "", keyword_arg + "=")
# Examine each keyword argument and convert it to the final renamed form
renamed_keywords = ({} if full_name not in function_keyword_renames else
function_keyword_renames[full_name])
for keyword in node.keywords:
argkey = keyword.arg
argval = keyword.value
if argkey in renamed_keywords:
argval_lineno, argval_col_offset = self._find_true_position(argval)
if argval_lineno is not None and argval_col_offset is not None:
# TODO(aselle): We should scan backward to find the start of the
# keyword key. Unfortunately ast does not give you the location of
# keyword keys, so we are forced to infer it from the keyword arg
# value.
key_start = argval_col_offset - len(argkey) - 1
key_end = key_start + len(argkey) + 1
if (self._lines[argval_lineno - 1][key_start:key_end] ==
argkey + "="):
self._file_edit.add("Renamed keyword argument from %r to %r" %
(argkey, renamed_keywords[argkey]),
argval_lineno,
argval_col_offset - len(argkey) - 1,
argkey + "=", renamed_keywords[argkey] + "=")
continue
self._file_edit.add(
"Failed to rename keyword argument from %r to %r" %
(argkey, renamed_keywords[argkey]),
argval.lineno,
argval.col_offset - len(argkey) - 1,
"", "",
error="Failed to find keyword lexographically. Fix manually.")
ast.NodeVisitor.generic_visit(self, node)
def visit_Attribute(self, node): # pylint: disable=invalid-name
"""Handle bare Attributes i.e. [tf.foo, tf.bar].
Args:
node: Node that is of type ast.Attribute
"""
full_name = self._get_attribute_full_path(node)
if full_name:
self._rename_functions(node, full_name)
if full_name in self._api_change_spec.change_to_function:
if not hasattr(node, "is_function_for_call"):
new_text = full_name + "()"
self._file_edit.add("Changed %r to %r"%(full_name, new_text),
node.lineno, node.col_offset, full_name, new_text)
ast.NodeVisitor.generic_visit(self, node)
class ASTCodeUpgrader(object):
"""Handles upgrading a set of Python files using a given API change spec."""
def __init__(self, api_change_spec):
if not isinstance(api_change_spec, APIChangeSpec):
raise TypeError("Must pass APIChangeSpec to ASTCodeUpgrader, got %s" %
type(api_change_spec))
self._api_change_spec = api_change_spec
def process_file(self, in_filename, out_filename):
"""Process the given python file for incompatible changes.
Args:
in_filename: filename to parse
out_filename: output file to write to
Returns:
A tuple representing number of files processed, log of actions, errors
"""
    # Write to a temporary file, just in case we are doing an in-place modify.
with open(in_filename, "r") as in_file, \
tempfile.NamedTemporaryFile("w", delete=False) as temp_file:
ret = self.process_opened_file(
in_filename, in_file, out_filename, temp_file)
shutil.move(temp_file.name, out_filename)
return ret
# Broad exceptions are required here because ast throws whatever it wants.
# pylint: disable=broad-except
def process_opened_file(self, in_filename, in_file, out_filename, out_file):
"""Process the given python file for incompatible changes.
This function is split out to facilitate StringIO testing from
tf_upgrade_test.py.
Args:
in_filename: filename to parse
in_file: opened file (or StringIO)
out_filename: output file to write to
out_file: opened file (or StringIO)
Returns:
A tuple representing number of files processed, log of actions, errors
"""
process_errors = []
text = "-" * 80 + "\n"
text += "Processing file %r\n outputting to %r\n" % (in_filename,
out_filename)
text += "-" * 80 + "\n\n"
parsed_ast = None
lines = in_file.readlines()
try:
parsed_ast = ast.parse("".join(lines))
except Exception:
text += "Failed to parse %r\n\n" % in_filename
text += traceback.format_exc()
if parsed_ast:
visitor = _ASTCallVisitor(in_filename, lines, self._api_change_spec)
visitor.visit(parsed_ast)
out_text, new_text, process_errors = visitor.process(lines)
text += new_text
if out_file:
out_file.write(out_text)
text += "\n"
return 1, text, process_errors
# pylint: enable=broad-except
def process_tree(self, root_directory, output_root_directory,
copy_other_files):
"""Processes upgrades on an entire tree of python files in place.
Note that only Python files. If you have custom code in other languages,
you will need to manually upgrade those.
Args:
root_directory: Directory to walk and process.
output_root_directory: Directory to use as base.
copy_other_files: Copy files that are not touched by this converter.
Returns:
      A tuple of files processed, the report string for all files, and errors
"""
# make sure output directory doesn't exist
if output_root_directory and os.path.exists(output_root_directory):
print("Output directory %r must not already exist." % (
output_root_directory))
sys.exit(1)
# make sure output directory does not overlap with root_directory
norm_root = os.path.split(os.path.normpath(root_directory))
norm_output = os.path.split(os.path.normpath(output_root_directory))
if norm_root == norm_output:
print("Output directory %r same as input directory %r" % (
root_directory, output_root_directory))
sys.exit(1)
# Collect list of files to process (we do this to correctly handle if the
# user puts the output directory in some sub directory of the input dir)
files_to_process = []
files_to_copy = []
for dir_name, _, file_list in os.walk(root_directory):
py_files = [f for f in file_list if f.endswith(".py")]
copy_files = [f for f in file_list if not f.endswith(".py")]
for filename in py_files:
fullpath = os.path.join(dir_name, filename)
fullpath_output = os.path.join(
output_root_directory, os.path.relpath(fullpath, root_directory))
files_to_process.append((fullpath, fullpath_output))
if copy_other_files:
for filename in copy_files:
fullpath = os.path.join(dir_name, filename)
fullpath_output = os.path.join(
output_root_directory, os.path.relpath(fullpath, root_directory))
files_to_copy.append((fullpath, fullpath_output))
file_count = 0
tree_errors = []
report = ""
report += ("=" * 80) + "\n"
report += "Input tree: %r\n" % root_directory
report += ("=" * 80) + "\n"
for input_path, output_path in files_to_process:
output_directory = os.path.dirname(output_path)
if not os.path.isdir(output_directory):
os.makedirs(output_directory)
file_count += 1
_, l_report, l_errors = self.process_file(input_path, output_path)
tree_errors += l_errors
report += l_report
for input_path, output_path in files_to_copy:
output_directory = os.path.dirname(output_path)
if not os.path.isdir(output_directory):
os.makedirs(output_directory)
shutil.copy(input_path, output_path)
return file_count, report, tree_errors
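# Editor's note: a minimal end-to-end sketch, not part of the original
# module. The spec renames one hypothetical function, and StringIO objects
# stand in for real files, as the process_opened_file docstring suggests.
class _ExampleChangeSpec(APIChangeSpec):
  function_keyword_renames = {}
  function_renames = {"tf.mul": "tf.multiply"}
  change_to_function = set()
  function_reorders = {}
  function_handle = {}

def _example_upgrade_source():
  import io
  upgrader = ASTCodeUpgrader(_ExampleChangeSpec())
  in_file, out_file = io.StringIO(u"tf.mul(a, b)\n"), io.StringIO()
  count, report, errors = upgrader.process_opened_file(
      "in.py", in_file, "out.py", out_file)
  return out_file.getvalue()  # u"tf.multiply(a, b)\n"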
|
sinkuri256/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/internet/endpoints.py
|
49
|
# -*- test-case-name: twisted.internet.test.test_endpoints -*-
# Copyright (c) 2007-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementations of L{IStreamServerEndpoint} and L{IStreamClientEndpoint} that
wrap the L{IReactorTCP}, L{IReactorSSL}, and L{IReactorUNIX} interfaces.
This also implements an extensible mini-language for describing endpoints,
parsed by the L{clientFromString} and L{serverFromString} functions.
@since: 10.1
"""
from zope.interface import implements, directlyProvides
import warnings
from twisted.internet import interfaces, defer, error
from twisted.internet.protocol import ClientFactory, Protocol
from twisted.plugin import getPlugins
from twisted.internet.interfaces import IStreamServerEndpointStringParser
from twisted.internet.interfaces import IStreamClientEndpointStringParser
from twisted.python.filepath import FilePath
__all__ = ["clientFromString", "serverFromString",
"TCP4ServerEndpoint", "TCP4ClientEndpoint",
"UNIXServerEndpoint", "UNIXClientEndpoint",
"SSL4ServerEndpoint", "SSL4ClientEndpoint"]
class _WrappingProtocol(Protocol):
"""
Wrap another protocol in order to notify my user when a connection has
been made.
@ivar _connectedDeferred: The L{Deferred} that will callback
with the C{wrappedProtocol} when it is connected.
@ivar _wrappedProtocol: An L{IProtocol} provider that will be
connected.
"""
def __init__(self, connectedDeferred, wrappedProtocol):
"""
@param connectedDeferred: The L{Deferred} that will callback
with the C{wrappedProtocol} when it is connected.
@param wrappedProtocol: An L{IProtocol} provider that will be
connected.
"""
self._connectedDeferred = connectedDeferred
self._wrappedProtocol = wrappedProtocol
if interfaces.IHalfCloseableProtocol.providedBy(
self._wrappedProtocol):
directlyProvides(self, interfaces.IHalfCloseableProtocol)
def connectionMade(self):
"""
Connect the C{self._wrappedProtocol} to our C{self.transport} and
callback C{self._connectedDeferred} with the C{self._wrappedProtocol}
"""
self._wrappedProtocol.makeConnection(self.transport)
self._connectedDeferred.callback(self._wrappedProtocol)
def dataReceived(self, data):
"""
Proxy C{dataReceived} calls to our C{self._wrappedProtocol}
"""
return self._wrappedProtocol.dataReceived(data)
def connectionLost(self, reason):
"""
Proxy C{connectionLost} calls to our C{self._wrappedProtocol}
"""
return self._wrappedProtocol.connectionLost(reason)
def readConnectionLost(self):
"""
Proxy L{IHalfCloseableProtocol.readConnectionLost} to our
C{self._wrappedProtocol}
"""
self._wrappedProtocol.readConnectionLost()
def writeConnectionLost(self):
"""
Proxy L{IHalfCloseableProtocol.writeConnectionLost} to our
C{self._wrappedProtocol}
"""
self._wrappedProtocol.writeConnectionLost()
class _WrappingFactory(ClientFactory):
"""
Wrap a factory in order to wrap the protocols it builds.
@ivar _wrappedFactory: A provider of I{IProtocolFactory} whose
buildProtocol method will be called and whose resulting protocol
will be wrapped.
@ivar _onConnection: An L{Deferred} that fires when the protocol is
connected
"""
protocol = _WrappingProtocol
def __init__(self, wrappedFactory, canceller):
"""
@param wrappedFactory: A provider of I{IProtocolFactory} whose
buildProtocol method will be called and whose resulting protocol
will be wrapped.
@param canceller: An object that will be called to cancel the
L{self._onConnection} L{Deferred}
"""
self._wrappedFactory = wrappedFactory
self._onConnection = defer.Deferred(canceller=canceller)
def buildProtocol(self, addr):
"""
Proxy C{buildProtocol} to our C{self._wrappedFactory} or errback
the C{self._onConnection} L{Deferred}.
@return: An instance of L{_WrappingProtocol} or C{None}
"""
try:
proto = self._wrappedFactory.buildProtocol(addr)
except:
self._onConnection.errback()
else:
return self.protocol(self._onConnection, proto)
def clientConnectionFailed(self, connector, reason):
"""
Errback the C{self._onConnection} L{Deferred} when the
client connection fails.
"""
self._onConnection.errback(reason)
class TCP4ServerEndpoint(object):
"""
TCP server endpoint with an IPv4 configuration
@ivar _reactor: An L{IReactorTCP} provider.
@type _port: int
@ivar _port: The port number on which to listen for incoming connections.
@type _backlog: int
@ivar _backlog: size of the listen queue
@type _interface: str
@ivar _interface: the hostname to bind to, defaults to '' (all)
"""
implements(interfaces.IStreamServerEndpoint)
def __init__(self, reactor, port, backlog=50, interface=''):
"""
@param reactor: An L{IReactorTCP} provider.
        @param port: The port number used for listening
@param backlog: size of the listen queue
@param interface: the hostname to bind to, defaults to '' (all)
"""
self._reactor = reactor
self._port = port
        # Note: kept from the original source; listen() reads _backlog and
        # _interface directly, so this dict is effectively unused.
        self._listenArgs = dict(backlog=backlog, interface=interface)
self._backlog = backlog
self._interface = interface
def listen(self, protocolFactory):
"""
Implement L{IStreamServerEndpoint.listen} to listen on a TCP socket
"""
return defer.execute(self._reactor.listenTCP,
self._port,
protocolFactory,
backlog=self._backlog,
interface=self._interface)
class TCP4ClientEndpoint(object):
"""
TCP client endpoint with an IPv4 configuration.
@ivar _reactor: An L{IReactorTCP} provider.
@type _host: str
@ivar _host: The hostname to connect to as a C{str}
@type _port: int
@ivar _port: The port to connect to as C{int}
@type _timeout: int
@ivar _timeout: number of seconds to wait before assuming the
connection has failed.
@type _bindAddress: tuple
    @ivar _bindAddress: a (host, port) tuple of local address to bind
to, or None.
"""
implements(interfaces.IStreamClientEndpoint)
def __init__(self, reactor, host, port, timeout=30, bindAddress=None):
"""
@param reactor: An L{IReactorTCP} provider
@param host: A hostname, used when connecting
@param port: The port number, used when connecting
@param timeout: number of seconds to wait before assuming the
connection has failed.
        @param bindAddress: a (host, port) tuple of local address to bind to,
or None.
"""
self._reactor = reactor
self._host = host
self._port = port
self._timeout = timeout
self._bindAddress = bindAddress
def connect(self, protocolFactory):
"""
Implement L{IStreamClientEndpoint.connect} to connect via TCP.
"""
def _canceller(deferred):
connector.stopConnecting()
deferred.errback(
error.ConnectingCancelledError(connector.getDestination()))
try:
wf = _WrappingFactory(protocolFactory, _canceller)
connector = self._reactor.connectTCP(
self._host, self._port, wf,
timeout=self._timeout, bindAddress=self._bindAddress)
return wf._onConnection
except:
return defer.fail()
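def _exampleConnectTCP(reactor):
    """
    Editor's note: a minimal sketch, not part of the original module; the
    host and port are hypothetical. The endpoint decides I{where} to
    connect and the factory decides I{what} protocol to speak; the returned
    L{Deferred} fires with the connected protocol instance.
    """
    factory = ClientFactory()
    factory.protocol = Protocol
    endpoint = TCP4ClientEndpoint(reactor, 'example.com', 80)
    return endpoint.connect(factory)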
class SSL4ServerEndpoint(object):
"""
SSL secured TCP server endpoint with an IPv4 configuration.
@ivar _reactor: An L{IReactorSSL} provider.
    @type _port: int
    @ivar _port: The port number on which to listen for incoming connections.
    @type _sslContextFactory: L{OpenSSLCertificateOptions}
    @ivar _sslContextFactory: SSL configuration information as an
        L{OpenSSLCertificateOptions}
@type _backlog: int
@ivar _backlog: size of the listen queue
@type _interface: str
@ivar _interface: the hostname to bind to, defaults to '' (all)
"""
implements(interfaces.IStreamServerEndpoint)
def __init__(self, reactor, port, sslContextFactory,
backlog=50, interface=''):
"""
@param reactor: An L{IReactorSSL} provider.
        @param port: The port number used for listening
        @param sslContextFactory: An instance of
            L{twisted.internet._sslverify.OpenSSLCertificateOptions}.
        @param backlog: size of the listen queue
        @param interface: the hostname to bind to, defaults to '' (all)
"""
self._reactor = reactor
self._port = port
self._sslContextFactory = sslContextFactory
self._backlog = backlog
self._interface = interface
def listen(self, protocolFactory):
"""
Implement L{IStreamServerEndpoint.listen} to listen for SSL on a
TCP socket.
"""
return defer.execute(self._reactor.listenSSL, self._port,
protocolFactory,
contextFactory=self._sslContextFactory,
backlog=self._backlog,
interface=self._interface)
class SSL4ClientEndpoint(object):
"""
SSL secured TCP client endpoint with an IPv4 configuration
@ivar _reactor: An L{IReactorSSL} provider.
@type _host: str
@ivar _host: The hostname to connect to as a C{str}
@type _port: int
@ivar _port: The port to connect to as C{int}
@type _sslContextFactory: L{OpenSSLCertificateOptions}
@ivar _sslContextFactory: SSL configuration information as an
L{OpenSSLCertificateOptions}
@type _timeout: int
@ivar _timeout: number of seconds to wait before assuming the
connection has failed.
@type _bindAddress: tuple
@ivar _bindAddress: a (host, port) tuple of local address to bind
to, or None.
"""
implements(interfaces.IStreamClientEndpoint)
def __init__(self, reactor, host, port, sslContextFactory,
timeout=30, bindAddress=None):
"""
@param reactor: An L{IReactorSSL} provider.
@param host: A hostname, used when connecting
@param port: The port number, used when connecting
@param sslContextFactory: SSL configuration information as an instance
of L{OpenSSLCertificateOptions}.
@param timeout: number of seconds to wait before assuming the
connection has failed.
@param bindAddress: a (host, port) tuple of local address to bind to,
or None.
"""
self._reactor = reactor
self._host = host
self._port = port
self._sslContextFactory = sslContextFactory
self._timeout = timeout
self._bindAddress = bindAddress
def connect(self, protocolFactory):
"""
Implement L{IStreamClientEndpoint.connect} to connect with SSL over
TCP.
"""
def _canceller(deferred):
connector.stopConnecting()
deferred.errback(
error.ConnectingCancelledError(connector.getDestination()))
try:
wf = _WrappingFactory(protocolFactory, _canceller)
connector = self._reactor.connectSSL(
self._host, self._port, wf, self._sslContextFactory,
timeout=self._timeout, bindAddress=self._bindAddress)
return wf._onConnection
except:
return defer.fail()
class UNIXServerEndpoint(object):
"""
UnixSocket server endpoint.
@type _address: str
@ivar _address: a path to a unix socket on the filesystem.
@ivar _reactor: An L{IReactorUNIX} provider.
"""
implements(interfaces.IStreamServerEndpoint)
def __init__(self, reactor, address, backlog=50, mode=0666, wantPID=0):
"""
@param reactor: An L{IReactorUNIX} provider.
@param address: The path to the Unix socket file, used when listening
@param backlog: number of connections to allow in backlog.
@param mode: mode to set on the unix socket. This parameter is
deprecated. Permissions should be set on the directory which
contains the UNIX socket.
@param wantPID: if True, create a pidfile for the socket.
"""
self._reactor = reactor
self._address = address
self._backlog = backlog
self._mode = mode
self._wantPID = wantPID
def listen(self, protocolFactory):
"""
Implement L{IStreamServerEndpoint.listen} to listen on a UNIX socket.
"""
return defer.execute(self._reactor.listenUNIX, self._address,
protocolFactory,
backlog=self._backlog,
mode=self._mode,
wantPID=self._wantPID)
class UNIXClientEndpoint(object):
"""
UnixSocket client endpoint.
@type _path: str
@ivar _path: a path to a unix socket on the filesystem.
@type _timeout: int
@ivar _timeout: number of seconds to wait before assuming the connection
has failed.
@type _checkPID: bool
@ivar _checkPID: if True, check for a pid file to verify that a server
is listening.
@ivar _reactor: An L{IReactorUNIX} provider.
"""
implements(interfaces.IStreamClientEndpoint)
def __init__(self, reactor, path, timeout=30, checkPID=0):
"""
@param reactor: An L{IReactorUNIX} provider.
@param path: The path to the Unix socket file, used when connecting
@param timeout: number of seconds to wait before assuming the
connection has failed.
@param checkPID: if True, check for a pid file to verify that a server
is listening.
"""
self._reactor = reactor
self._path = path
self._timeout = timeout
self._checkPID = checkPID
def connect(self, protocolFactory):
"""
Implement L{IStreamClientEndpoint.connect} to connect via a
UNIX Socket
"""
def _canceller(deferred):
connector.stopConnecting()
deferred.errback(
error.ConnectingCancelledError(connector.getDestination()))
try:
wf = _WrappingFactory(protocolFactory, _canceller)
connector = self._reactor.connectUNIX(
self._path, wf,
timeout=self._timeout,
checkPID=self._checkPID)
return wf._onConnection
except:
return defer.fail()
def _parseTCP(factory, port, interface="", backlog=50):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for a TCP(IPv4) stream endpoint into the structured arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param port: the integer port number to bind
@type port: C{str}
@param interface: the interface IP to listen on
@param backlog: the length of the listen queue
@type backlog: C{str}
@return: a 2-tuple of (args, kwargs), describing the parameters to
L{IReactorTCP.listenTCP} (or, modulo argument 2, the factory, arguments
to L{TCP4ServerEndpoint}).
"""
return (int(port), factory), {'interface': interface,
'backlog': int(backlog)}
def _parseUNIX(factory, address, mode='666', backlog=50, lockfile=True):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for a UNIX (AF_UNIX/SOCK_STREAM) stream endpoint into the
structured arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param address: the pathname of the unix socket
@type address: C{str}
@param backlog: the length of the listen queue
@type backlog: C{str}
@param lockfile: A string '0' or '1', mapping to True and False
respectively. See the C{wantPID} argument to C{listenUNIX}
@return: a 2-tuple of (args, kwargs), describing the parameters to
L{IReactorUNIX.listenUNIX} (or, modulo argument 2, the factory,
arguments to L{UNIXServerEndpoint}).
"""
return (
(address, factory),
{'mode': int(mode, 8), 'backlog': int(backlog),
'wantPID': bool(int(lockfile))})
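# For example (an illustrative sketch, with a None factory):
#   _parseUNIX(None, '/var/run/finger', mode='660', lockfile='1')
# would return (('/var/run/finger', None),
#               {'mode': 0660, 'backlog': 50, 'wantPID': True}).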
def _parseSSL(factory, port, privateKey="server.pem", certKey=None,
sslmethod=None, interface='', backlog=50):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for an SSL (over TCP/IPv4) stream endpoint into the structured
arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param port: the integer port number to bind
@type port: C{str}
@param interface: the interface IP to listen on
@param backlog: the length of the listen queue
@type backlog: C{str}
@param privateKey: The file name of a PEM format private key file.
@type privateKey: C{str}
@param certKey: The file name of a PEM format certificate file.
@type certKey: C{str}
@param sslmethod: The string name of an SSL method, based on the name of a
constant in C{OpenSSL.SSL}. Must be one of: "SSLv23_METHOD",
"SSLv2_METHOD", "SSLv3_METHOD", "TLSv1_METHOD".
@type sslmethod: C{str}
@return: a 2-tuple of (args, kwargs), describing the parameters to
L{IReactorSSL.listenSSL} (or, modulo argument 2, the factory, arguments
to L{SSL4ServerEndpoint}).
"""
from twisted.internet import ssl
if certKey is None:
certKey = privateKey
kw = {}
if sslmethod is not None:
kw['sslmethod'] = getattr(ssl.SSL, sslmethod)
cf = ssl.DefaultOpenSSLContextFactory(privateKey, certKey, **kw)
return ((int(port), factory, cf),
{'interface': interface, 'backlog': int(backlog)})
_serverParsers = {"tcp": _parseTCP,
"unix": _parseUNIX,
"ssl": _parseSSL}
_OP, _STRING = range(2)
def _tokenize(description):
"""
Tokenize a strports string and yield each token.
@param description: a string as described by L{serverFromString} or
L{clientFromString}.
@return: an iterable of 2-tuples of (L{_OP} or L{_STRING}, string). Tuples
starting with L{_OP} will contain a second element of either ':' (i.e.
'next parameter') or '=' (i.e. 'assign parameter value'). For example,
the string 'hello:greet\=ing=world' would result in a generator
yielding these values::
_STRING, 'hello'
_OP, ':'
_STRING, 'greet=ing'
_OP, '='
_STRING, 'world'
"""
current = ''
ops = ':='
nextOps = {':': ':=', '=': ':'}
description = iter(description)
for n in description:
if n in ops:
yield _STRING, current
yield _OP, n
current = ''
ops = nextOps[n]
elif n == '\\':
current += description.next()
else:
current += n
yield _STRING, current
def _parse(description):
"""
Convert a description string into a list of positional and keyword
parameters, using logic vaguely like what Python does.
@param description: a string as described by L{serverFromString} or
L{clientFromString}.
@return: a 2-tuple of C{(args, kwargs)}, where 'args' is a list of all
':'-separated C{str}s not containing an '=' and 'kwargs' is a map of
all C{str}s which do contain an '='. For example, the result of
C{_parse('a:b:d=1:c')} would be C{(['a', 'b', 'c'], {'d': '1'})}.
"""
args, kw = [], {}
def add(sofar):
if len(sofar) == 1:
args.append(sofar[0])
else:
kw[sofar[0]] = sofar[1]
sofar = ()
for (type, value) in _tokenize(description):
if type is _STRING:
sofar += (value,)
elif value == ':':
add(sofar)
sofar = ()
add(sofar)
return args, kw
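# Two more worked examples (illustrative):
#   _parse('tcp:80:interface=127.0.0.1')
#       -> (['tcp', '80'], {'interface': '127.0.0.1'})
#   _parse('unix:/var/run/finger:mode=660')
#       -> (['unix', '/var/run/finger'], {'mode': '660'})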
# Mappings from description "names" to endpoint constructors.
_endpointServerFactories = {
'TCP': TCP4ServerEndpoint,
'SSL': SSL4ServerEndpoint,
'UNIX': UNIXServerEndpoint,
}
_endpointClientFactories = {
'TCP': TCP4ClientEndpoint,
'SSL': SSL4ClientEndpoint,
'UNIX': UNIXClientEndpoint,
}
_NO_DEFAULT = object()
def _parseServer(description, factory, default=None):
"""
Parse a strports description into a 2-tuple of arguments and keyword values.
@param description: A description in the format explained by
L{serverFromString}.
@type description: C{str}
@param factory: A 'factory' argument; this is left-over from
twisted.application.strports, it's not really used.
@type factory: L{IProtocolFactory} or L{None}
@param default: Deprecated argument, specifying the default parser mode to
use for unqualified description strings (those which do not begin with a
prefix and ':').
@type default: C{str} or C{NoneType}
@return: a 3-tuple of (plugin or name, arguments, keyword arguments)
"""
args, kw = _parse(description)
if not args or (len(args) == 1 and not kw):
deprecationMessage = (
"Unqualified strport description passed to 'service'."
"Use qualified endpoint descriptions; for example, 'tcp:%s'."
% (description,))
if default is None:
default = 'tcp'
warnings.warn(
deprecationMessage, category=DeprecationWarning, stacklevel=4)
elif default is _NO_DEFAULT:
raise ValueError(deprecationMessage)
# If the default has been otherwise specified, the user has already
# been warned.
args[0:0] = [default]
endpointType = args[0]
parser = _serverParsers.get(endpointType)
if parser is None:
for plugin in getPlugins(IStreamServerEndpointStringParser):
if plugin.prefix == endpointType:
return (plugin, args[1:], kw)
raise ValueError("Unknown endpoint type: '%s'" % (endpointType,))
return (endpointType.upper(),) + parser(factory, *args[1:], **kw)
def _serverFromStringLegacy(reactor, description, default):
"""
Underlying implementation of L{serverFromString} which avoids exposing the
deprecated 'default' argument to anything but L{strports.service}.
"""
nameOrPlugin, args, kw = _parseServer(description, None, default)
if type(nameOrPlugin) is not str:
plugin = nameOrPlugin
return plugin.parseStreamServer(reactor, *args, **kw)
else:
name = nameOrPlugin
# Chop out the factory.
args = args[:1] + args[2:]
return _endpointServerFactories[name](reactor, *args, **kw)
def serverFromString(reactor, description):
"""
Construct a stream server endpoint from an endpoint description string.
The format for server endpoint descriptions is a simple string. It is a
prefix naming the type of endpoint, then a colon, then the arguments for
that endpoint.
For example, you can call it like this to create an endpoint that will
listen on TCP port 80::
serverFromString(reactor, "tcp:80")
Additional arguments may be specified as keywords, separated with colons.
For example, you can specify the interface for a TCP server endpoint to
bind to like this::
serverFromString(reactor, "tcp:80:interface=127.0.0.1")
SSL server endpoints may be specified with the 'ssl' prefix, and the
private key and certificate files may be specified by the C{privateKey} and
C{certKey} arguments::
serverFromString(reactor, "ssl:443:privateKey=key.pem:certKey=crt.pem")
If a private key file name (C{privateKey}) isn't provided, a "server.pem"
file is assumed to exist which contains the private key. If the certificate
file name (C{certKey}) isn't provided, the private key file is assumed to
contain the certificate as well.
You may escape colons in arguments with a backslash, which you will need to
use if you want to specify a full pathname argument on Windows::
serverFromString(reactor,
"ssl:443:privateKey=C\\:/key.pem:certKey=C\\:/cert.pem")
Finally, the 'unix' prefix may be used to specify a filesystem UNIX socket,
optionally with a 'mode' argument to specify the mode of the socket file
created by C{listen}::
serverFromString(reactor, "unix:/var/run/finger")
serverFromString(reactor, "unix:/var/run/finger:mode=660")
This function is also extensible; new endpoint types may be registered as
L{IStreamServerEndpointStringParser} plugins. See that interface for more
information.
@param reactor: The server endpoint will be constructed with this reactor.
@param description: The strports description to parse.
@return: A new endpoint which can be used to listen with the parameters
given by C{description}.
@rtype: L{IStreamServerEndpoint<twisted.internet.interfaces.IStreamServerEndpoint>}
@raise ValueError: when the 'description' string cannot be parsed.
@since: 10.2
"""
return _serverFromStringLegacy(reactor, description, _NO_DEFAULT)
def quoteStringArgument(argument):
"""
Quote an argument to L{serverFromString} and L{clientFromString}. Since
arguments are separated with colons and colons are escaped with
backslashes, some care is necessary if, for example, you have a pathname,
you may be tempted to interpolate into a string like this::
serverFromString("ssl:443:privateKey=%s" % (myPathName,))
This may appear to work, but will have portability issues (Windows
pathnames, for example). Usually you should just construct the appropriate
endpoint type rather than interpolating strings, which in this case would
be L{SSL4ServerEndpoint}. There are some use-cases where you may need to
generate such a string, though; for example, a tool to manipulate a
configuration file which has strports descriptions in it. To be correct in
those cases, do this instead::
serverFromString("ssl:443:privateKey=%s" %
(quoteStringArgument(myPathName),))
@param argument: The part of the endpoint description string you want to
pass through.
@type argument: C{str}
@return: The quoted argument.
@rtype: C{str}
"""
return argument.replace('\\', '\\\\').replace(':', '\\:')
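# For example (illustrative): quoteStringArgument("/tmp/a:b") returns
# "/tmp/a\:b" -- backslashes are doubled first, then colons are escaped.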
def _parseClientTCP(**kwargs):
"""
Perform any argument value coercion necessary for TCP client parameters.
Valid keyword arguments to this function are all L{IReactorTCP.connectTCP}
arguments.
@return: The coerced values as a C{dict}.
"""
kwargs['port'] = int(kwargs['port'])
try:
kwargs['timeout'] = int(kwargs['timeout'])
except KeyError:
pass
return kwargs
def _loadCAsFromDir(directoryPath):
"""
Load certificate-authority certificate objects in a given directory.
@param directoryPath: a L{FilePath} pointing at a directory to load .pem
files from.
@return: a C{list} of L{OpenSSL.crypto.X509} objects.
"""
from twisted.internet import ssl
caCerts = {}
for child in directoryPath.children():
if not child.basename().split('.')[-1].lower() == 'pem':
continue
try:
data = child.getContent()
except IOError:
# Permission denied, corrupt disk, we don't care.
continue
try:
theCert = ssl.Certificate.loadPEM(data)
except ssl.SSL.Error:
# Duplicate certificate, invalid certificate, etc. We don't care.
pass
else:
caCerts[theCert.digest()] = theCert.original
return caCerts.values()
def _parseClientSSL(**kwargs):
"""
Perform any argument value coercion necessary for SSL client parameters.
Valid keyword arguments to this function are all L{IReactorSSL.connectSSL}
arguments except for C{contextFactory}. Instead, C{certKey} (the path name
of the certificate file) and C{privateKey} (the path name of the private key
associated with the certificate) are accepted and used to construct a
context factory.
@param caCertsDir: The one parameter which is not part of
L{IReactorSSL.connectSSL}'s signature, this is a path name used to
construct a list of certificate authority certificates. The directory
will be scanned for files ending in C{.pem}, all of which will be
considered valid certificate authorities for this connection.
@type caCertsDir: C{str}
@return: The coerced values as a C{dict}.
"""
from twisted.internet import ssl
kwargs = _parseClientTCP(**kwargs)
certKey = kwargs.pop('certKey', None)
privateKey = kwargs.pop('privateKey', None)
caCertsDir = kwargs.pop('caCertsDir', None)
if certKey is not None:
certx509 = ssl.Certificate.loadPEM(
FilePath(certKey).getContent()).original
else:
certx509 = None
if privateKey is not None:
privateKey = ssl.PrivateCertificate.loadPEM(
FilePath(privateKey).getContent()).privateKey.original
else:
privateKey = None
if caCertsDir is not None:
verify = True
caCerts = _loadCAsFromDir(FilePath(caCertsDir))
else:
verify = False
caCerts = None
kwargs['sslContextFactory'] = ssl.CertificateOptions(
method=ssl.SSL.SSLv23_METHOD,
certificate=certx509,
privateKey=privateKey,
verify=verify,
caCerts=caCerts
)
return kwargs
def _parseClientUNIX(**kwargs):
"""
Perform any argument value coercion necessary for UNIX client parameters.
Valid keyword arguments to this function are all L{IReactorUNIX.connectUNIX}
arguments except for C{checkPID}. Instead, C{lockfile} is accepted and has
the same meaning.
@return: The coerced values as a C{dict}.
"""
try:
kwargs['checkPID'] = bool(int(kwargs.pop('lockfile')))
except KeyError:
pass
try:
kwargs['timeout'] = int(kwargs['timeout'])
except KeyError:
pass
return kwargs
_clientParsers = {
'TCP': _parseClientTCP,
'SSL': _parseClientSSL,
'UNIX': _parseClientUNIX,
}
def clientFromString(reactor, description):
"""
Construct a client endpoint from a description string.
Client description strings are much like server description strings,
although they take all of their arguments as keywords, since even the
simplest client endpoint (plain TCP) requires at least 2 arguments (host
and port) to construct.
You can create a TCP client endpoint with the 'host' and 'port' arguments,
like so::
clientFromString(reactor, "tcp:host=www.example.com:port=80")
or an SSL client endpoint with those arguments, plus the arguments used by
the server SSL, for a client certificate::
clientFromString(reactor, "ssl:host=web.example.com:port=443:"
"privateKey=foo.pem:certKey=foo.pem")
to specify your certificate trust roots, you can identify a directory with
PEM files in it with the C{caCertsDir} argument::
clientFromString(reactor, "ssl:host=web.example.com:port=443:"
"caCertsDir=/etc/ssl/certs")
This function is also extensible; new endpoint types may be registered as
L{IStreamClientEndpointStringParser} plugins. See that interface for more
information.
@param reactor: The client endpoint will be constructed with this reactor.
@param description: The strports description to parse.
@return: A new endpoint which can be used to connect with the parameters
given by C{description}.
@rtype: L{IStreamClientEndpoint<twisted.internet.interfaces.IStreamClientEndpoint>}
@since: 10.2
"""
args, kwargs = _parse(description)
aname = args.pop(0)
name = aname.upper()
for plugin in getPlugins(IStreamClientEndpointStringParser):
if plugin.prefix.upper() == name:
return plugin.parseStreamClient(*args, **kwargs)
if name not in _clientParsers:
raise ValueError("Unknown endpoint type: %r" % (aname,))
kwargs = _clientParsers[name](*args, **kwargs)
return _endpointClientFactories[name](reactor, **kwargs)
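# Illustrative sketch (not part of the original module): parsing a client
# description string and connecting with it. The host name and factory
# are hypothetical placeholders.
#
#   from twisted.internet import reactor, protocol
#
#   factory = protocol.ClientFactory()
#   factory.protocol = protocol.Protocol
#   endpoint = clientFromString(reactor, "tcp:host=example.com:port=80")
#   d = endpoint.connect(factory)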
|
PythonScientists/Shape
|
refs/heads/master
|
main/module/blog/views.py
|
1
|
#-*-coding:utf-8-*-
from . import blog_blueprint
from flask import render_template
from main.forms import CommentForm
from main.models import Post
"""
Mainly displays highly rated and recommended posts
"""
@blog_blueprint.route("/")
def blog_index():
return render_template("blog.html")
"""
Displays all posts, ordered only by publish date
"""
@blog_blueprint.route("/post_list")
def post_list():
post_list = Post.query.order_by(Post.publish_date.desc()).all()
return render_template("blog/post.html", post_list=post_list)
"""
Display a single post
"""
@blog_blueprint.route("/post_list/<int:post_id>")
def post(post_id):
post = Post.query.get_or_404(post_id)
return render_template("blog/post.html", post=post)
"""
User actions on a single post
"""
@blog_blueprint.route("/post_list/<int:post_id>/<string:control>")
def post_id_control(post_id, control):
pass
|
procangroup/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/schedules/management/commands/send_course_update.py
|
18
|
from openedx.core.djangoapps.schedules.management.commands import SendEmailBaseCommand
from openedx.core.djangoapps.schedules.tasks import ScheduleCourseUpdate
class Command(SendEmailBaseCommand):
async_send_task = ScheduleCourseUpdate
log_prefix = 'Course Update'
offsets = xrange(-7, -77, -7)
|
cewood/ansible-modules-core
|
refs/heads/devel
|
files/stat.py
|
14
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: stat
version_added: "1.3"
short_description: retrieve file or file system status
description:
- Retrieves facts for a file similar to the linux/unix 'stat' command.
options:
path:
description:
- The full path of the file/object to get the facts of
required: true
default: null
follow:
description:
- Whether to follow symlinks
required: false
default: no
get_md5:
description:
- Whether to return the md5 sum of the file. Will return None if we're unable to use md5 (Common for FIPS-140 compliant systems)
required: false
default: yes
get_checksum:
description:
- Whether to return a checksum of the file (default sha1)
required: false
default: yes
version_added: "1.8"
checksum_algorithm:
description:
- Algorithm to determine checksum of file. Will throw an error if the host is unable to use specified algorithm.
required: false
choices: [ 'sha1', 'sha224', 'sha256', 'sha384', 'sha512' ]
default: sha1
aliases: [ 'checksum_algo', 'checksum' ]
version_added: "2.0"
mime:
description:
- Use file magic and return data about the nature of the file. This uses the 'file' utility found on most Linux/Unix systems.
- This will add both 'mime_type' and 'charset' fields to the return, if possible.
required: false
choices: [ Yes, No ]
default: No
version_added: "2.1"
aliases: [ 'mime_type', 'mime-type' ]
author: "Bruce Pennypacker (@bpennypacker)"
'''
EXAMPLES = '''
# Obtain the stats of /etc/foo.conf, and check that the file still belongs
# to 'root'. Fail otherwise.
- stat: path=/etc/foo.conf
register: st
- fail: msg="Whoops! file ownership has changed"
when: st.stat.pw_name != 'root'
# Determine if a path exists and is a symlink. Note that if the path does
# not exist, and we test sym.stat.islnk, it will fail with an error. So
# therefore, we must test whether it is defined.
# Run this to understand the structure, the skipped ones do not pass the
# check performed by 'when'
- stat: path=/path/to/something
register: sym
- debug: msg="islnk isn't defined (path doesn't exist)"
when: sym.stat.islnk is not defined
- debug: msg="islnk is defined (path must exist)"
when: sym.stat.islnk is defined
- debug: msg="Path exists and is a symlink"
when: sym.stat.islnk is defined and sym.stat.islnk
- debug: msg="Path exists and isn't a symlink"
when: sym.stat.islnk is defined and sym.stat.islnk == False
# Determine if a path exists and is a directory. Note that we need to test
# both that p.stat.isdir actually exists, and also that it's set to true.
- stat: path=/path/to/something
register: p
- debug: msg="Path exists and is a directory"
when: p.stat.isdir is defined and p.stat.isdir
# Don't do md5 checksum
- stat: path=/path/to/myhugefile get_md5=no
# Use sha256 to calculate checksum
- stat: path=/path/to/something checksum_algorithm=sha256
'''
RETURN = '''
stat:
description: dictionary containing all the stat data
returned: success
type: dictionary
contains:
exists:
description: if the destination path actually exists or not
returned: success
type: boolean
sample: True
path:
description: The full path of the file/object to get the facts of
returned: success and if path exists
type: string
sample: '/path/to/file'
mode:
description: Unix permissions of the file in octal
returned: success, path exists and user can read stats
type: octal
sample: 1755
isdir:
description: Tells you if the path is a directory
returned: success, path exists and user can read stats
type: boolean
sample: False
ischr:
description: Tells you if the path is a character device
returned: success, path exists and user can read stats
type: boolean
sample: False
isblk:
description: Tells you if the path is a block device
returned: success, path exists and user can read stats
type: boolean
sample: False
isreg:
description: Tells you if the path is a regular file
returned: success, path exists and user can read stats
type: boolean
sample: True
isfifo:
description: Tells you if the path is a named pipe
returned: success, path exists and user can read stats
type: boolean
sample: False
islnk:
description: Tells you if the path is a symbolic link
returned: success, path exists and user can read stats
type: boolean
sample: False
issock:
description: Tells you if the path is a unix domain socket
returned: success, path exists and user can read stats
type: boolean
sample: False
uid:
description: Numeric id representing the file owner
returned: success, path exists and user can read stats
type: int
sample: 1003
gid:
description: Numeric id representing the group of the owner
returned: success, path exists and user can read stats
type: int
sample: 1003
size:
description: Size in bytes for a plain file, amount of data for some special files
returned: success, path exists and user can read stats
type: int
sample: 203
inode:
description: Inode number of the path
returned: success, path exists and user can read stats
type: int
sample: 12758
dev:
description: Device the inode resides on
returned: success, path exists and user can read stats
type: int
sample: 33
nlink:
description: Number of links to the inode (hard links)
returned: success, path exists and user can read stats
type: int
sample: 1
atime:
description: Time of last access
returned: success, path exists and user can read stats
type: float
sample: 1424348972.575
mtime:
description: Time of last modification
returned: success, path exists and user can read stats
type: float
sample: 1424348972.575
ctime:
description: Time of last metadata update or creation (depends on OS)
returned: success, path exists and user can read stats
type: float
sample: 1424348972.575
wusr:
description: Tells you if the owner has write permission
returned: success, path exists and user can read stats
type: boolean
sample: True
rusr:
description: Tells you if the owner has read permission
returned: success, path exists and user can read stats
type: boolean
sample: True
xusr:
description: Tells you if the owner has execute permission
returned: success, path exists and user can read stats
type: boolean
sample: True
wgrp:
description: Tells you if the owner's group has write permission
returned: success, path exists and user can read stats
type: boolean
sample: False
rgrp:
description: Tells you if the owner's group has read permission
returned: success, path exists and user can read stats
type: boolean
sample: True
xgrp:
description: Tells you if the owner's group has execute permission
returned: success, path exists and user can read stats
type: boolean
sample: True
woth:
description: Tells you if others have write permission
returned: success, path exists and user can read stats
type: boolean
sample: False
roth:
description: Tells you if others have read permission
returned: success, path exists and user can read stats
type: boolean
sample: True
xoth:
description: Tells you if others have execute permission
returned: success, path exists and user can read stats
type: boolean
sample: True
isuid:
description: Tells you if the invoking user's id matches the owner's id
returned: success, path exists and user can read stats
type: boolean
sample: False
isgid:
description: Tells you if the invoking user's group id matches the owner's group id
returned: success, path exists and user can read stats
type: boolean
sample: False
lnk_source:
description: Original path
returned: success, path exists and user can read stats and the path is a symbolic link
type: string
sample: /home/foobar/21102015-1445431274-908472971
md5:
description: md5 hash of the path
returned: success, path exists and user can read stats and path supports hashing and md5 is supported
type: string
sample: f88fa92d8cf2eeecf4c0a50ccc96d0c0
checksum_algorithm:
description: hash of the path
returned: success, path exists, user can read stats, path supports hashing and supplied checksum algorithm is available
type: string
sample: 50ba294cdf28c0d5bcde25708df53346825a429f
aliases: ['checksum', 'checksum_algo']
pw_name:
description: User name of owner
returned: success, path exists and user can read stats and installed python supports it
type: string
sample: httpd
gr_name:
description: Group name of owner
returned: success, path exists and user can read stats and installed python supports it
type: string
sample: www-data
mime_type:
description: file magic data or mime-type
returned: success, path exists and user can read stats and installed python supports it and the `mime` option was true, will return 'unknown' on error.
type: string
sample: PDF document, version 1.2
charset:
description: file character set or encoding
returned: success, path exists and user can read stats and installed python supports it and the `mime` option was true, will return 'unknown' on error.
type: string
sample: us-ascii
'''
import os
import sys
import errno
import stat
from stat import *
import pwd
import grp
def main():
module = AnsibleModule(
argument_spec = dict(
path = dict(required=True, type='path'),
follow = dict(default='no', type='bool'),
get_md5 = dict(default='yes', type='bool'),
get_checksum = dict(default='yes', type='bool'),
checksum_algorithm = dict(default='sha1', type='str', choices=['sha1', 'sha224', 'sha256', 'sha384', 'sha512'], aliases=['checksum_algo', 'checksum']),
mime = dict(default=False, type='bool', aliases=['mime_type', 'mime-type']),
),
supports_check_mode = True
)
path = module.params.get('path')
follow = module.params.get('follow')
get_md5 = module.params.get('get_md5')
get_checksum = module.params.get('get_checksum')
checksum_algorithm = module.params.get('checksum_algorithm')
try:
if follow:
st = os.stat(path)
else:
st = os.lstat(path)
except OSError:
e = get_exception()
if e.errno == errno.ENOENT:
d = { 'exists' : False }
module.exit_json(changed=False, stat=d)
module.fail_json(msg = e.strerror)
mode = st.st_mode
# back to ansible
d = {
'exists' : True,
'path' : path,
'mode' : "%04o" % S_IMODE(mode),
'isdir' : S_ISDIR(mode),
'ischr' : S_ISCHR(mode),
'isblk' : S_ISBLK(mode),
'isreg' : S_ISREG(mode),
'isfifo' : S_ISFIFO(mode),
'islnk' : S_ISLNK(mode),
'issock' : S_ISSOCK(mode),
'uid' : st.st_uid,
'gid' : st.st_gid,
'size' : st.st_size,
'inode' : st.st_ino,
'dev' : st.st_dev,
'nlink' : st.st_nlink,
'atime' : st.st_atime,
'mtime' : st.st_mtime,
'ctime' : st.st_ctime,
'wusr' : bool(mode & stat.S_IWUSR),
'rusr' : bool(mode & stat.S_IRUSR),
'xusr' : bool(mode & stat.S_IXUSR),
'wgrp' : bool(mode & stat.S_IWGRP),
'rgrp' : bool(mode & stat.S_IRGRP),
'xgrp' : bool(mode & stat.S_IXGRP),
'woth' : bool(mode & stat.S_IWOTH),
'roth' : bool(mode & stat.S_IROTH),
'xoth' : bool(mode & stat.S_IXOTH),
'isuid' : bool(mode & stat.S_ISUID),
'isgid' : bool(mode & stat.S_ISGID),
}
if S_ISLNK(mode):
d['lnk_source'] = os.path.realpath(path)
if S_ISREG(mode) and get_md5 and os.access(path,os.R_OK):
# Will fail on FIPS-140 compliant systems
try:
d['md5'] = module.md5(path)
except ValueError:
d['md5'] = None
if S_ISREG(mode) and get_checksum and os.access(path,os.R_OK):
d['checksum'] = module.digest_from_file(path, checksum_algorithm)
try:
pw = pwd.getpwuid(st.st_uid)
d['pw_name'] = pw.pw_name
grp_info = grp.getgrgid(st.st_gid)
d['gr_name'] = grp_info.gr_name
except:
pass
if module.params.get('mime'):
d['mime_type'] = 'unknown'
d['charset'] = 'unknown'
filecmd = [module.get_bin_path('file', True),'-i', path]
try:
rc, out, err = module.run_command(filecmd)
if rc == 0:
mtype, chset = out.split(':')[1].split(';')
d['mime_type'] = mtype.strip()
d['charset'] = chset.split('=')[1].strip()
except:
pass
module.exit_json(changed=False, stat=d)
# import module snippets
from ansible.module_utils.basic import *
main()
|
kenshay/ImageScripter
|
refs/heads/master
|
ProgramData/SystemFiles/Python/Lib/distutils/unixccompiler.py
|
11
|
"""distutils.unixccompiler
Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
* macros defined with -Dname[=value]
* macros undefined with -Uname
* include search directories specified with -Idir
* libraries specified with -llib
* library search directories specified with -Ldir
* compile handled by 'cc' (or similar) executable with -c option:
compiles .c to .o
* link static library handled by 'ar' command (possibly with 'ranlib')
* link shared library handled by 'cc -shared'
"""
__revision__ = "$Id$"
import os, sys, re
from types import StringType, NoneType
from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
DistutilsExecError, CompileError, LibError, LinkError
from distutils import log
if sys.platform == 'darwin':
import _osx_support
# XXX Things not currently handled:
# * optimization/debug/warning flags; we just use whatever's in Python's
# Makefile and live with it. Is this adequate? If not, we might
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
# SunCCompiler, and I suspect down that road lies madness.
# * even if we don't know a warning flag from an optimization flag,
# we need some way for outsiders to feed preprocessor/compiler/linker
# flags in to us -- eg. a sysadmin might want to mandate certain flags
# via a site config file, or a user might want to set something for
# compiling this module distribution only via the setup.py command
# line, whatever. As long as these options come from something on the
# current system, they can be as system-dependent as they like, and we
# should just happily stuff them into the preprocessor/compiler/linker
# options and carry on.
class UnixCCompiler(CCompiler):
compiler_type = 'unix'
# These are used by CCompiler in two places: the constructor sets
# instance attributes 'preprocessor', 'compiler', etc. from them, and
# 'set_executable()' allows any of these to be set. The defaults here
# are pretty generic; they will probably have to be set by an outsider
# (eg. using information discovered by the sysconfig about building
# Python extensions).
executables = {'preprocessor' : None,
'compiler' : ["cc"],
'compiler_so' : ["cc"],
'compiler_cxx' : ["cc"],
'linker_so' : ["cc", "-shared"],
'linker_exe' : ["cc"],
'archiver' : ["ar", "-cr"],
'ranlib' : None,
}
if sys.platform[:6] == "darwin":
executables['ranlib'] = ["ranlib"]
# Needed for the filename generation methods provided by the base
# class, CCompiler. NB. whoever instantiates/uses a particular
# UnixCCompiler instance should set 'shared_lib_ext' -- we set a
# reasonable common default here, but it's not necessarily used on all
# Unices!
src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
obj_extension = ".o"
static_lib_extension = ".a"
shared_lib_extension = ".so"
dylib_lib_extension = ".dylib"
xcode_stub_lib_extension = ".tbd"
static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
xcode_stub_lib_format = dylib_lib_format
if sys.platform == "cygwin":
exe_extension = ".exe"
def preprocess(self, source,
output_file=None, macros=None, include_dirs=None,
extra_preargs=None, extra_postargs=None):
ignore, macros, include_dirs = \
self._fix_compile_args(None, macros, include_dirs)
pp_opts = gen_preprocess_options(macros, include_dirs)
pp_args = self.preprocessor + pp_opts
if output_file:
pp_args.extend(['-o', output_file])
if extra_preargs:
pp_args[:0] = extra_preargs
if extra_postargs:
pp_args.extend(extra_postargs)
pp_args.append(source)
# We need to preprocess: either we're being forced to, or we're
# generating output to stdout, or there's a target output file and
# the source file is newer than the target (or the target doesn't
# exist).
if self.force or output_file is None or newer(source, output_file):
if output_file:
self.mkpath(os.path.dirname(output_file))
try:
self.spawn(pp_args)
except DistutilsExecError, msg:
raise CompileError, msg
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
compiler_so = self.compiler_so
if sys.platform == 'darwin':
compiler_so = _osx_support.compiler_fixup(compiler_so,
cc_args + extra_postargs)
try:
self.spawn(compiler_so + cc_args + [src, '-o', obj] +
extra_postargs)
except DistutilsExecError, msg:
raise CompileError, msg
def create_static_lib(self, objects, output_libname,
output_dir=None, debug=0, target_lang=None):
objects, output_dir = self._fix_object_args(objects, output_dir)
output_filename = \
self.library_filename(output_libname, output_dir=output_dir)
if self._need_link(objects, output_filename):
self.mkpath(os.path.dirname(output_filename))
self.spawn(self.archiver +
[output_filename] +
objects + self.objects)
# Not many Unices require ranlib anymore -- SunOS 4.x is, I
# think, the only major Unix that does. Maybe we need some
# platform intelligence here to skip ranlib if it's not
# needed -- or maybe Python's configure script took care of
# it for us, hence the check for leading colon.
if self.ranlib:
try:
self.spawn(self.ranlib + [output_filename])
except DistutilsExecError, msg:
raise LibError, msg
else:
log.debug("skipping %s (up-to-date)", output_filename)
def link(self, target_desc, objects,
output_filename, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
objects, output_dir = self._fix_object_args(objects, output_dir)
libraries, library_dirs, runtime_library_dirs = \
self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
libraries)
if type(output_dir) not in (StringType, NoneType):
raise TypeError, "'output_dir' must be a string or None"
if output_dir is not None:
output_filename = os.path.join(output_dir, output_filename)
if self._need_link(objects, output_filename):
ld_args = (objects + self.objects +
lib_opts + ['-o', output_filename])
if debug:
ld_args[:0] = ['-g']
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath(os.path.dirname(output_filename))
try:
if target_desc == CCompiler.EXECUTABLE:
linker = self.linker_exe[:]
else:
linker = self.linker_so[:]
if target_lang == "c++" and self.compiler_cxx:
# skip over environment variable settings if /usr/bin/env
# is used to set up the linker's environment.
# This is needed on OSX. Note: this assumes that the
# normal and C++ compiler have the same environment
# settings.
i = 0
if os.path.basename(linker[0]) == "env":
i = 1
while '=' in linker[i]:
i = i + 1
linker[i] = self.compiler_cxx[i]
if sys.platform == 'darwin':
linker = _osx_support.compiler_fixup(linker, ld_args)
self.spawn(linker + ld_args)
except DistutilsExecError, msg:
raise LinkError, msg
else:
log.debug("skipping %s (up-to-date)", output_filename)
# -- Miscellaneous methods -----------------------------------------
# These are all used by the gen_lib_options() function, in
# ccompiler.py.
def library_dir_option(self, dir):
return "-L" + dir
def _is_gcc(self, compiler_name):
return "gcc" in compiler_name or "g++" in compiler_name
def runtime_library_dir_option(self, dir):
# XXX Hackish, at the very least. See Python bug #445902:
# http://sourceforge.net/tracker/index.php
# ?func=detail&aid=445902&group_id=5470&atid=105470
# Linkers on different platforms need different options to
# specify that directories need to be added to the list of
# directories searched for dependencies when a dynamic library
# is sought. GCC has to be told to pass the -R option through
# to the linker, whereas other compilers just know this.
# Other compilers may need something slightly different. At
# this time, there's no way to determine this information from
# the configuration data stored in the Python installation, so
# we use this hack.
compiler = os.path.basename(sysconfig.get_config_var("CC"))
if sys.platform[:6] == "darwin":
# MacOSX's linker doesn't understand the -R flag at all
return "-L" + dir
elif sys.platform[:7] == "freebsd":
return "-Wl,-rpath=" + dir
elif sys.platform[:5] == "hp-ux":
if self._is_gcc(compiler):
return ["-Wl,+s", "-L" + dir]
return ["+s", "-L" + dir]
elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
return ["-rpath", dir]
elif self._is_gcc(compiler):
return "-Wl,-R" + dir
else:
return "-R" + dir
def library_option(self, lib):
return "-l" + lib
def find_library_file(self, dirs, lib, debug=0):
shared_f = self.library_filename(lib, lib_type='shared')
dylib_f = self.library_filename(lib, lib_type='dylib')
xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub')
static_f = self.library_filename(lib, lib_type='static')
if sys.platform == 'darwin':
# On OSX users can specify an alternate SDK using
# '-isysroot', calculate the SDK root if it is specified
# (and use it further on)
#
# Note that, as of Xcode 7, Apple SDKs may contain textual stub
# libraries with .tbd extensions rather than the normal .dylib
# shared libraries installed in /. The Apple compiler tool
# chain handles this transparently but it can cause problems
# for programs that are being built with an SDK and searching
# for specific libraries. Callers of find_library_file need to
# keep in mind that the base filename of the returned SDK library
# file might have a different extension from that of the library
# file installed on the running system, for example:
# /Applications/Xcode.app/Contents/Developer/Platforms/
# MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/
# usr/lib/libedit.tbd
# vs
# /usr/lib/libedit.dylib
cflags = sysconfig.get_config_var('CFLAGS')
m = re.search(r'-isysroot\s+(\S+)', cflags)
if m is None:
sysroot = '/'
else:
sysroot = m.group(1)
for dir in dirs:
shared = os.path.join(dir, shared_f)
dylib = os.path.join(dir, dylib_f)
static = os.path.join(dir, static_f)
xcode_stub = os.path.join(dir, xcode_stub_f)
if sys.platform == 'darwin' and (
dir.startswith('/System/') or (
dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):
shared = os.path.join(sysroot, dir[1:], shared_f)
dylib = os.path.join(sysroot, dir[1:], dylib_f)
static = os.path.join(sysroot, dir[1:], static_f)
xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f)
# We're second-guessing the linker here, with not much hard
# data to go on: GCC seems to prefer the shared library, so I'm
# assuming that *all* Unix C compilers do. And of course I'm
# ignoring even GCC's "-static" option. So sue me.
if os.path.exists(dylib):
return dylib
elif os.path.exists(xcode_stub):
return xcode_stub
elif os.path.exists(shared):
return shared
elif os.path.exists(static):
return static
# Oops, didn't find it in *any* of 'dirs'
return None
|
beigenmann/WebIOPi
|
refs/heads/master
|
python/webiopi/devices/sensor/__init__.py
|
4
|
# Copyright 2012-2013 Eric Ptak - trouch.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from webiopi.utils.types import toint
from webiopi.utils.types import M_JSON
from webiopi.devices.instance import deviceInstance
from webiopi.decorators.rest import request, response
class Pressure():
def __init__(self, altitude=0, external=None):
self.altitude = toint(altitude)
if isinstance(external, str):
self.external = deviceInstance(external)
else:
self.external = external
if self.external != None and not isinstance(self.external, Temperature):
raise Exception("external must be a Temperature sensor")
def __family__(self):
return "Pressure"
def __getPascal__(self):
raise NotImplementedError
def __getPascalAtSea__(self):
raise NotImplementedError
@request("GET", "sensor/pressure/pa")
@response("%d")
def getPascal(self):
return self.__getPascal__()
@request("GET", "sensor/pressure/hpa")
@response("%.2f")
def getHectoPascal(self):
return float(self.__getPascal__()) / 100.0
@request("GET", "sensor/pressure/sea/pa")
@response("%d")
def getPascalAtSea(self):
pressure = self.__getPascal__()
if self.external != None:
k = self.external.getKelvin()
if k != 0:
return float(pressure) * (1.0 + 0.0065 * self.altitude / k)**5.255
return float(pressure) / (1.0 - self.altitude / 44330.0)**5.255
@request("GET", "sensor/pressure/sea/hpa")
@response("%.2f")
def getHectoPascalAtSea(self):
return self.getPascalAtSea() / 100.0
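# Worked example for the default altitude branch above (approximate,
# illustration only): a reading of 90000 Pa at an altitude of 1000 m gives
# 90000 / (1.0 - 1000/44330.0)**5.255 ~= 101465 Pa (~1014.65 hPa) at sea
# level.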
class Temperature():
def __family__(self):
return "Temperature"
def __getKelvin__(self):
raise NotImplementedError
def __getCelsius__(self):
raise NotImplementedError
def __getFahrenheit__(self):
raise NotImplementedError
def Kelvin2Celsius(self, value=None):
if value == None:
value = self.getKelvin()
return value - 273.15
def Kelvin2Fahrenheit(self, value=None):
if value == None:
value = self.getKelvin()
return value * 1.8 - 459.67
def Celsius2Kelvin(self, value=None):
if value == None:
value = self.getCelsius()
return value + 273.15
def Celsius2Fahrenheit(self, value=None):
if value == None:
value = self.getCelsius()
return value * 1.8 + 32
def Fahrenheit2Kelvin(self, value=None):
if value == None:
value = self.getFahrenheit()
return (value - 459.67) / 1.8
def Fahrenheit2Celsius(self, value=None):
if value == None:
value = self.getFahrenheit()
return (value - 32) / 1.8
@request("GET", "sensor/temperature/k")
@response("%.02f")
def getKelvin(self):
return self.__getKelvin__()
@request("GET", "sensor/temperature/c")
@response("%.02f")
def getCelsius(self):
return self.__getCelsius__()
@request("GET", "sensor/temperature/f")
@response("%.02f")
def getFahrenheit(self):
return self.__getFahrenheit__()
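# Sanity checks for the converters above (illustration only; called with
# an explicit value they behave as pure functions):
#   Celsius2Fahrenheit(100) -> 212.0
#   Fahrenheit2Celsius(32)  -> 0.0
#   Celsius2Kelvin(0)       -> 273.15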
class Luminosity():
def __family__(self):
return "Luminosity"
def __getLux__(self):
raise NotImplementedError
@request("GET", "sensor/luminosity/lux")
@response("%.02f")
def getLux(self):
return self.__getLux__()
class Distance():
def __family__(self):
return "Distance"
def __getMillimeter__(self):
raise NotImplementedError
@request("GET", "sensor/distance/mm")
@response("%.02f")
def getMillimeter(self):
return self.__getMillimeter__()
@request("GET", "sensor/distance/cm")
@response("%.02f")
def getCentimeter(self):
return self.getMillimeter() / 10
@request("GET", "sensor/distance/m")
@response("%.02f")
def getMeter(self):
return self.getMillimeter() / 1000
@request("GET", "sensor/distance/in")
@response("%.02f")
def getInch(self):
return self.getMillimeter() / 25.4
@request("GET", "sensor/distance/ft")
@response("%.02f")
def getFoot(self):
return self.getInch() / 12
@request("GET", "sensor/distance/yd")
@response("%.02f")
def getYard(self):
return self.getInch() / 36
class Humidity():
def __family__(self):
return "Humidity"
def __getHumidity__(self):
raise NotImplementedError
@request("GET", "sensor/humidity/float")
@response("%f")
def getHumidity(self):
return self.__getHumidity__()
@request("GET", "sensor/humidity/percent")
@response("%d")
def getHumidityPercent(self):
return self.__getHumidity__() * 100
DRIVERS = {}
DRIVERS["bmp085"] = ["BMP085", "BMP180"]
DRIVERS["onewiretemp"] = ["DS1822", "DS1825", "DS18B20", "DS18S20", "DS28EA00"]
DRIVERS["tmpXXX"] = ["TMP75", "TMP102", "TMP275"]
DRIVERS["tslXXXX"] = ["TSL2561", "TSL2561CS", "TSL2561T", "TSL4531", "TSL45311", "TSL45313", "TSL45315", "TSL45317"]
DRIVERS["vcnl4000"] = ["VCNL4000"]
DRIVERS["hytXXX"] = ["HYT221"]
|
CourseTalk/edx-platform
|
refs/heads/master
|
lms/djangoapps/commerce/api/v1/tests/test_serializers.py
|
109
|
""" Commerce API v1 serializer tests. """
from django.test import TestCase
from commerce.api.v1.serializers import serializers, validate_course_id
class CourseValidatorTests(TestCase):
""" Tests for Course Validator method. """
def test_validate_course_id_with_non_existent_course(self):
""" Verify a validator checking non-existent courses."""
course_key = 'non/existing/keyone'
error_msg = u"Course {} does not exist.".format(course_key)
with self.assertRaisesRegexp(serializers.ValidationError, error_msg):
validate_course_id(course_key)
def test_validate_course_id_with_invalid_key(self):
""" Verify a validator checking invalid course keys."""
course_key = 'invalidkey'
error_msg = u"{} is not a valid course key.".format(course_key)
with self.assertRaisesRegexp(serializers.ValidationError, error_msg):
validate_course_id(course_key)
|
kstrauser/ansible
|
refs/heads/devel
|
v1/ansible/runner/lookup_plugins/password.py
|
144
|
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2013, Javier Candeira <javier@candeira.com>
# (c) 2013, Maykel Moya <mmoya@speedyrails.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible import utils, errors
import os
import errno
from string import ascii_letters, digits
import string
import random
class LookupModule(object):
LENGTH = 20
def __init__(self, length=None, encrypt=None, basedir=None, **kwargs):
self.basedir = basedir
def random_salt(self):
salt_chars = ascii_letters + digits + './'
return utils.random_password(length=8, chars=salt_chars)
def run(self, terms, inject=None, **kwargs):
terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
ret = []
for term in terms:
# you can't have escaped spaces in your pathname
params = term.split()
relpath = params[0]
paramvals = {
'length': LookupModule.LENGTH,
'encrypt': None,
'chars': ['ascii_letters','digits',".,:-_"],
}
# get non-default parameters if specified
try:
for param in params[1:]:
name, value = param.split('=')
assert(name in paramvals)
if name == 'length':
paramvals[name] = int(value)
elif name == 'chars':
use_chars=[]
if ",," in value:
use_chars.append(',')
use_chars.extend(value.replace(',,',',').split(','))
paramvals['chars'] = use_chars
else:
paramvals[name] = value
except (ValueError, AssertionError), e:
raise errors.AnsibleError(e)
length = paramvals['length']
encrypt = paramvals['encrypt']
use_chars = paramvals['chars']
# get password or create it if file doesn't exist
path = utils.path_dwim(self.basedir, relpath)
if not os.path.exists(path):
pathdir = os.path.dirname(path)
if not os.path.isdir(pathdir):
try:
os.makedirs(pathdir, mode=0700)
except OSError, e:
raise errors.AnsibleError("cannot create the path for the password lookup: %s (error was %s)" % (pathdir, str(e)))
chars = "".join([getattr(string,c,c) for c in use_chars]).replace('"','').replace("'",'')
password = ''.join(random.choice(chars) for _ in range(length))
if encrypt is not None:
salt = self.random_salt()
content = '%s salt=%s' % (password, salt)
else:
content = password
with open(path, 'w') as f:
os.chmod(path, 0600)
f.write(content + '\n')
else:
content = open(path).read().rstrip()
sep = content.find(' ')
if sep >= 0:
password = content[:sep]
salt = content[sep+1:].split('=')[1]
else:
password = content
salt = None
# crypt requested, add salt if missing
if (encrypt is not None and not salt):
salt = self.random_salt()
content = '%s salt=%s' % (password, salt)
with open(path, 'w') as f:
os.chmod(path, 0600)
f.write(content + '\n')
# crypt not requested, remove salt if present
elif (encrypt is None and salt):
with open(path, 'w') as f:
os.chmod(path, 0600)
f.write(password + '\n')
if encrypt:
password = utils.do_encrypt(password, encrypt, salt=salt)
ret.append(password)
return ret
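# Example usage in a playbook (illustrative; the path and parameters are
# placeholders):
#
#   vars:
#     db_password: "{{ lookup('password', 'credentials/db.txt length=16 chars=ascii_letters,digits') }}"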
|
neumerance/cloudloon2
|
refs/heads/master
|
.venv/lib/python2.7/site-packages/django/conf/locale/pt/__init__.py
|
12133432
| |
iivic/BoiseStateX
|
refs/heads/master
|
lms/djangoapps/instructor_task/migrations/__init__.py
|
12133432
| |
openjck/kuma
|
refs/heads/master
|
kuma/actioncounters/migrations/__init__.py
|
12133432
| |
switchboardOp/ansible
|
refs/heads/devel
|
contrib/inventory/digital_ocean.py
|
21
|
#!/usr/bin/env python
'''
DigitalOcean external inventory script
======================================
Generates Ansible inventory of DigitalOcean Droplets.
In addition to the --list and --host options used by Ansible, there are options
for generating JSON of other DigitalOcean data. This is useful when creating
droplets. For example, --regions will return all the DigitalOcean Regions.
This information can also be easily found in the cache file, whose default
location is /tmp/ansible-digital_ocean.cache.
The --pretty (-p) option pretty-prints the output for better human readability.
----
Although the cache stores all the information received from DigitalOcean,
the cache is not used for current droplet information (in --list, --host,
--all, and --droplets). This is so that accurate droplet information is always
found. You can force this script to use the cache with --force-cache.
----
Configuration is read from `digital_ocean.ini`, then from environment variables,
and then from command-line arguments.
Most notably, the DigitalOcean API Token must be specified. It can be specified
in the INI file or with the following environment variables:
export DO_API_TOKEN='abc123' or
export DO_API_KEY='abc123'
Alternatively, it can be passed on the command-line with --api-token.
If you specify DigitalOcean credentials in the INI file, a handy way to
get them into your environment (e.g., to use the digital_ocean module)
is to use the output of the --env option with export:
export $(digital_ocean.py --env)
----
The following groups are generated from --list:
- ID (droplet ID)
- NAME (droplet NAME)
- image_ID
- image_NAME
- distro_NAME (distribution NAME from image)
- region_NAME
- size_NAME
- status_STATUS
For each host, the following variables are registered:
- do_backup_ids
- do_created_at
- do_disk
- do_features - list
- do_id
- do_image - object
- do_ip_address
- do_private_ip_address
- do_kernel - object
- do_locked
- do_memory
- do_name
- do_networks - object
- do_next_backup_window
- do_region - object
- do_size - object
- do_size_slug
- do_snapshot_ids - list
- do_status
- do_tags
- do_vcpus
- do_volume_ids
-----
```
usage: digital_ocean.py [-h] [--list] [--host HOST] [--all]
[--droplets] [--regions] [--images] [--sizes]
[--ssh-keys] [--domains] [--pretty]
[--cache-path CACHE_PATH]
[--cache-max_age CACHE_MAX_AGE]
[--force-cache]
[--refresh-cache]
[--api-token API_TOKEN]
Produce an Ansible Inventory file based on DigitalOcean credentials
optional arguments:
-h, --help show this help message and exit
--list List all active Droplets as Ansible inventory
(default: True)
--host HOST Get all Ansible inventory variables about a specific
Droplet
--all List all DigitalOcean information as JSON
--droplets List Droplets as JSON
--regions List Regions as JSON
--images List Images as JSON
--sizes List Sizes as JSON
--ssh-keys List SSH keys as JSON
--domains List Domains as JSON
--pretty, -p Pretty-print results
--cache-path CACHE_PATH
Path to the cache files (default: .)
--cache-max_age CACHE_MAX_AGE
Maximum age of the cached items (default: 0)
--force-cache Only use data from the cache
--refresh-cache Force refresh of cache by making API requests to
DigitalOcean (default: False - use cache files)
--api-token API_TOKEN, -a API_TOKEN
DigitalOcean API Token
```
'''
# (c) 2013, Evan Wies <evan@neomantra.net>
#
# Inspired by the EC2 inventory plugin:
# https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.py
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import os
import sys
import re
import argparse
from time import time
import ast
try:
import ConfigParser
except ImportError:
import configparser as ConfigParser
try:
import json
except ImportError:
import simplejson as json
try:
from dopy.manager import DoManager
except ImportError as e:
sys.exit("failed=True msg='`dopy` library required for this script'")
class DigitalOceanInventory(object):
###########################################################################
# Main execution path
###########################################################################
def __init__(self):
''' Main execution path '''
# DigitalOceanInventory data
self.data = {} # All DigitalOcean data
self.inventory = {} # Ansible Inventory
# Define defaults
self.cache_path = '.'
self.cache_max_age = 0
self.use_private_network = False
self.group_variables = {}
# Read settings, environment variables, and CLI arguments
self.read_settings()
self.read_environment()
self.read_cli_args()
# Verify credentials were set
if not hasattr(self, 'api_token'):
sys.stderr.write('''Could not find values for DigitalOcean api_token.
They must be specified via either ini file, command line argument (--api-token),
or environment variables (DO_API_TOKEN)\n''')
sys.exit(-1)
# env command, show DigitalOcean credentials
if self.args.env:
print("DO_API_TOKEN=%s" % self.api_token)
sys.exit(0)
# Manage cache
self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
self.cache_refreshed = False
if self.is_cache_valid():
self.load_from_cache()
if len(self.data) == 0:
if self.args.force_cache:
sys.stderr.write('''Cache is empty and --force-cache was specified\n''')
sys.exit(-1)
self.manager = DoManager(None, self.api_token, api_version=2)
# Pick the json_data to print based on the CLI command
if self.args.droplets:
self.load_from_digital_ocean('droplets')
json_data = {'droplets': self.data['droplets']}
elif self.args.regions:
self.load_from_digital_ocean('regions')
json_data = {'regions': self.data['regions']}
elif self.args.images:
self.load_from_digital_ocean('images')
json_data = {'images': self.data['images']}
elif self.args.sizes:
self.load_from_digital_ocean('sizes')
json_data = {'sizes': self.data['sizes']}
elif self.args.ssh_keys:
self.load_from_digital_ocean('ssh_keys')
json_data = {'ssh_keys': self.data['ssh_keys']}
elif self.args.domains:
self.load_from_digital_ocean('domains')
json_data = {'domains': self.data['domains']}
elif self.args.all:
self.load_from_digital_ocean()
json_data = self.data
elif self.args.host:
json_data = self.load_droplet_variables_for_host()
        else: # '--list' is handled last so it acts as the default
self.load_from_digital_ocean('droplets')
self.build_inventory()
json_data = self.inventory
if self.cache_refreshed:
self.write_to_cache()
if self.args.pretty:
print(json.dumps(json_data, sort_keys=True, indent=2))
else:
print(json.dumps(json_data))
# That's all she wrote...
###########################################################################
# Script configuration
###########################################################################
def read_settings(self):
''' Reads the settings from the digital_ocean.ini file '''
config = ConfigParser.SafeConfigParser()
config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini')
# Credentials
if config.has_option('digital_ocean', 'api_token'):
self.api_token = config.get('digital_ocean', 'api_token')
# Cache related
if config.has_option('digital_ocean', 'cache_path'):
self.cache_path = config.get('digital_ocean', 'cache_path')
if config.has_option('digital_ocean', 'cache_max_age'):
self.cache_max_age = config.getint('digital_ocean', 'cache_max_age')
# Private IP Address
if config.has_option('digital_ocean', 'use_private_network'):
self.use_private_network = config.getboolean('digital_ocean', 'use_private_network')
# Group variables
if config.has_option('digital_ocean', 'group_variables'):
self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables'))
def read_environment(self):
''' Reads the settings from environment variables '''
# Setup credentials
if os.getenv("DO_API_TOKEN"):
self.api_token = os.getenv("DO_API_TOKEN")
if os.getenv("DO_API_KEY"):
self.api_token = os.getenv("DO_API_KEY")
def read_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')
parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)')
parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')
parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON')
parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
parser.add_argument('--images', action='store_true', help='List Images as JSON')
parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON')
parser.add_argument('--domains', action='store_true', help='List Domains as JSON')
parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results')
parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
parser.add_argument('--refresh-cache', '-r', action='store_true', default=False,
help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
parser.add_argument('--env', '-e', action='store_true', help='Display DO_API_TOKEN')
parser.add_argument('--api-token', '-a', action='store', help='DigitalOcean API Token')
self.args = parser.parse_args()
if self.args.api_token:
self.api_token = self.args.api_token
# Make --list default if none of the other commands are specified
if (not self.args.droplets and not self.args.regions and
not self.args.images and not self.args.sizes and
not self.args.ssh_keys and not self.args.domains and
not self.args.all and not self.args.host):
self.args.list = True
###########################################################################
# Data Management
###########################################################################
def load_from_digital_ocean(self, resource=None):
'''Get JSON from DigitalOcean API'''
if self.args.force_cache and os.path.isfile(self.cache_filename):
return
# We always get fresh droplets
if self.is_cache_valid() and not (resource == 'droplets' or resource is None):
return
if self.args.refresh_cache:
resource = None
if resource == 'droplets' or resource is None:
self.data['droplets'] = self.manager.all_active_droplets()
self.cache_refreshed = True
if resource == 'regions' or resource is None:
self.data['regions'] = self.manager.all_regions()
self.cache_refreshed = True
if resource == 'images' or resource is None:
self.data['images'] = self.manager.all_images(filter=None)
self.cache_refreshed = True
if resource == 'sizes' or resource is None:
self.data['sizes'] = self.manager.sizes()
self.cache_refreshed = True
if resource == 'ssh_keys' or resource is None:
self.data['ssh_keys'] = self.manager.all_ssh_keys()
self.cache_refreshed = True
if resource == 'domains' or resource is None:
self.data['domains'] = self.manager.all_domains()
self.cache_refreshed = True
def build_inventory(self):
'''Build Ansible inventory of droplets'''
self.inventory = {
'all': {
'hosts': [],
'vars': self.group_variables
},
'_meta': {'hostvars': {}}
}
# add all droplets by id and name
for droplet in self.data['droplets']:
# when using private_networking, the API reports the private one in "ip_address".
if 'private_networking' in droplet['features'] and not self.use_private_network:
for net in droplet['networks']['v4']:
if net['type'] == 'public':
dest = net['ip_address']
else:
continue
else:
dest = droplet['ip_address']
self.inventory['all']['hosts'].append(dest)
self.inventory[droplet['id']] = [dest]
self.inventory[droplet['name']] = [dest]
# groups that are always present
for group in ('region_' + droplet['region']['slug'],
'image_' + str(droplet['image']['id']),
'size_' + droplet['size']['slug'],
'distro_' + self.to_safe(droplet['image']['distribution']),
'status_' + droplet['status']):
if group not in self.inventory:
self.inventory[group] = {'hosts': [], 'vars': {}}
self.inventory[group]['hosts'].append(dest)
# groups that are not always present
for group in (droplet['image']['slug'],
droplet['image']['name']):
if group:
image = 'image_' + self.to_safe(group)
if image not in self.inventory:
self.inventory[image] = {'hosts': [], 'vars': {}}
self.inventory[image]['hosts'].append(dest)
if droplet['tags']:
for tag in droplet['tags']:
if tag not in self.inventory:
self.inventory[tag] = {'hosts': [], 'vars': {}}
self.inventory[tag]['hosts'].append(dest)
# hostvars
info = self.do_namespace(droplet)
self.inventory['_meta']['hostvars'][dest] = info
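        # The resulting structure is standard Ansible dynamic-inventory JSON,
        # e.g. (values illustrative):
        #   {"all": {"hosts": ["203.0.113.10"], "vars": {}},
        #    "region_nyc1": {"hosts": ["203.0.113.10"], "vars": {}},
        #    "_meta": {"hostvars": {"203.0.113.10": {"do_id": 42, ...}}}}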
def load_droplet_variables_for_host(self):
'''Generate a JSON response to a --host call'''
host = int(self.args.host)
droplet = self.manager.show_droplet(host)
info = self.do_namespace(droplet)
return {'droplet': info}
###########################################################################
# Cache Management
###########################################################################
def is_cache_valid(self):
        ''' Determines if the cache file has expired or is still valid '''
if os.path.isfile(self.cache_filename):
mod_time = os.path.getmtime(self.cache_filename)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
return True
return False
def load_from_cache(self):
''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
try:
cache = open(self.cache_filename, 'r')
json_data = cache.read()
cache.close()
data = json.loads(json_data)
except IOError:
data = {'data': {}, 'inventory': {}}
self.data = data['data']
self.inventory = data['inventory']
def write_to_cache(self):
''' Writes data in JSON format to a file '''
data = {'data': self.data, 'inventory': self.inventory}
json_data = json.dumps(data, sort_keys=True, indent=2)
cache = open(self.cache_filename, 'w')
cache.write(json_data)
cache.close()
###########################################################################
# Utilities
###########################################################################
def push(self, my_dict, key, element):
        ''' Pushes an element onto an array that may not have been defined in the dict '''
if key in my_dict:
my_dict[key].append(element)
else:
my_dict[key] = [element]
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
        return re.sub(r"[^A-Za-z0-9\-\.]", "_", word)
def do_namespace(self, data):
''' Returns a copy of the dictionary with all the keys put in a 'do_' namespace '''
info = {}
for k, v in data.items():
info['do_' + k] = v
return info
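        # Illustrative: do_namespace({'id': 42, 'name': 'web1'}) returns
        # {'do_id': 42, 'do_name': 'web1'}, which is how the do_* hostvars
        # listed in the module docstring are produced.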
###########################################################################
# Run the script
DigitalOceanInventory()
|
pombredanne/scancode-toolkit
|
refs/heads/master
|
setup.py
|
1
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import absolute_import, print_function
import io
import os
import re
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
return io.open(
join(dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8')
).read()
long_description = '%s\n%s' % (
read('README.rst'),
re.sub(':obj:`~?(.*?)`', r'``\1``', read('CHANGELOG.rst'))
)
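# The re.sub above rewrites Sphinx roles such as :obj:`~foo.Bar` in the
# changelog to plain ``foo.Bar`` literals, so the long description renders
# outside Sphinx (e.g. on PyPI).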
setup(
name='scancode-toolkit',
version='1.3.1',
license='Apache-2.0 with ScanCode acknowledgment and CC0-1.0 and others',
description='ScanCode is a tool to scan code for license, copyright and other interesting facts.',
long_description=long_description,
author='ScanCode',
author_email='info@scancode.io',
url='https://github.com/nexB/scancode-toolkit',
packages=find_packages('src'),
package_dir={'': 'src'},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=False,
classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'License :: OSI Approved :: CC0',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Utilities',
],
keywords=[
'license', 'filetype', 'urn', 'date', 'codec',
],
install_requires=[
# cluecode
'py2-ipaddress>=2.0',
'url>=0.1.4',
'publicsuffix2',
# TODO: upgrade to nltk==3.0.1
'nltk==2.0b4',
# extractcode
'patch >= 1.14.2, < 1.15 ',
# to work around bug http://bugs.python.org/issue19839
# on multistream bzip2 files
'bz2file >= 0.98',
# licensedcode
'PyYAML >=3.0, <4.0',
# textcode
'Beautifulsoup == 3.2.1',
'Beautifulsoup4 == 4.3.2',
'html5lib',
'six',
# typecode and textcode
'pygments>=2.0.1',
'pdfminer>=20140328',
# typecode
'chardet>=2.1.1',
'binaryornot>=0.3.0',
# scancode and AboutCode
'click',
'jinja2==2.7.3',
'MarkupSafe==0.23',
'colorama',
# AboutCode
'about-code-tool==0.9.0',
],
extras_require={
'base': [
'certifi',
'setuptools',
'wheel',
'pip',
'wincertstore',
],
'dev': [
'pytest',
'execnet',
'py',
'pytest-xdist',
'bumpversion',
],
},
entry_points={
'console_scripts': [
'scancode = scancode.cli:scancode',
],
},
)
|
brain-hacker-antilammer/pygooglevoice
|
refs/heads/master
|
examples/folders.py
|
39
|
from googlevoice import Voice,util,settings
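# Logs in once, then prints every message in each feed named by
# settings.FEEDS (each feed name is looked up as a method on the Voice
# instance below).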
voice = Voice()
voice.login()
for feed in settings.FEEDS:
util.print_(feed.title())
for message in getattr(voice, feed)().messages:
util.print_('\t', message)
|
tavendo/AutobahnPython
|
refs/heads/master
|
examples/twisted/wamp/pubsub/basic/frontend.py
|
3
|
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
"""
An application component that subscribes and receives events, and
    stops after having received 5 events.
"""
@inlineCallbacks
def onJoin(self, details):
print("session attached")
self.received = 0
sub = yield self.subscribe(self.on_event, 'com.myapp.topic1')
print("Subscribed to com.myapp.topic1 with {}".format(sub.id))
def on_event(self, i):
print("Got event: {}".format(i))
self.received += 1
# self.config.extra for configuration, etc. (see [A])
if self.received > self.config.extra['max_events']:
print("Received enough events; disconnecting.")
self.leave()
def onDisconnect(self):
print("disconnected")
if reactor.running:
reactor.stop()
if __name__ == '__main__':
url = environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws")
realm = "crossbardemo"
extra=dict(
max_events=5, # [A] pass in additional configuration
)
runner = ApplicationRunner(url, realm, extra)
    runner.run(Component)
|
wileykestner/sf_real_estate
|
refs/heads/master
|
sf.py
|
1
|
#!/usr/bin/env python
import argparse
from sf_real_estate.present_apns_with_address.present_apns_with_address_use_case import PresentAPNSWithAddressUseCase
from sf_real_estate.present_apns_with_address.remote_apn_repository import RemoteAPNRepository
from sf_real_estate.present_apns_with_address.apn_list_deserializer import APNListDeserializer
from sf_real_estate.present_recorded_documents.present_recorded_documents_use_case import \
PresentRecordedDocumentsUseCase
from sf_real_estate.present_recorded_documents.recorded_document_list_deserializer import \
RecordedDocumentListDeserializer
from sf_real_estate.present_recorded_documents.remote_recorded_document_repository import \
RemoteRecordedDocumentRepository
from sf_real_estate.present_recorded_document_participants.recorded_document_participant_deserializer import \
RecordedDocumentParticipantDeserializer
from sf_real_estate.present_recorded_document_participants.remote_recorded_document_participant_repository import \
RemoteRecordedDocumentParticipantRepository
from sf_real_estate.present_recorded_document_participants.present_recorded_document_participants_use_case import \
PresentRecordedDocumentParticipantsUseCase
class PresentRecordedDocumentParticipantsObserver(object):
def __init__(self, present_recorded_documents_observer):
self.present_recorded_documents_observer = present_recorded_documents_observer
def did_present_particpants(self, participants, recorded_document):
for index, participant in enumerate(participants):
print("\t%d. %s, %s" % (index + 1, participant.name, participant.role))
print("")
self.present_recorded_documents_observer.present_cached_recorded_documents()
def did_fail_to_present_particpants(self, recorded_document, exception):
print(exception)
class PresentRecordedDocumentsObserver(object):
def did_present_recorded_documents(self, recorded_documents, apn_result):
self._recorded_documents = recorded_documents
self.present_cached_recorded_documents()
def present_cached_recorded_documents(self):
for index, document in enumerate(self._recorded_documents):
values = (document.recorded_date, document.document_identifier, document.document_type)
formatted_document = "%s\t%s\t%s" % values
print("%d. %s" % (index + 1, formatted_document))
print("")
selected_document_index = int(input("Enter document number to see more detail: ")) - 1
selected_document = self._recorded_documents[selected_document_index]
print("")
self.present_recorded_document(selected_document)
@staticmethod
def did_fail_to_present_recorded_documents(exception, context):
print(exception, context)
def present_recorded_document(self, recorded_document):
print("Now fetching recorded document '%s' ..." % (recorded_document.document_identifier))
print("")
participants_observer = PresentRecordedDocumentParticipantsObserver(self)
deserializer = RecordedDocumentParticipantDeserializer()
repository = RemoteRecordedDocumentParticipantRepository(deserializer)
use_case = PresentRecordedDocumentParticipantsUseCase(repository=repository)
use_case.execute(recorded_document_identifier=recorded_document.document_url,
observer=participants_observer)
class APNListObserver(object):
def did_find_apn_list_for_address(self, apn_list, address, county):
self._apn_list = apn_list
print("")
print("%d APN's found for: '%s', '%s'" % (len(apn_list), address, county))
print("")
for index, apn_result in enumerate(self._apn_list):
formatted_apn_result = APNListObserver.apn_result_to_string(apn_result)
print("%d. %s" % (index + 1, formatted_apn_result))
print("")
selected_apn_index = int(input("Enter address number for more detail: ")) - 1
selected_apn = self._apn_list[selected_apn_index]
print("")
APNListObserver.present_recorded_documents(selected_apn)
@staticmethod
def did_find_apn_for_address(apn, address, county):
template = "Assessor's Parcel Number (APN) '{:s}-{:s}' found for '{:s}'"
message = template.format(apn.block_number, apn.lot_number, apn.address)
print("")
print(message)
print("")
APNListObserver.present_recorded_documents(apn)
@staticmethod
def present_recorded_documents(apn):
template = "Now fetching recorded documents referencing APN '{:s}-{:s}'…"
message = template.format(apn.block_number, apn.lot_number)
print(message)
print("")
present_recorded_documents_observer = PresentRecordedDocumentsObserver()
deserializer = RecordedDocumentListDeserializer()
recorded_document_repository = RemoteRecordedDocumentRepository(deserializer)
use_case = PresentRecordedDocumentsUseCase(recorded_document_repository=recorded_document_repository)
use_case.execute(apn_identifier=apn, observer=present_recorded_documents_observer)
@staticmethod
def did_fail_to_find_apn_for_address(address, county):
print("There were no APNs found for: %s, '%s'", address, county)
@staticmethod
def did_fail_to_fetch_apn_for_address(address, county, exception, context):
print("Error fetching APN for: '%s', '%s'" % (address, county))
print("")
print("Exception:")
print("\t%s" % exception)
print("Context:")
print("\t%s" % context)
@staticmethod
def apn_result_to_string(apn_result):
values = (apn_result.address, apn_result.block_number, apn_result.lot_number)
formatted_apn_result = "%s | %s | %s" % values
return formatted_apn_result
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--address', help='Street address, i.e. \'1250 Jones\'')
parser.add_argument('-c', '--county', help='County, i.e. \'sanfrancisco\'')
args = parser.parse_args()
observer = APNListObserver()
deserializer = APNListDeserializer()
repository = RemoteAPNRepository(deserializer)
use_case = PresentAPNSWithAddressUseCase(apn_repository=repository)
print("")
address = args.address or input("Enter a street address in San Francisco: ")
county = args.county or 'sanfrancisco'
use_case.execute(address, county, observer)
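# Illustrative invocation (address and county values are the same examples
# used in the argparse help strings above):
#   ./sf.py --address '1250 Jones' --county sanfrancisco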
|
chrisburr/scikit-learn
|
refs/heads/master
|
sklearn/datasets/mlcomp.py
|
289
|
# Copyright (c) 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
"""Glue code to load http://mlcomp.org data as a scikit.learn dataset"""
import os
import numbers
from sklearn.datasets.base import load_files
def _load_document_classification(dataset_path, metadata, set_=None, **kwargs):
if set_ is not None:
dataset_path = os.path.join(dataset_path, set_)
return load_files(dataset_path, metadata.get('description'), **kwargs)
LOADERS = {
'DocumentClassification': _load_document_classification,
# TODO: implement the remaining domain formats
}
def load_mlcomp(name_or_id, set_="raw", mlcomp_root=None, **kwargs):
"""Load a datasets as downloaded from http://mlcomp.org
Parameters
----------
name_or_id : the integer id or the string name metadata of the MLComp
dataset to load
set_ : select the portion to load: 'train', 'test' or 'raw'
mlcomp_root : the filesystem path to the root folder where MLComp datasets
are stored, if mlcomp_root is None, the MLCOMP_DATASETS_HOME
environment variable is looked up instead.
**kwargs : domain specific kwargs to be passed to the dataset loader.
Read more in the :ref:`User Guide <datasets>`.
Returns
-------
data : Bunch
Dictionary-like object, the interesting attributes are:
        'filenames', the files holding the raw data to learn, 'target', the
classification labels (integer index), 'target_names',
the meaning of the labels, and 'DESCR', the full description of the
dataset.
Note on the lookup process: depending on the type of name_or_id,
will choose between integer id lookup or metadata name lookup by
looking at the unzipped archives and metadata file.
TODO: implement zip dataset loading too
"""
if mlcomp_root is None:
try:
mlcomp_root = os.environ['MLCOMP_DATASETS_HOME']
except KeyError:
raise ValueError("MLCOMP_DATASETS_HOME env variable is undefined")
mlcomp_root = os.path.expanduser(mlcomp_root)
mlcomp_root = os.path.abspath(mlcomp_root)
mlcomp_root = os.path.normpath(mlcomp_root)
if not os.path.exists(mlcomp_root):
raise ValueError("Could not find folder: " + mlcomp_root)
# dataset lookup
if isinstance(name_or_id, numbers.Integral):
# id lookup
dataset_path = os.path.join(mlcomp_root, str(name_or_id))
else:
# assume name based lookup
dataset_path = None
expected_name_line = "name: " + name_or_id
for dataset in os.listdir(mlcomp_root):
metadata_file = os.path.join(mlcomp_root, dataset, 'metadata')
if not os.path.exists(metadata_file):
continue
with open(metadata_file) as f:
for line in f:
if line.strip() == expected_name_line:
dataset_path = os.path.join(mlcomp_root, dataset)
break
if dataset_path is None:
raise ValueError("Could not find dataset with metadata line: " +
expected_name_line)
# loading the dataset metadata
metadata = dict()
metadata_file = os.path.join(dataset_path, 'metadata')
if not os.path.exists(metadata_file):
raise ValueError(dataset_path + ' is not a valid MLComp dataset')
with open(metadata_file) as f:
for line in f:
if ":" in line:
key, value = line.split(":", 1)
metadata[key.strip()] = value.strip()
    format = metadata.get('format', 'unknown')
loader = LOADERS.get(format)
if loader is None:
raise ValueError("No loader implemented for format: " + format)
return loader(dataset_path, metadata, set_=set_, **kwargs)
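# Illustrative usage (assumes the classic '20news-18828' MLComp dataset has
# been unzipped under $MLCOMP_DATASETS_HOME; the name is an example only):
#   news_train = load_mlcomp('20news-18828', set_='train')
#   print(news_train.target_names)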
|
fernandezcuesta/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_poolgroupdeploymentpolicy.py
|
7
|
#!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_poolgroupdeploymentpolicy
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of PoolGroupDeploymentPolicy Avi RESTful Object
description:
- This module is used to configure PoolGroupDeploymentPolicy object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
auto_disable_old_prod_pools:
description:
- It will automatically disable old production pools once there is a new production candidate.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
cloud_ref:
description:
- It is a reference to an object of type cloud.
description:
description:
- User defined description for the object.
evaluation_duration:
description:
- Duration of evaluation period for automatic deployment.
- Allowed values are 60-86400.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
name:
description:
- The name of the pool group deployment policy.
required: true
rules:
description:
- List of pgdeploymentrule.
scheme:
description:
- Deployment scheme.
- Enum options - BLUE_GREEN, CANARY.
- Default value when not specified in API or module is interpreted by Avi Controller as BLUE_GREEN.
target_test_traffic_ratio:
description:
- Target traffic ratio before pool is made production.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 100.
tenant_ref:
description:
- It is a reference to an object of type tenant.
test_traffic_ratio_rampup:
description:
- Ratio of the traffic that is sent to the pool under test.
- Test ratio of 100 means blue green.
- Allowed values are 1-100.
- Default value when not specified in API or module is interpreted by Avi Controller as 100.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Uuid of the pool group deployment policy.
webhook_ref:
description:
            - Webhook configured with a url to which the avi controller will pass back information about the pool group, old and new pool information, and current deployment rule results.
- It is a reference to an object of type webhook.
- Field introduced in 17.1.1.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create PoolGroupDeploymentPolicy object
avi_poolgroupdeploymentpolicy:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_poolgroupdeploymentpolicy
"""
RETURN = '''
obj:
description: PoolGroupDeploymentPolicy (api/poolgroupdeploymentpolicy) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
auto_disable_old_prod_pools=dict(type='bool',),
cloud_ref=dict(type='str',),
description=dict(type='str',),
evaluation_duration=dict(type='int',),
name=dict(type='str', required=True),
rules=dict(type='list',),
scheme=dict(type='str',),
target_test_traffic_ratio=dict(type='int',),
tenant_ref=dict(type='str',),
test_traffic_ratio_rampup=dict(type='int',),
url=dict(type='str',),
uuid=dict(type='str',),
webhook_ref=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'poolgroupdeploymentpolicy',
set([]))
if __name__ == '__main__':
main()
|
joshuajan/odoo
|
refs/heads/master
|
addons/website_membership/controllers/__init__.py
|
7372
|
import main
|
Commonists/Commons2Data
|
refs/heads/master
|
commons2data.py
|
1
|
# -*- coding: utf-8 -*-
import facts
import items
import statements
def main():
from argparse import ArgumentParser
description = 'Translating Commons categories semantic into Wikidata statements'
parser = ArgumentParser(description=description)
parser.add_argument('-c', '--category',
type=str,
dest='category',
required=True,
default="query",
help='Category from which to generate the statements')
args = parser.parse_args()
its = items.loads_items(args.category)
fcts = facts.loads_facts(args.category)
statements.write(args.category, its, fcts)
if __name__ == "__main__":
main()
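# Illustrative invocation (the category name is an example):
#   python commons2data.py --category 'Paintings_by_Claude_Monet'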
|
brijeshkesariya/odoo
|
refs/heads/8.0
|
addons/mass_mailing/__openerp__.py
|
312
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
{
'name': 'Mass Mailing Campaigns',
'summary': 'Design, send and track emails',
'description': """
Easily send mass mailing to your leads, opportunities or customers. Track
marketing campaigns performance to improve conversion rates. Design
professional emails and reuse templates in a few clicks.
""",
'version': '2.0',
'author': 'OpenERP',
'website': 'https://www.odoo.com/page/mailing',
'category': 'Marketing',
'depends': [
'mail',
'email_template',
'marketing',
'web_kanban_gauge',
'web_kanban_sparkline',
'website_mail',
],
'data': [
'data/mail_data.xml',
'data/mass_mailing_data.xml',
'wizard/mail_compose_message_view.xml',
'wizard/test_mailing.xml',
'views/mass_mailing_report.xml',
'views/mass_mailing.xml',
'views/res_config.xml',
'views/res_partner.xml',
'views/email_template.xml',
'views/website_mass_mailing.xml',
'views/snippets.xml',
'security/ir.model.access.csv',
],
'qweb': [],
'demo': [
'data/mass_mailing_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
pontikos/phenopolis
|
refs/heads/master
|
vcf/__init__.py
|
4
|
from config import config
if config.IMPORT_PYSAM_PRIMER3:
import pysam
# VCF query
def vcf_query(chrom=None, pos=None, ref=None, alt=None, variant_str=None, individual=None, verbose=False, limit=100, release='mainset_July2016'):
if variant_str:
variant_str=str(variant_str).strip().replace('_','-')
chrom, pos, ref, alt = variant_str.split('-')
tb=pysam.TabixFile('UCLex/%s/%s_chr%s.vcf.gz' % (release, release, chrom,))
#mainset_February2016_chrX_filtered.vcf.gz
region=str('%s:%s-%s'%(chrom, pos, int(pos),))
headers=[h for h in tb.header]
headers=(headers[len(headers)-1]).strip().split('\t')
records=tb.fetch(region=region)
records=[r.split('\t') for r in records]
def response(POS, REF, ALT, index, geno, chrom, pos):
alleles=[geno['REF']]+geno['ALT'].split(',')
homozygous_genotype='/'.join([str(index),str(index)])
heterozygous_genotype='/'.join(['0',str(index)])
variant=dict()
variant['POS']=POS
variant['REF']=REF
variant['ALT']=ALT
variant['index']=index
variant['variant_id']='-'.join([str(chrom),str(POS),variant['REF'],variant['ALT']])
variant['synonym_variant_id']='{}-{}-{}-{}'.format(str(chrom), str(pos), ref, alt,)
variant['hgvs']='chr%s:g.%s%s>%s' % (str(chrom), str(POS), REF, ALT,)
#print [geno[h].split(':')[0].split('/') for h in geno]
variant['hom_samples']=[h for h in geno if geno[h].split(':')[0]==homozygous_genotype][0:limit]
variant['HOM_COUNT']=len(variant['hom_samples'])
variant['het_samples']=[h for h in geno if geno[h].split(':')[0]==heterozygous_genotype][0:limit]
variant['HET_COUNT']=len(variant['het_samples'])
variant['wt_samples']=[h for h in geno if geno[h].split(':')[0]=='0/0'][1:100]
variant['WT_COUNT']=len([h for h in geno if geno[h].split(':')[0]=='0/0'])
variant['MISS_COUNT']=len([h for h in geno if geno[h].split(':')[0]=='./.'])
variant['allele_num']= 2*(variant['HOM_COUNT'] + variant['HET_COUNT']+variant['WT_COUNT'])
variant['allele_count']=2*variant['HOM_COUNT'] + variant['HET_COUNT']
if individual: variant['individual']=geno[individual]
#variant['site_quality'] = variant['QUAL']
#variant['filter'] = variant['FILTER']
if variant['WT_COUNT']==0:
variant['allele_freq'] = None
else:
variant['allele_freq'] = float(variant['HET_COUNT']+2*variant['HOM_COUNT']) / float(2*variant['WT_COUNT'])
samples=variant['het_samples']+variant['hom_samples']
#variant['hpo']=[p for p in get_db(app.config['DB_NAME_PATIENTS']).patients.find({'external_id':{'$in':samples}},{'_id':0,'features':1,'external_id':1})]
return variant
for r in records:
geno=dict(zip(headers, r))
POS=geno['POS']
REF=geno['REF']
if verbose:
print 'POS', POS
print 'REF', REF
for i, ALT, in enumerate(geno['ALT'].split(',')):
if verbose: print 'ALT', ALT
# insertion
if ref=='-' and REF+alt==ALT: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
# deletion
# replace leftmost
elif alt=='-' and ALT==REF.replace(ref,''): return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
# replace rightmost
elif alt=='-' and ALT==REF[::-1].replace(ref[::-1], "", 1)[::-1]: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
#
elif alt=='-' and ref==REF and ALT=='*': return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
elif alt=='0' and ALT=='*' and ref==REF: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
elif alt==ALT and ref==REF: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
continue
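# Illustrative call (the variant string below is hypothetical; the format is
# chrom-pos-ref-alt, as parsed above):
#   variant = vcf_query(variant_str='1-1234567-A-G')
#   if variant: print variant['variant_id'], variant['allele_count']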
# VCF query
def vcf_query2(chrom=None, pos=None, ref=None, alt=None, variant_str=None, individual=None, verbose=False, limit=100):
if variant_str:
variant_str=str(variant_str).strip().replace('_','-')
chrom, pos, ref, alt = variant_str.split('-')
tb=pysam.TabixFile('uclex_files/current/chr%s.vcf.gz' % chrom,)
#mainset_February2016_chrX_filtered.vcf.gz
region=str('%s:%s-%s'%(chrom, pos, int(pos),))
headers=[h for h in tb.header]
headers=(headers[len(headers)-1]).strip().split('\t')
records=tb.fetch(region=region)
records=[r.split('\t') for r in records]
def response(POS, REF, ALT, index, geno, chrom, pos):
alleles=[geno['REF']]+geno['ALT'].split(',')
homozygous_genotype='/'.join([str(index),str(index)])
heterozygous_genotype='/'.join(['0',str(index)])
variant=dict()
variant['POS']=POS
variant['REF']=REF
variant['ALT']=ALT
variant['index']=index
variant['variant_id']='-'.join([str(chrom),str(POS),variant['REF'],variant['ALT']])
variant['synonym_variant_id']='{}-{}-{}-{}'.format(str(chrom), str(pos), ref, alt,)
variant['hgvs']='chr%s:g.%s%s>%s' % (str(chrom), str(POS), REF, ALT,)
#print [geno[h].split(':')[0].split('/') for h in geno]
variant['hom_samples']=[h for h in geno if geno[h].split(':')[0]==homozygous_genotype][0:limit]
variant['HOM_COUNT']=len(variant['hom_samples'])
variant['het_samples']=[h for h in geno if geno[h].split(':')[0]==heterozygous_genotype][0:limit]
variant['HET_COUNT']=len(variant['het_samples'])
variant['wt_samples']=[h for h in geno if geno[h].split(':')[0]=='0/0'][1:100]
variant['WT_COUNT']=len([h for h in geno if geno[h].split(':')[0]=='0/0'])
variant['MISS_COUNT']=len([h for h in geno if geno[h].split(':')[0]=='./.'])
variant['allele_num']= 2*(variant['HOM_COUNT'] + variant['HET_COUNT']+variant['WT_COUNT'])
variant['allele_count']=2*variant['HOM_COUNT'] + variant['HET_COUNT']
if individual: variant['individual']=geno[individual]
#variant['site_quality'] = variant['QUAL']
#variant['filter'] = variant['FILTER']
if variant['WT_COUNT']==0:
variant['allele_freq'] = None
else:
variant['allele_freq'] = float(variant['HET_COUNT']+2*variant['HOM_COUNT']) / float(2*variant['WT_COUNT'])
samples=variant['het_samples']+variant['hom_samples']
#variant['hpo']=[p for p in get_db(app.config['DB_NAME_PATIENTS']).patients.find({'external_id':{'$in':samples}},{'_id':0,'features':1,'external_id':1})]
return variant
for r in records:
geno=dict(zip(headers, r))
POS=geno['POS']
REF=geno['REF']
if verbose:
print 'POS', POS
print 'REF', REF
for i, ALT, in enumerate(geno['ALT'].split(',')):
if verbose: print 'ALT', ALT
# insertion
if ref=='-' and REF+alt==ALT: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
# deletion
# replace leftmost
elif alt=='-' and ALT==REF.replace(ref,''): return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
# replace rightmost
elif alt=='-' and ALT==REF[::-1].replace(ref[::-1], "", 1)[::-1]: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
#
elif alt=='-' and ref==REF and ALT=='*': return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
elif alt=='0' and ALT=='*' and ref==REF: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
elif alt==ALT and ref==REF: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
continue
# VCF query
def vcf_query3(chrom=None, pos=None, ref=None, alt=None, variant_str=None, individual=None, verbose=False, limit=100):
if variant_str:
variant_str=str(variant_str).strip().replace('_','-')
chrom, pos, ref, alt = variant_str.split('-')
tb=pysam.TabixFile('/slms/UGI/vm_exports/vyp/phenotips/uclex_files/current/chr%s.vcf.gz' % chrom,)
#mainset_February2016_chrX_filtered.vcf.gz
region=str('%s:%s-%s'%(chrom, pos, int(pos),))
headers=[h for h in tb.header]
headers=(headers[len(headers)-1]).strip().split('\t')
records=tb.fetch(region=region)
records=[r.split('\t') for r in records]
def response(POS, REF, ALT, index, geno, chrom, pos):
alleles=[geno['REF']]+geno['ALT'].split(',')
homozygous_genotype='/'.join([str(index),str(index)])
heterozygous_genotype='/'.join(['0',str(index)])
variant=dict()
variant['POS']=POS
variant['REF']=REF
variant['ALT']=ALT
variant['index']=index
variant['variant_id']='-'.join([str(chrom),str(POS),variant['REF'],variant['ALT']])
variant['synonym_variant_id']='{}-{}-{}-{}'.format(str(chrom), str(pos), ref, alt,)
variant['hgvs']='chr%s:g.%s%s>%s' % (str(chrom), str(POS), REF, ALT,)
#print [geno[h].split(':')[0].split('/') for h in geno]
variant['hom_samples']=[h for h in geno if geno[h].split(':')[0]==homozygous_genotype][0:limit]
variant['HOM_COUNT']=len(variant['hom_samples'])
variant['het_samples']=[h for h in geno if geno[h].split(':')[0]==heterozygous_genotype][0:limit]
variant['HET_COUNT']=len(variant['het_samples'])
variant['wt_samples']=[h for h in geno if geno[h].split(':')[0]=='0/0'][1:100]
variant['WT_COUNT']=len([h for h in geno if geno[h].split(':')[0]=='0/0'])
variant['MISS_COUNT']=len([h for h in geno if geno[h].split(':')[0]=='./.'])
variant['allele_num']= 2*(variant['HOM_COUNT'] + variant['HET_COUNT']+variant['WT_COUNT'])
variant['allele_count']=2*variant['HOM_COUNT'] + variant['HET_COUNT']
if individual: variant['individual']=geno[individual]
#variant['site_quality'] = variant['QUAL']
#variant['filter'] = variant['FILTER']
if variant['WT_COUNT']==0:
variant['allele_freq'] = None
else:
variant['allele_freq'] = float(variant['HET_COUNT']+2*variant['HOM_COUNT']) / float(2*variant['WT_COUNT'])
samples=variant['het_samples']+variant['hom_samples']
#variant['hpo']=[p for p in get_db(app.config['DB_NAME_PATIENTS']).patients.find({'external_id':{'$in':samples}},{'_id':0,'features':1,'external_id':1})]
return variant
for r in records:
geno=dict(zip(headers, r))
POS=geno['POS']
REF=geno['REF']
if verbose:
print 'POS', POS
print 'REF', REF
for i, ALT, in enumerate(geno['ALT'].split(',')):
if verbose: print 'ALT', ALT
# insertion
if ref=='-' and REF+alt==ALT: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
# deletion
# replace leftmost
elif alt=='-' and ALT==REF.replace(ref,''): return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
# replace rightmost
elif alt=='-' and ALT==REF[::-1].replace(ref[::-1], "", 1)[::-1]: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
#
elif alt=='-' and ref==REF and ALT=='*': return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
elif alt=='0' and ALT=='*' and ref==REF: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
elif alt==ALT and ref==REF: return response(POS=int(POS), REF=REF, ALT=ALT, index=i+1, geno=geno, chrom=chrom, pos=pos)
continue
def vcf_query_gene(gene, release='mainset_July2016'):
    # gene must carry chrom/start/stop attributes; release replaces the
    # previously undefined RELEASE global (default mirrors vcf_query above).
    tb=pysam.TabixFile('/slms/gee/research/vyplab/UCLex/%s/%s_chr%s.vcf.gz' % (release, release, gene.chrom,))
region ='%s:%s-%s' % (str(gene.chrom), str(gene.start), str(gene.stop),)
headers=[h for h in tb.header]
headers=(headers[len(headers)-1]).strip('#').strip().split('\t')
records=[dict(zip(headers,r.strip().split('\t'))) for r in tb.fetch(region)]
print(len(records))
    records=dict([('%s-%s-%s-%s' % (r['CHROM'], r['POS'], r['REF'], r['ALT'],),r,) for r in records])
    return records
|
alfanugraha/LUMENS-repo
|
refs/heads/master
|
processing/tools/help.py
|
6
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
help.py
---------------------
Date : March 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'March 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from processing.core.Processing import Processing
from processing.tools.system import mkdir
from processing.parameters.ParameterSelection import ParameterSelection
def createBaseHelpFile(alg, folder):
folder = os.path.join(folder, alg.provider.getName().lower())
mkdir(folder)
cmdLineName = alg.commandLineName()[
alg.commandLineName().find(':') + 1:].lower()
validChars = \
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
safeFilename = ''.join(c for c in cmdLineName if c in validChars)
filepath = os.path.join(folder, safeFilename + '.rst')
file = open(filepath, 'w')
file.write(alg.name.upper())
file.write('\n')
file.write('=' * len(alg.name))
file.write('\n\n')
file.write('Description\n')
file.write('-----------\n\n')
file.write('Parameters\n')
file.write('----------\n\n')
for param in alg.parameters:
file.write('- ``' + param.description + '[' +
param.parameterName()[9:] + ']``:\n')
file.write('\nOutputs\n')
file.write('-------\n\n')
for out in alg.outputs:
file.write('- ``' + out.description + '[' +
out.outputTypeName()[6:] + ']``:\n')
file.write('\nSee also\n')
file.write('---------\n\n')
file.write('\nConsole usage\n')
file.write('-------------\n\n')
file.write('\n::\n\n')
s = "\tprocessing.runalg('" + alg.commandLineName() + "', "
for param in alg.parameters:
s += str(param.name.lower().strip()) + ', '
for out in alg.outputs:
if not out.hidden:
s += str(out.name.lower().strip()) + ', '
s = s[:-2] + ')\n'
file.write(s)
s = ''
hasSelection = False
for param in alg.parameters:
if isinstance(param, ParameterSelection):
hasSelection = True
s += '\n\t' + param.name.lower() + '(' + param.description + ')\n'
i = 0
for option in param.options:
s += '\t\t' + str(i) + ' - ' + str(option) + '\n'
i += 1
if hasSelection:
file.write('\n\tAvailable options for selection parameters:\n')
file.write(s)
file.close()
def createBaseHelpFiles(folder):
for provider in Processing.providers:
for alg in provider.algs:
createBaseHelpFile(alg, folder)
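# Illustrative usage (the path is hypothetical): write one .rst skeleton per
# algorithm of every registered provider under a target folder:
#   createBaseHelpFiles('/tmp/processing_help')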
|
guohongze/adminset
|
refs/heads/master
|
cmdb/migrations/0001_initial.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-18 05:56
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('appconf', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Cabinet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='\u673a\u67dc')),
('desc', models.CharField(blank=True, max_length=100, verbose_name='\u63cf\u8ff0')),
],
),
migrations.CreateModel(
name='Host',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('hostname', models.CharField(max_length=50, unique=True, verbose_name='\u4e3b\u673a\u540d')),
('ip', models.GenericIPAddressField(verbose_name='\u7ba1\u7406IP')),
('other_ip', models.CharField(blank=True, max_length=100, verbose_name='\u5176\u5b83IP')),
('asset_no', models.CharField(blank=True, max_length=50, verbose_name='\u8d44\u4ea7\u7f16\u53f7')),
('asset_type', models.CharField(blank=True, choices=[(b'1', '\u7269\u7406\u673a'), (b'2', '\u865a\u62df\u673a'), (b'3', '\u5bb9\u5668'), (b'4', '\u7f51\u7edc\u8bbe\u5907'), (b'5', '\u5b89\u5168\u8bbe\u5907'), (b'6', '\u5176\u4ed6')], max_length=30, null=True, verbose_name='\u8bbe\u5907\u7c7b\u578b')),
('status', models.CharField(blank=True, choices=[(b'1', '\u4f7f\u7528\u4e2d'), (b'2', '\u672a\u4f7f\u7528'), (b'3', '\u6545\u969c'), (b'4', '\u5176\u5b83')], max_length=30, null=True, verbose_name='\u8bbe\u5907\u72b6\u6001')),
('os', models.CharField(blank=True, max_length=100, verbose_name='\u64cd\u4f5c\u7cfb\u7edf')),
('vendor', models.CharField(blank=True, max_length=50, verbose_name='\u8bbe\u5907\u5382\u5546')),
('up_time', models.CharField(blank=True, max_length=50, verbose_name='\u4e0a\u67b6\u65f6\u95f4')),
('cpu_model', models.CharField(blank=True, max_length=100, verbose_name='CPU\u578b\u53f7')),
('cpu_num', models.CharField(blank=True, max_length=100, verbose_name='CPU\u6570\u91cf')),
('memory', models.CharField(blank=True, max_length=30, verbose_name='\u5185\u5b58\u5927\u5c0f')),
('disk', models.CharField(blank=True, max_length=255, verbose_name='\u786c\u76d8\u4fe1\u606f')),
('sn', models.CharField(blank=True, max_length=60, verbose_name='SN\u53f7 \u7801')),
('position', models.CharField(blank=True, max_length=100, verbose_name='\u6240\u5728\u4f4d\u7f6e')),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
('account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='appconf.AuthInfo', verbose_name='\u8d26\u53f7\u4fe1\u606f')),
],
),
migrations.CreateModel(
name='HostGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30, unique=True, verbose_name='\u670d\u52a1\u5668\u7ec4\u540d')),
('desc', models.CharField(blank=True, max_length=100, verbose_name='\u63cf\u8ff0')),
('serverList', models.ManyToManyField(blank=True, to='cmdb.Host', verbose_name='\u6240\u5728\u670d\u52a1\u5668')),
],
),
migrations.CreateModel(
name='Idc',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ids', models.CharField(max_length=255, unique=True, verbose_name='\u673a\u623f\u6807\u8bc6')),
('name', models.CharField(max_length=255, unique=True, verbose_name='\u673a\u623f\u540d\u79f0')),
('address', models.CharField(blank=True, max_length=100, verbose_name='\u673a\u623f\u5730\u5740')),
('tel', models.CharField(blank=True, max_length=30, verbose_name='\u673a\u623f\u7535\u8bdd')),
('contact', models.CharField(blank=True, max_length=30, verbose_name='\u5ba2\u6237\u7ecf\u7406')),
('contact_phone', models.CharField(blank=True, max_length=30, verbose_name='\u79fb\u52a8\u7535\u8bdd')),
('jigui', models.CharField(blank=True, max_length=30, verbose_name='\u673a\u67dc\u4fe1\u606f')),
('ip_range', models.CharField(blank=True, max_length=30, verbose_name='IP\u8303\u56f4')),
('bandwidth', models.CharField(blank=True, max_length=30, verbose_name='\u63a5\u5165\u5e26\u5bbd')),
('memo', models.TextField(blank=True, max_length=200, verbose_name='\u5907\u6ce8\u4fe1\u606f')),
],
options={
'verbose_name': '\u6570\u636e\u4e2d\u5fc3',
'verbose_name_plural': '\u6570\u636e\u4e2d\u5fc3',
},
),
migrations.CreateModel(
name='InterFace',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('vendor', models.CharField(max_length=30, null=True)),
('bandwidth', models.CharField(max_length=30, null=True)),
('tel', models.CharField(max_length=30, null=True)),
('contact', models.CharField(max_length=30, null=True)),
('startdate', models.DateField()),
('enddate', models.DateField()),
('price', models.IntegerField(verbose_name='\u4ef7\u683c')),
],
),
migrations.CreateModel(
name='IpSource',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('net', models.CharField(max_length=30)),
('subnet', models.CharField(max_length=30, null=True)),
('describe', models.CharField(max_length=30, null=True)),
],
),
migrations.CreateModel(
name='UserInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=30, null=True)),
('password', models.CharField(max_length=30, null=True)),
],
),
migrations.AddField(
model_name='host',
name='idc',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Idc', verbose_name='\u6240\u5728\u673a\u623f'),
),
migrations.AddField(
model_name='cabinet',
name='idc',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='cmdb.Idc', verbose_name='\u6240\u5728\u673a\u623f'),
),
migrations.AddField(
model_name='cabinet',
name='serverList',
field=models.ManyToManyField(blank=True, to='cmdb.Host', verbose_name='\u6240\u5728\u670d\u52a1\u5668'),
),
]
|
anybox/sale-workflow
|
refs/heads/8.0
|
sale_owner_stock_sourcing/tests/__init__.py
|
37
|
from . import test_propagate_owner_to_move
from . import test_int_sale_to_reservation
|
paplorinc/intellij-community
|
refs/heads/master
|
python/testData/docstrings/googleKeywordArgumentsSection.py
|
53
|
def f():
"""
Keyword arguments:
"""
|
fdroidtravis/fdroidserver
|
refs/heads/master
|
fdroidserver/checkupdates.py
|
1
|
#!/usr/bin/env python3
#
# checkupdates.py - part of the FDroid server tools
# Copyright (C) 2010-2015, Ciaran Gultnieks, ciaran@ciarang.com
# Copyright (C) 2013-2014 Daniel Martí <mvdan@mvdan.cc>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import urllib.request
import urllib.error
import time
import subprocess
import sys
from argparse import ArgumentParser
import traceback
import html
from distutils.version import LooseVersion
import logging
import copy
import urllib.parse
from pathlib import Path
from . import _
from . import common
from . import metadata
from . import net
from .exception import VCSException, NoSubmodulesException, FDroidException, MetaDataException
# Check for a new version by looking at a document retrieved via HTTP.
# The app's Update Check Data field is used to provide the information
# required.
def check_http(app):
ignoreversions = app.UpdateCheckIgnore
ignoresearch = re.compile(ignoreversions).search if ignoreversions else None
try:
if not app.UpdateCheckData:
raise FDroidException('Missing Update Check Data')
urlcode, codeex, urlver, verex = app.UpdateCheckData.split('|')
parsed = urllib.parse.urlparse(urlcode)
if not parsed.netloc or not parsed.scheme or parsed.scheme != 'https':
raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlcode))
if urlver != '.':
parsed = urllib.parse.urlparse(urlver)
if not parsed.netloc or not parsed.scheme or parsed.scheme != 'https':
                raise FDroidException(_('UpdateCheckData has invalid URL: {url}').format(url=urlver))
vercode = None
if urlcode:
logging.debug("...requesting {0}".format(urlcode))
req = urllib.request.Request(urlcode, None, headers=net.HEADERS)
resp = urllib.request.urlopen(req, None, 20) # nosec B310 scheme is filtered above
page = resp.read().decode('utf-8')
m = re.search(codeex, page)
if not m:
raise FDroidException("No RE match for version code")
vercode = m.group(1).strip()
version = "??"
if urlver:
if urlver != '.':
logging.debug("...requesting {0}".format(urlver))
req = urllib.request.Request(urlver, None)
resp = urllib.request.urlopen(req, None, 20) # nosec B310 scheme is filtered above
page = resp.read().decode('utf-8')
m = re.search(verex, page)
if not m:
raise FDroidException("No RE match for version")
version = m.group(1)
if ignoresearch and version:
if not ignoresearch(version):
return (version, vercode)
else:
return (None, ("Version {version} is ignored").format(version=version))
else:
return (version, vercode)
except FDroidException:
msg = "Could not complete http check for app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg)
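# For illustration, a hypothetical UpdateCheckData value for check_http()
# above (four '|'-separated fields: code URL, code regex, version URL or '.'
# meaning "same document", version regex; group 1 of each regex is used):
#
#   https://example.org/latest.json|"versionCode": (\d+)|.|"versionName": "([^"]+)"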
# Check for a new version by looking at the tags in the source repo.
# Whether this can be used reliably or not depends on
# the development procedures used by the project's developers. Use it with
# caution, because it's inappropriate for many projects.
# Returns (None, "a message") if this didn't work, or (version, vercode, tag) for
# the details of the current version.
def check_tags(app, pattern):
try:
if app.RepoType == 'srclib':
build_dir = Path('build/srclib') / app.Repo
repotype = common.getsrclibvcs(app.Repo)
else:
build_dir = Path('build') / app.id
repotype = app.RepoType
if repotype not in ('git', 'git-svn', 'hg', 'bzr'):
return (None, 'Tags update mode only works for git, hg, bzr and git-svn repositories currently', None)
if repotype == 'git-svn' and ';' not in app.Repo:
return (None, 'Tags update mode used in git-svn, but the repo was not set up with tags', None)
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(None)
last_build = app.get_last_build()
try_init_submodules(app, last_build, vcs)
htag = None
hver = None
hcode = "0"
tags = []
if repotype == 'git':
tags = vcs.latesttags()
else:
tags = vcs.gettags()
if not tags:
return (None, "No tags found", None)
logging.debug("All tags: " + ','.join(tags))
if pattern:
pat = re.compile(pattern)
tags = [tag for tag in tags if pat.match(tag)]
if not tags:
return (None, "No matching tags found", None)
logging.debug("Matching tags: " + ','.join(tags))
if len(tags) > 5 and repotype == 'git':
tags = tags[:5]
logging.debug("Latest tags: " + ','.join(tags))
for tag in tags:
logging.debug("Check tag: '{0}'".format(tag))
vcs.gotorevision(tag)
if app.UpdateCheckData:
filecode, codeex, filever, verex = app.UpdateCheckData.split('|')
vercode = None
if filecode:
filecontent = (build_dir / filecode).read_text()
m = re.search(codeex, filecontent)
if m:
vercode = m.group(1).strip()
version = "??"
if filever:
if filever != '.':
filecontent = (build_dir / filever).read_text()
m = re.search(verex, filecontent)
if m:
version = m.group(1)
if vercode:
logging.debug("UpdateCheckData found version {0} ({1})"
.format(version, vercode))
i_vercode = common.version_code_string_to_int(vercode)
if i_vercode > common.version_code_string_to_int(hcode):
htag = tag
hcode = str(i_vercode)
hver = version
else:
for subdir in possible_subdirs(app):
root_dir = build_dir / subdir
paths = common.manifest_paths(root_dir, last_build.gradle)
version, vercode, _package = common.parse_androidmanifests(paths, app)
if vercode:
logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})"
.format(subdir, version, vercode))
i_vercode = common.version_code_string_to_int(vercode)
if i_vercode > common.version_code_string_to_int(hcode):
htag = tag
hcode = str(i_vercode)
hver = version
if hver:
return (hver, hcode, htag)
return (None, "Couldn't find any version information", None)
except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg, None)
except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg, None)
# Check for a new version by looking at the AndroidManifest.xml at the HEAD
# of the source repo. Whether this can be used reliably or not depends on
# the development procedures used by the project's developers. Use it with
# caution, because it's inappropriate for many projects.
# Returns (None, "a message") if this didn't work, or (version, vercode) for
# the details of the current version.
def check_repomanifest(app, branch=None):
try:
if app.RepoType == 'srclib':
build_dir = Path('build/srclib') / app.Repo
repotype = common.getsrclibvcs(app.Repo)
else:
build_dir = Path('build') / app.id
repotype = app.RepoType
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
if repotype == 'git':
if branch:
branch = 'origin/' + branch
vcs.gotorevision(branch)
elif repotype == 'git-svn':
vcs.gotorevision(branch)
elif repotype == 'hg':
vcs.gotorevision(branch)
elif repotype == 'bzr':
vcs.gotorevision(None)
last_build = metadata.Build()
if app.get('Builds', []):
last_build = app.get('Builds', [])[-1]
try_init_submodules(app, last_build, vcs)
hpak = None
hver = None
hcode = "0"
for subdir in possible_subdirs(app):
root_dir = build_dir / subdir
paths = common.manifest_paths(root_dir, last_build.gradle)
version, vercode, package = common.parse_androidmanifests(paths, app)
if vercode:
logging.debug("Manifest exists in subdir '{0}'. Found version {1} ({2})"
.format(subdir, version, vercode))
i_vercode = common.version_code_string_to_int(vercode)
if i_vercode > common.version_code_string_to_int(hcode):
hpak = package
hcode = str(i_vercode)
hver = version
if not hpak:
return (None, "Couldn't find package ID")
if hver:
return (hver, hcode)
return (None, "Couldn't find any version information")
except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg)
except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg)
def check_repotrunk(app):
try:
if app.RepoType == 'srclib':
build_dir = Path('build/srclib') / app.Repo
repotype = common.getsrclibvcs(app.Repo)
else:
build_dir = Path('build') / app.id
repotype = app.RepoType
if repotype not in ('git-svn', ):
return (None, 'RepoTrunk update mode only makes sense in git-svn repositories')
# Set up vcs interface and make sure we have the latest code...
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(None)
ref = vcs.getref()
return (ref, ref)
except VCSException as vcse:
msg = "VCS error while scanning app {0}: {1}".format(app.id, vcse)
return (None, msg)
except Exception:
msg = "Could not scan app {0} due to unknown error: {1}".format(app.id, traceback.format_exc())
return (None, msg)
# Check for a new version by looking at the Google Play Store.
# Returns (None, "a message") if this didn't work, or (version, None) for
# the details of the current version.
def check_gplay(app):
time.sleep(15)
url = 'https://play.google.com/store/apps/details?id=' + app.id
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:18.0) Gecko/20100101 Firefox/18.0'}
req = urllib.request.Request(url, None, headers)
try:
resp = urllib.request.urlopen(req, None, 20) # nosec B310 URL base is hardcoded above
page = resp.read().decode()
except urllib.error.HTTPError as e:
return (None, str(e.code))
except Exception as e:
return (None, 'Failed:' + str(e))
version = None
m = re.search('itemprop="softwareVersion">[ ]*([^<]+)[ ]*</div>', page)
if m:
version = html.unescape(m.group(1))
if version == 'Varies with device':
return (None, 'Device-variable version, cannot use this method')
if not version:
return (None, "Couldn't find version")
return (version.strip(), None)
def try_init_submodules(app, last_build, vcs):
"""Try to init submodules if the last build entry used them.
They might have been removed from the app's repo in the meantime,
so if we can't find any submodules we continue with the updates check.
If there is any other error in initializing them then we stop the check.
"""
if last_build.submodules:
try:
vcs.initsubmodules()
except NoSubmodulesException:
logging.info("No submodules present for {}".format(_getappname(app)))
# Return all directories under startdir that contain any of the manifest
# files, and thus are probably an Android project.
def dirs_with_manifest(startdir):
# TODO: Python3.6: Accepts a path-like object.
for root, _dirs, files in os.walk(str(startdir)):
if any(m in files for m in [
'AndroidManifest.xml', 'pom.xml', 'build.gradle', 'build.gradle.kts']):
yield Path(root)
# Yields candidate project subdirs under the app's build dir, relative to the
# build dir; a subdir qualifies if common.parse_androidmanifests() finds a
# package ID in it.
def possible_subdirs(app):
if app.RepoType == 'srclib':
build_dir = Path('build/srclib') / app.Repo
else:
build_dir = Path('build') / app.id
last_build = app.get_last_build()
for d in dirs_with_manifest(build_dir):
m_paths = common.manifest_paths(d, last_build.gradle)
package = common.parse_androidmanifests(m_paths, app)[2]
if package is not None:
subdir = d.relative_to(build_dir)
logging.debug("Adding possible subdir %s" % subdir)
yield subdir
def _getappname(app):
return common.get_app_display_name(app)
def _getcvname(app):
return '%s (%s)' % (app.CurrentVersion, app.CurrentVersionCode)
def fetch_autoname(app, tag):
if not app.RepoType or app.UpdateCheckMode in ('None', 'Static') \
or app.UpdateCheckName == "Ignore":
return None
if app.RepoType == 'srclib':
build_dir = Path('build/srclib') / app.Repo
else:
build_dir = Path('build') / app.id
try:
vcs = common.getvcs(app.RepoType, app.Repo, build_dir)
vcs.gotorevision(tag)
except VCSException:
return None
last_build = app.get_last_build()
logging.debug("...fetch auto name from " + str(build_dir))
new_name = None
for subdir in possible_subdirs(app):
root_dir = build_dir / subdir
new_name = common.fetch_real_name(root_dir, last_build.gradle)
if new_name is not None:
break
commitmsg = None
if new_name:
logging.debug("...got autoname '" + new_name + "'")
if new_name != app.AutoName:
app.AutoName = new_name
if not commitmsg:
commitmsg = "Set autoname of {0}".format(_getappname(app))
else:
logging.debug("...couldn't get autoname")
return commitmsg
def checkupdates_app(app):
# If a change is made, commitmsg should be set to a description of it.
# Only if this is set will changes be written back to the metadata.
commitmsg = None
tag = None
msg = None
vercode = None
noverok = False
mode = app.UpdateCheckMode
if mode.startswith('Tags'):
pattern = mode[5:] if len(mode) > 4 else None
(version, vercode, tag) = check_tags(app, pattern)
if version == 'Unknown':
version = tag
msg = vercode
elif mode == 'RepoManifest':
(version, vercode) = check_repomanifest(app)
msg = vercode
elif mode.startswith('RepoManifest/'):
tag = mode[13:]
(version, vercode) = check_repomanifest(app, tag)
msg = vercode
elif mode == 'RepoTrunk':
(version, vercode) = check_repotrunk(app)
msg = vercode
elif mode == 'HTTP':
(version, vercode) = check_http(app)
msg = vercode
elif mode in ('None', 'Static'):
version = None
msg = 'Checking disabled'
noverok = True
else:
version = None
msg = 'Invalid update check method'
if version and vercode and app.VercodeOperation:
if not common.VERCODE_OPERATION_RE.match(app.VercodeOperation):
raise MetaDataException(_('Invalid VercodeOperation: {field}')
.format(field=app.VercodeOperation))
oldvercode = str(int(vercode))
op = app.VercodeOperation.replace("%c", oldvercode)
vercode = str(common.calculate_math_string(op))
logging.debug("Applied vercode operation: %s -> %s" % (oldvercode, vercode))
if version and any(version.startswith(s) for s in [
'${', # Gradle variable names
'@string/', # Strings we could not resolve
]):
version = "Unknown"
updating = False
if version is None:
logmsg = "...{0} : {1}".format(app.id, msg)
if noverok:
logging.info(logmsg)
else:
logging.warning(logmsg)
elif vercode == app.CurrentVersionCode:
logging.info("...up to date")
else:
logging.debug("...updating - old vercode={0}, new vercode={1}".format(
app.CurrentVersionCode, vercode))
app.CurrentVersion = version
app.CurrentVersionCode = str(int(vercode))
updating = True
commitmsg = fetch_autoname(app, tag)
if updating:
name = _getappname(app)
ver = _getcvname(app)
logging.info('...updating to version %s' % ver)
commitmsg = 'Update CurrentVersion of %s to %s' % (name, ver)
if options.auto:
mode = app.AutoUpdateMode
if not app.CurrentVersionCode:
logging.warning("Can't auto-update app with no CurrentVersionCode: " + app.id)
elif mode in ('None', 'Static'):
pass
elif mode.startswith('Version'):
pattern = mode[8:]
suffix = ''
if pattern.startswith('+'):
try:
suffix, pattern = pattern[1:].split(' ', 1)
except ValueError:
raise MetaDataException("Invalid AutoUpdateMode: " + mode)
gotcur = False
latest = None
for build in app.get('Builds', []):
if int(build.versionCode) >= int(app.CurrentVersionCode):
gotcur = True
if not latest or int(build.versionCode) > int(latest.versionCode):
latest = build
if int(latest.versionCode) > int(app.CurrentVersionCode):
logging.info("Refusing to auto update, since the latest build is newer")
if not gotcur:
newbuild = copy.deepcopy(latest)
newbuild.disable = False
newbuild.versionCode = app.CurrentVersionCode
newbuild.versionName = app.CurrentVersion + suffix.replace('%c', newbuild.versionCode)
logging.info("...auto-generating build for " + newbuild.versionName)
if tag:
newbuild.commit = tag
else:
commit = pattern.replace('%v', app.CurrentVersion)
commit = commit.replace('%c', newbuild.versionCode)
newbuild.commit = commit
app['Builds'].append(newbuild)
name = _getappname(app)
ver = _getcvname(app)
commitmsg = "Update %s to %s" % (name, ver)
else:
logging.warning('Invalid auto update mode "' + mode + '" on ' + app.id)
if commitmsg:
metadata.write_metadata(app.metadatapath, app)
if options.commit:
logging.info("Commiting update for " + app.metadatapath)
gitcmd = ["git", "commit", "-m", commitmsg]
if 'auto_author' in config:
gitcmd.extend(['--author', config['auto_author']])
gitcmd.extend(["--", app.metadatapath])
if subprocess.call(gitcmd) != 0:
raise FDroidException("Git commit failed")
def status_update_json(processed, failed):
"""Output a JSON file with metadata about this run"""
logging.debug(_('Outputting JSON'))
output = common.setup_status_output(start_timestamp)
if processed:
output['processed'] = processed
if failed:
output['failed'] = failed
common.write_status_json(output)
def update_wiki(gplaylog, locallog):
if config.get('wiki_server') and config.get('wiki_path'):
try:
import mwclient
site = mwclient.Site((config['wiki_protocol'], config['wiki_server']),
path=config['wiki_path'])
site.login(config['wiki_user'], config['wiki_password'])
# Write a page with the last build log for this version code
wiki_page_path = 'checkupdates_' + time.strftime('%s', start_timestamp)
newpage = site.Pages[wiki_page_path]
txt = ''
txt += "* command line: <code>" + ' '.join(sys.argv) + "</code>\n"
txt += common.get_git_describe_link()
txt += "* started at " + common.get_wiki_timestamp(start_timestamp) + '\n'
txt += "* completed at " + common.get_wiki_timestamp() + '\n'
txt += "\n\n"
txt += common.get_android_tools_version_log()
txt += "\n\n"
if gplaylog:
txt += '== --gplay check ==\n\n'
txt += gplaylog
if locallog:
txt += '== local source check ==\n\n'
txt += locallog
newpage.save(txt, summary='Run log')
newpage = site.Pages['checkupdates']
newpage.save('#REDIRECT [[' + wiki_page_path + ']]', summary='Update redirect')
except Exception as e:
logging.error(_('Error while attempting to publish log: %s') % e)
config = None
options = None
start_timestamp = time.gmtime()
def main():
global config, options
# Parse command line...
parser = ArgumentParser()
common.setup_global_opts(parser)
parser.add_argument("appid", nargs='*', help=_("application ID of file to operate on"))
parser.add_argument("--auto", action="store_true", default=False,
help=_("Process auto-updates"))
parser.add_argument("--autoonly", action="store_true", default=False,
help=_("Only process apps with auto-updates"))
parser.add_argument("--commit", action="store_true", default=False,
help=_("Commit changes"))
parser.add_argument("--allow-dirty", action="store_true", default=False,
help=_("Run on git repo that has uncommitted changes"))
parser.add_argument("--gplay", action="store_true", default=False,
help=_("Only print differences with the Play Store"))
metadata.add_metadata_arguments(parser)
options = parser.parse_args()
metadata.warnings_action = options.W
config = common.read_config(options)
if not options.allow_dirty:
status = subprocess.check_output(['git', 'status', '--porcelain'])
if status:
            logging.error(_('Build metadata git repo has uncommitted changes!'))
sys.exit(1)
# Get all apps...
allapps = metadata.read_metadata()
apps = common.read_app_args(options.appid, allapps, False)
gplaylog = ''
if options.gplay:
for appid, app in apps.items():
gplaylog += '* ' + appid + '\n'
version, reason = check_gplay(app)
if version is None:
if reason == '404':
logging.info("{0} is not in the Play Store".format(_getappname(app)))
else:
logging.info("{0} encountered a problem: {1}".format(_getappname(app), reason))
if version is not None:
stored = app.CurrentVersion
if not stored:
logging.info("{0} has no Current Version but has version {1} on the Play Store"
.format(_getappname(app), version))
elif LooseVersion(stored) < LooseVersion(version):
logging.info("{0} has version {1} on the Play Store, which is bigger than {2}"
.format(_getappname(app), version, stored))
else:
if stored != version:
logging.info("{0} has version {1} on the Play Store, which differs from {2}"
.format(_getappname(app), version, stored))
else:
logging.info("{0} has the same version {1} on the Play Store"
.format(_getappname(app), version))
update_wiki(gplaylog, None)
return
locallog = ''
processed = []
failed = dict()
for appid, app in apps.items():
if options.autoonly and app.AutoUpdateMode in ('None', 'Static'):
logging.debug(_("Nothing to do for {appid}.").format(appid=appid))
continue
msg = _("Processing {appid}").format(appid=appid)
logging.info(msg)
locallog += '* ' + msg + '\n'
try:
checkupdates_app(app)
processed.append(appid)
except Exception as e:
msg = _("...checkupdate failed for {appid} : {error}").format(appid=appid, error=e)
logging.error(msg)
locallog += msg + '\n'
failed[appid] = str(e)
update_wiki(None, locallog)
status_update_json(processed, failed)
logging.info(_("Finished"))
if __name__ == "__main__":
main()
|
ProfessionalIT/professionalit-webiste
|
refs/heads/master
|
sdk/google_appengine/lib/django-1.3/django/conf/locale/de/formats.py
|
329
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. F Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
krzycz/prd
|
refs/heads/master
|
tools/perf/python/twatch.py
|
1565
|
#! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, watermark = 1,
sample_id_all = 1,
sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU)
evsel.open(cpus = cpus, threads = threads);
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
|
ceefour/opencog
|
refs/heads/master
|
opencog/python/dingjie/m_util.py
|
34
|
##
# @file m_util.py
# @brief developing python library
# @author Dingjie.Wang
# @version 1.0
# @date 2012-08-04
import re
import inspect
from pprint import pprint
# ---------------------------------------------------------------------
def format_log(offset, dsp_caller = True, *args):
    '''Join args into one line indented by offset spaces; optionally append
    the caller's line number and function name.'''
caller = ""
if dsp_caller:
stack = inspect.stack()
caller = " -- %s %s" % (stack[1][2], stack[1][3])
out = ' ' * offset + ' '.join(map(str, args)) + caller
return out
# Note that this has the same name, but works differently, than the
# logger class in opencog/cython/opencog/logger.pyx
#
# Note that there is yet a third logger in
# opencog/python/util/util.py that is like this, but not colorized.
# XXX FIXME All these different versions should be merged to one
# version.
#
class Logger(object):
DEBUG = 0
INFO = 1
WARNING = 2
ERROR = 3
# colorful output
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
COLOR_END = '\033[0m'
    def __init__(self, f = None):
        # open logging file; on failure fall back to stdout (a None file
        # handle makes py2's "print >>" write to sys.stdout)
        self._file = None
        if f:
            try:
                self._file = open(f,'w')
            except IOError:
                print " error: can't open logging file %s " % f
        self._filename = f
# default setting
self.offset = 0
self.to_stdout = True
self.to_file = True
self._levels = set()
self.add_level(Logger.ERROR)
def debug(self,msg, head = "" ):
try:
if self.to_file and Logger.DEBUG in self._levels:
temp = "[DEBUG]" + str(head) + ":" + str(msg) if head else "[DEBUG]" + str(msg)
print >>self._file, temp
except IOError:
print Logger.RED + " error: can't write logging file %s " % self._filename + Logger.COLOR_END
if self.to_stdout and Logger.DEBUG in self._levels:
temp = "[DEBUG]" + str(head) + ":" + str(msg) if head else "[DEBUG]" + str(msg)
print Logger.BLUE + temp + Logger.COLOR_END
def info(self, msg, head = "" ):
try:
if self.to_file and Logger.INFO in self._levels:
temp = "[INFO]" + str(head) + ":" + str(msg) if head else "[INFO]" + str(msg)
print >>self._file, temp
except IOError:
print Logger.RED + " error: can't write logging file %s " % self._filename + Logger.COLOR_END
if self.to_stdout and Logger.INFO in self._levels:
temp = "[INFO]" + str(head) + ":" + str(msg) if head else "[INFO]" + str(msg)
print Logger.GREEN + temp + Logger.COLOR_END
def warning(self,msg, head = "" ):
try:
if self.to_file and Logger.WARNING in self._levels:
temp = "[WARNING]" + str(head) + ":" + str(msg) if head else "[WARNING]" + str(msg)
print >>self._file, temp
except IOError:
print Logger.RED + " error: can't write logging file %s " % self._filename + Logger.COLOR_END
if self.to_stdout and Logger.WARNING in self._levels:
temp = "[WARNING]" + str(head) + ":" + str(msg) if head else "[WARNING]" + str(msg)
print Logger.YELLOW + temp + Logger.COLOR_END
def error(self, msg, head = "" ):
try:
if self.to_file and Logger.ERROR in self._levels:
temp = "[ERROR]" + str(head) + ":" + str(msg) if head else "[ERROR]" + str(msg)
print >>self._file, temp
except IOError:
print Logger.RED + " error: can't write logging file %s " % self._filename + Logger.COLOR_END
if self.to_stdout and Logger.ERROR in self._levels:
temp = "[ERROR]" + str(head) + ":" + str(msg) if head else "[ERROR]" + str(msg)
print Logger.RED + temp + Logger.COLOR_END
def pprint(self, obj, head = "" ):
'''docstring for pprint()'''
try:
if self.to_file:
#print head
pprint(obj, self._file)
except IOError:
print Logger.RED + " error: can't write logging file %s " % self._filename + Logger.COLOR_END
if self.to_stdout:
#print str(head)
pprint(obj)
    def flush(self):
        if self._file:
            self._file.flush()
def use_stdout(self, use):
self.to_stdout = use
    #def setLevel(self, level):
    #self._levels.append(level)
    def add_level(self, level):
        '''Enable output for the given logging level.'''
        self._levels.add(level)
log = Logger("default.log")
# --------------------------------------------------------------------------------------------------------------
def dict_sub(text, d):
    """Replace in 'text' non-overlapping occurrences of REs whose patterns are
    keys in dictionary 'd' with the corresponding values (which must be
    constant strings: they may contain named backreferences but not numeric
    ones). The keys must not contain anonymous matching groups.
    Returns the new string."""
try:
# Create a regular expression from the dictionary keys
regex = re.compile("|".join("(%s)" % k for k in d))
# Facilitate lookup from group number to value
lookup = dict((i+1, v) for i, v in enumerate(d.itervalues()))
# For each match, find which group matched and expand its value
return regex.sub(lambda mo: mo.expand(lookup[mo.lastindex]), text)
except Exception:
return text
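# Example (hypothetical patterns): dict_sub("a1 b2", {r"a\d": "X", r"b\d": "Y"})
# returns "X Y"; each pattern becomes one regex group, and mo.lastindex picks
# the matching value out of the lookup table.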
# --------------------------------------------------------------------------------------------------------------
class hs_dict(dict):
"""hashable dict
@attention: should not be modified after added to a set or dict!"""
    def __init__(self, arg = None):
        # dict.__init__(None) raises TypeError, so substitute an empty mapping
        super(hs_dict, self).__init__(arg if arg is not None else {})
        self._hash = None
    def __cmp__(self, other):
        '''Order by number of items; equal sizes compare as equal.'''
        if len(self) < len(other):
            return -1
        elif len(self) > len(other):
            return 1
        return 0
def __eq__(self, other):
return tuple(sorted(self.items())) == tuple(sorted(other.items()))
#def __setitem__(self, key, value):
#'''docstring for __setitem__'''
#pass
    def __hash__(self):
        if not self._hash:
            self._hash = hash(tuple(sorted(self.items())))
        return self._hash
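# Usage sketch: hs_dict instances can key other dicts or sets, e.g.
#   cache = { hs_dict({'x': 1}): 'result' }
# but, as the class docstring warns, a key must not be mutated after insertion
# because the hash is computed once and cached.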
# --------------------------------------------------------------------------------------------------------------
from datetime import datetime
class Simple_Time_Interval(object):
    """Rough estimate of the interval between two time points, in whole seconds."""
    def __init__(self):
        self.start_time_stamp = None
        self.end_time_stamp = None
        # interval, in seconds
        self.interval_time_stamp = None
def start(self):
self.start_time_stamp = datetime.now()
def end(self):
self.end_time_stamp = datetime.now()
def interval(self):
''' return interval in seconds'''
self.interval_time_stamp = (self.end_time_stamp - self.start_time_stamp).seconds
return self.interval_time_stamp
time_interval = Simple_Time_Interval()
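# Usage sketch:
#   time_interval.start(); ...; time_interval.end()
#   elapsed = time_interval.interval()  # whole seconds only (datetime .seconds)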
# --------------------------------------------------------------------------------------------------------------
def rough_compare_files(s_filename, t_filename):
    '''Roughly check whether two files contain the same lines, ignoring order.'''
try:
s_file = open(s_filename, 'r')
t_file = open(t_filename, 'r')
diff_file = open("diff.log", 'w')
print s_filename + " not including:"
print >> diff_file, s_filename + " not including:"
#
source = set()
for line in s_file.readlines():
source.add(line)
# compare it with output of atomspace load with cogserver
for i,line in enumerate(t_file.readlines()):
if line not in source:
print "line %s failed: %s"%(i+1,line)
print >> diff_file, "line %s failed: %s"%(i+1,line)
except IOError,e:
print e
#print >> diff_file, e
#s_file.close()
#t_file.close()
#diff_file.close()
return False
else:
return True
# --------------------------------------------------------------------------------------------------------------
from SimpleXMLRPCServer import SimpleXMLRPCServer
class RpcServer(object):
    """Thin wrapper around SimpleXMLRPCServer."""
    def __init__(self, port = 8000, ip = "localhost"):
        self._ip = ip
        self._port = port
        self._server = SimpleXMLRPCServer((ip, port))
    def register_function(self, func, funcName):
        '''Expose func to XML-RPC clients under the name funcName.'''
        self._server.register_function(func, funcName)
    def run(self):
        '''Start serving requests; blocks forever.'''
        log.info("server is listening on port %s" % self._port)
        self._server.serve_forever()
rpcServer = RpcServer()
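# Usage sketch (with a hypothetical handler):
#   rpcServer.register_function(lambda x: x, 'echo')
#   rpcServer.run()  # blocks; clients call 'echo' over XML-RPC on port 8000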
# --------------------------------------------------------------------------------------------------------------
__all__ = ["log", "time_interval", "Logger", "dict_sub","hs_dict","format_log", "RpcServer", "rpcServer" ]
|
slonopotamus/git_svn_server
|
refs/heads/master
|
GitSvnServer/report.py
|
3
|
import parse
from errors import *
rpt_cmds = {}
def rpt_func(name):
def _rpt_func(f):
rpt_cmds.setdefault(name, f)
return f
return _rpt_func
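# Each handler below registers itself in rpt_cmds under its svn report command
# name, e.g. rpt_cmds['set-path'] is set_path, so process() can dispatch on the
# command name it parses out of the incoming message.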
@rpt_func('set-path')
def set_path(command, args):
path = parse.string(args[0])
rev = int(args[1])
start_empty = args[2].lower() == 'true'
lock_token = None
if len(args) > 3 and len(args[3]) != 0:
lock_token = parse.string(args[3][0])
depth = None
if len(args) > 4:
depth = args[4]
command.report_set_path(path, rev, start_empty, lock_token, depth)
@rpt_func('delete-path')
def delete_path(command, args):
path = parse.string(args[0])
command.report_delete_path(path)
@rpt_func('link-path')
def link_path(command, args):
path = parse.string(args[0])
url = parse.string(args[1])
rev = int(args[2])
start_empty = parse.bool(args[3])
lock_token = None
if len(args) > 4:
lock_token = parse.string(args[4][0])
depth = None
if len(args) > 5:
depth = args[5]
command.report_link_path(path, url, rev, start_empty, lock_token, depth)
@rpt_func('finish-report')
def finish_report(command, args):
command.report_finish()
@rpt_func('abort-report')
def abort_report(command, args):
command.report_abort()
def process(link):
msg = parse.msg(link.read_msg())
command = link.command
if command is None:
raise ModeError('report mode requires a current command')
report = msg[0]
args = msg[1]
if report not in rpt_cmds:
command.log_report_error(210001, "Unknown command '%s'" % report)
return
rpt_cmds[report](command, args)
|
acsone/hr
|
refs/heads/8.0
|
hr_contract_multi_jobs/models/hr_job.py
|
23
|
# -*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class hr_job(models.Model):
_inherit = 'hr.job'
contract_job_ids = fields.One2many('hr.contract.job',
'job_id',
string='Contract Jobs')
|
jimpick/jaikuengine
|
refs/heads/master
|
badge/views.py
|
4
|
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import http
from django import template
from django.conf import settings
from django.template import loader
from common import api
from common import clean
def badge_badge(request, format, nick):
view = api.actor_get(request.user, nick)
presence = api.presence_get(request.user, view.nick)
if not presence:
# look offline please
line = 'Offline'
light = 'gray'
location = ''
else:
line = presence.extra.get('status', 'Offline')
light = presence.extra.get('light', 'gray')
location = presence.extra.get('location', '')
if format == 'image':
return http.HttpResponseRedirect('/images/badge_%s.gif' % light)
if format == 'js-small':
multiline = len(line) > 17
truncated_line = len(line) > 30 and "%s..." % (line[:27]) or line
content_type = 'text/javascript'
template_path = 'js_small.js'
elif format == 'js-medium' or format == 'js-large':
truncated_line = len(line) > 40 and "%s..." % (line[:27]) or line
content_type = 'text/javascript'
template_path = '%s.js' % format.replace('-', '_')
elif format == 'json':
content_type = 'text/javascript'
template_path = 'badge.json'
elif format == 'xml':
content_type = 'application/xml'
template_path = 'badge.xml'
c = template.RequestContext(request, locals())
t = loader.get_template('badge/templates/%s' % template_path)
r = http.HttpResponse(t.render(c))
r['Content-type'] = content_type
return r
|
msebire/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/gis/db/backends/spatialite/__init__.py
|
12133432
| |
hyesun03/k-board
|
refs/heads/master
|
kboard/accounts/tests/__init__.py
|
12133432
| |
numansiddique/contrail-controller
|
refs/heads/master
|
src/opserver/__init__.py
|
12133432
| |
jinnykoo/wuyisj
|
refs/heads/master
|
src/oscar/apps/order/reports.py
|
13
|
import datetime
from oscar.core.loading import get_model
from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_class
ReportGenerator = get_class('dashboard.reports.reports', 'ReportGenerator')
ReportCSVFormatter = get_class('dashboard.reports.reports',
'ReportCSVFormatter')
ReportHTMLFormatter = get_class('dashboard.reports.reports',
'ReportHTMLFormatter')
Order = get_model('order', 'Order')
class OrderReportCSVFormatter(ReportCSVFormatter):
filename_template = 'orders-%s-to-%s.csv'
def generate_csv(self, response, orders):
writer = self.get_csv_writer(response)
header_row = [_('Order number'),
_('Name'),
_('Email'),
_('Total incl. tax'),
_('Date placed')]
writer.writerow(header_row)
for order in orders:
row = [
order.number,
'-' if order.is_anonymous else order.user.get_full_name(),
order.email,
order.total_incl_tax,
self.format_datetime(order.date_placed)]
writer.writerow(row)
def filename(self, **kwargs):
return self.filename_template % (
kwargs['start_date'], kwargs['end_date'])
class OrderReportHTMLFormatter(ReportHTMLFormatter):
filename_template = 'dashboard/reports/partials/order_report.html'
class OrderReportGenerator(ReportGenerator):
code = 'order_report'
description = _("Orders placed")
date_range_field_name = 'date_placed'
formatters = {
'CSV_formatter': OrderReportCSVFormatter,
'HTML_formatter': OrderReportHTMLFormatter,
}
def generate(self):
orders = Order._default_manager.filter(
date_placed__gte=self.start_date,
date_placed__lt=self.end_date + datetime.timedelta(days=1)
)
additional_data = {
'start_date': self.start_date,
'end_date': self.end_date
}
return self.formatter.generate_response(orders, **additional_data)
def is_available_to(self, user):
return user.is_staff
|
Ronak6892/servo
|
refs/heads/master
|
components/script/dom/bindings/codegen/parser/WebIDL.py
|
28
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""" A WebIDL parser. """
from ply import lex, yacc
import re
import os
import traceback
import math
from collections import defaultdict
# Machinery
def parseInt(literal):
string = literal
sign = 0
base = 0
if string[0] == '-':
sign = -1
string = string[1:]
else:
sign = 1
if string[0] == '0' and len(string) > 1:
if string[1] == 'x' or string[1] == 'X':
base = 16
string = string[2:]
else:
base = 8
string = string[1:]
else:
base = 10
value = int(string, base)
return value * sign
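# e.g. parseInt('42') == 42, parseInt('010') == 8 (a leading zero means octal),
# parseInt('-0x10') == -16.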
# Magic for creating enums
def M_add_class_attribs(attribs, start):
def foo(name, bases, dict_):
for v, k in enumerate(attribs):
dict_[k] = start + v
assert 'length' not in dict_
dict_['length'] = start + len(attribs)
return type(name, bases, dict_)
return foo
def enum(*names, **kw):
if len(kw) == 1:
base = kw['base'].__class__
start = base.length
else:
assert len(kw) == 0
base = object
start = 0
class Foo(base):
__metaclass__ = M_add_class_attribs(names, start)
def __setattr__(self, name, value): # this makes it read-only
raise NotImplementedError
return Foo()
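# e.g. Tags = enum('a', 'b') gives Tags.a == 0, Tags.b == 1, Tags.length == 2;
# enum('c', base=Tags) continues the numbering, so its 'c' == 2.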
class WebIDLError(Exception):
def __init__(self, message, locations, warning=False):
self.message = message
self.locations = [str(loc) for loc in locations]
self.warning = warning
def __str__(self):
return "%s: %s%s%s" % (self.warning and 'warning' or 'error',
self.message,
", " if len(self.locations) != 0 else "",
"\n".join(self.locations))
class Location(object):
def __init__(self, lexer, lineno, lexpos, filename):
self._line = None
self._lineno = lineno
self._lexpos = lexpos
self._lexdata = lexer.lexdata
self._file = filename if filename else "<unknown>"
def __eq__(self, other):
return (self._lexpos == other._lexpos and
self._file == other._file)
def filename(self):
return self._file
def resolve(self):
if self._line:
return
startofline = self._lexdata.rfind('\n', 0, self._lexpos) + 1
endofline = self._lexdata.find('\n', self._lexpos, self._lexpos + 80)
if endofline != -1:
self._line = self._lexdata[startofline:endofline]
else:
self._line = self._lexdata[startofline:]
self._colno = self._lexpos - startofline
# Our line number seems to point to the start of self._lexdata
self._lineno += self._lexdata.count('\n', 0, startofline)
def get(self):
self.resolve()
return "%s line %s:%s" % (self._file, self._lineno, self._colno)
def _pointerline(self):
return " " * self._colno + "^"
def __str__(self):
self.resolve()
return "%s line %s:%s\n%s\n%s" % (self._file, self._lineno, self._colno,
self._line, self._pointerline())
class BuiltinLocation(object):
def __init__(self, text):
self.msg = text + "\n"
def __eq__(self, other):
return (isinstance(other, BuiltinLocation) and
self.msg == other.msg)
def filename(self):
return '<builtin>'
def resolve(self):
pass
def get(self):
return self.msg
def __str__(self):
return self.get()
# Data Model
class IDLObject(object):
def __init__(self, location):
self.location = location
self.userData = dict()
def filename(self):
return self.location.filename()
def isInterface(self):
return False
def isEnum(self):
return False
def isCallback(self):
return False
def isType(self):
return False
def isDictionary(self):
return False
def isUnion(self):
return False
def isTypedef(self):
return False
def getUserData(self, key, default):
return self.userData.get(key, default)
def setUserData(self, key, value):
self.userData[key] = value
def addExtendedAttributes(self, attrs):
assert False # Override me!
def handleExtendedAttribute(self, attr):
assert False # Override me!
def _getDependentObjects(self):
assert False # Override me!
def getDeps(self, visited=None):
""" Return a set of files that this object depends on. If any of
these files are changed the parser needs to be rerun to regenerate
a new IDLObject.
The visited argument is a set of all the objects already visited.
We must test to see if we are in it, and if so, do nothing. This
prevents infinite recursion."""
# NB: We can't use visited=set() above because the default value is
# evaluated when the def statement is evaluated, not when the function
# is executed, so there would be one set for all invocations.
if visited is None:
visited = set()
if self in visited:
return set()
visited.add(self)
deps = set()
if self.filename() != "<builtin>":
deps.add(self.filename())
for d in self._getDependentObjects():
deps.update(d.getDeps(visited))
return deps
class IDLScope(IDLObject):
def __init__(self, location, parentScope, identifier):
IDLObject.__init__(self, location)
self.parentScope = parentScope
if identifier:
assert isinstance(identifier, IDLIdentifier)
self._name = identifier
else:
self._name = None
self._dict = {}
self.globalNames = set()
# A mapping from global name to the set of global interfaces
# that have that global name.
self.globalNameMapping = defaultdict(set)
self.primaryGlobalAttr = None
self.primaryGlobalName = None
def __str__(self):
return self.QName()
def QName(self):
if self._name:
return self._name.QName() + "::"
return "::"
def ensureUnique(self, identifier, object):
"""
Ensure that there is at most one 'identifier' in scope ('self').
Note that object can be None. This occurs if we end up here for an
interface type we haven't seen yet.
"""
assert isinstance(identifier, IDLUnresolvedIdentifier)
assert not object or isinstance(object, IDLObjectWithIdentifier)
assert not object or object.identifier == identifier
if identifier.name in self._dict:
if not object:
return
# ensureUnique twice with the same object is not allowed
assert id(object) != id(self._dict[identifier.name])
replacement = self.resolveIdentifierConflict(self, identifier,
self._dict[identifier.name],
object)
self._dict[identifier.name] = replacement
return
assert object
self._dict[identifier.name] = object
def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
if (isinstance(originalObject, IDLExternalInterface) and
isinstance(newObject, IDLExternalInterface) and
originalObject.identifier.name == newObject.identifier.name):
return originalObject
if (isinstance(originalObject, IDLExternalInterface) or
isinstance(newObject, IDLExternalInterface)):
raise WebIDLError(
"Name collision between "
"interface declarations for identifier '%s' at '%s' and '%s'"
% (identifier.name,
originalObject.location, newObject.location), [])
if (isinstance(originalObject, IDLDictionary) or
isinstance(newObject, IDLDictionary)):
raise WebIDLError(
"Name collision between dictionary declarations for "
"identifier '%s'.\n%s\n%s"
% (identifier.name,
originalObject.location, newObject.location), [])
# We do the merging of overloads here as opposed to in IDLInterface
# because we need to merge overloads of NamedConstructors and we need to
# detect conflicts in those across interfaces. See also the comment in
# IDLInterface.addExtendedAttributes for "NamedConstructor".
if (originalObject.tag == IDLInterfaceMember.Tags.Method and
newObject.tag == IDLInterfaceMember.Tags.Method):
return originalObject.addOverload(newObject)
# Default to throwing, derived classes can override.
conflictdesc = "\n\t%s at %s\n\t%s at %s" % (originalObject,
originalObject.location,
newObject,
newObject.location)
raise WebIDLError(
"Multiple unresolvable definitions of identifier '%s' in scope '%s%s"
% (identifier.name, str(self), conflictdesc), [])
def _lookupIdentifier(self, identifier):
return self._dict[identifier.name]
def lookupIdentifier(self, identifier):
assert isinstance(identifier, IDLIdentifier)
assert identifier.scope == self
return self._lookupIdentifier(identifier)
class IDLIdentifier(IDLObject):
def __init__(self, location, scope, name):
IDLObject.__init__(self, location)
self.name = name
assert isinstance(scope, IDLScope)
self.scope = scope
def __str__(self):
return self.QName()
def QName(self):
return self.scope.QName() + self.name
def __hash__(self):
return self.QName().__hash__()
def __eq__(self, other):
return self.QName() == other.QName()
def object(self):
return self.scope.lookupIdentifier(self)
class IDLUnresolvedIdentifier(IDLObject):
def __init__(self, location, name, allowDoubleUnderscore=False,
allowForbidden=False):
IDLObject.__init__(self, location)
assert len(name) > 0
if name == "__noSuchMethod__":
raise WebIDLError("__noSuchMethod__ is deprecated", [location])
if name[:2] == "__" and name != "__content" and not allowDoubleUnderscore:
raise WebIDLError("Identifiers beginning with __ are reserved",
[location])
if name[0] == '_' and not allowDoubleUnderscore:
name = name[1:]
# TODO: Bug 872377, Restore "toJSON" to below list.
# We sometimes need custom serialization, so allow toJSON for now.
if (name in ["constructor", "toString"] and
not allowForbidden):
raise WebIDLError("Cannot use reserved identifier '%s'" % (name),
[location])
self.name = name
def __str__(self):
return self.QName()
def QName(self):
return "<unresolved scope>::" + self.name
def resolve(self, scope, object):
assert isinstance(scope, IDLScope)
assert not object or isinstance(object, IDLObjectWithIdentifier)
assert not object or object.identifier == self
scope.ensureUnique(self, object)
identifier = IDLIdentifier(self.location, scope, self.name)
if object:
object.identifier = identifier
return identifier
def finish(self):
assert False # Should replace with a resolved identifier first.
class IDLObjectWithIdentifier(IDLObject):
def __init__(self, location, parentScope, identifier):
IDLObject.__init__(self, location)
assert isinstance(identifier, IDLUnresolvedIdentifier)
self.identifier = identifier
if parentScope:
self.resolve(parentScope)
self.treatNullAs = "Default"
def resolve(self, parentScope):
assert isinstance(parentScope, IDLScope)
assert isinstance(self.identifier, IDLUnresolvedIdentifier)
self.identifier.resolve(parentScope, self)
def checkForStringHandlingExtendedAttributes(self, attrs,
isDictionaryMember=False,
isOptional=False):
"""
A helper function to deal with TreatNullAs. Returns the list
of attrs it didn't handle itself.
"""
assert isinstance(self, IDLArgument) or isinstance(self, IDLAttribute)
unhandledAttrs = list()
for attr in attrs:
if not attr.hasValue():
unhandledAttrs.append(attr)
continue
identifier = attr.identifier()
value = attr.value()
if identifier == "TreatNullAs":
if not self.type.isDOMString() or self.type.nullable():
raise WebIDLError("[TreatNullAs] is only allowed on "
"arguments or attributes whose type is "
"DOMString",
[self.location])
if isDictionaryMember:
raise WebIDLError("[TreatNullAs] is not allowed for "
"dictionary members", [self.location])
if value != 'EmptyString':
raise WebIDLError("[TreatNullAs] must take the identifier "
"'EmptyString', not '%s'" % value,
[self.location])
self.treatNullAs = value
else:
unhandledAttrs.append(attr)
return unhandledAttrs
class IDLObjectWithScope(IDLObjectWithIdentifier, IDLScope):
def __init__(self, location, parentScope, identifier):
assert isinstance(identifier, IDLUnresolvedIdentifier)
IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)
IDLScope.__init__(self, location, parentScope, self.identifier)
class IDLIdentifierPlaceholder(IDLObjectWithIdentifier):
def __init__(self, location, identifier):
assert isinstance(identifier, IDLUnresolvedIdentifier)
IDLObjectWithIdentifier.__init__(self, location, None, identifier)
def finish(self, scope):
try:
scope._lookupIdentifier(self.identifier)
        except KeyError:
raise WebIDLError("Unresolved type '%s'." % self.identifier,
[self.location])
obj = self.identifier.resolve(scope, None)
return scope.lookupIdentifier(obj)
class IDLExposureMixins():
def __init__(self, location):
# _exposureGlobalNames are the global names listed in our [Exposed]
# extended attribute. exposureSet is the exposure set as defined in the
# Web IDL spec: it contains interface names.
self._exposureGlobalNames = set()
self.exposureSet = set()
self._location = location
self._globalScope = None
def finish(self, scope):
assert scope.parentScope is None
self._globalScope = scope
# Verify that our [Exposed] value, if any, makes sense.
for globalName in self._exposureGlobalNames:
if globalName not in scope.globalNames:
raise WebIDLError("Unknown [Exposed] value %s" % globalName,
[self._location])
if len(self._exposureGlobalNames) == 0:
self._exposureGlobalNames.add(scope.primaryGlobalName)
globalNameSetToExposureSet(scope, self._exposureGlobalNames,
self.exposureSet)
def isExposedInWindow(self):
return 'Window' in self.exposureSet
def isExposedInAnyWorker(self):
return len(self.getWorkerExposureSet()) > 0
def isExposedInSystemGlobals(self):
return 'BackstagePass' in self.exposureSet
def isExposedInSomeButNotAllWorkers(self):
"""
Returns true if the Exposed extended attribute for this interface
exposes it in some worker globals but not others. The return value does
not depend on whether the interface is exposed in Window or System
globals.
"""
if not self.isExposedInAnyWorker():
return False
workerScopes = self.parentScope.globalNameMapping["Worker"]
return len(workerScopes.difference(self.exposureSet)) > 0
def getWorkerExposureSet(self):
workerScopes = self._globalScope.globalNameMapping["Worker"]
return workerScopes.intersection(self.exposureSet)
class IDLExternalInterface(IDLObjectWithIdentifier, IDLExposureMixins):
    def __init__(self, location, parentScope, identifier):
        # self.location is never set (we raise before any base __init__ runs),
        # so report the location argument directly.
        raise WebIDLError("Servo does not support external interfaces.",
                          [location])
class IDLPartialInterface(IDLObject):
def __init__(self, location, name, members, nonPartialInterface):
assert isinstance(name, IDLUnresolvedIdentifier)
IDLObject.__init__(self, location)
self.identifier = name
self.members = members
# propagatedExtendedAttrs are the ones that should get
# propagated to our non-partial interface.
self.propagatedExtendedAttrs = []
self._nonPartialInterface = nonPartialInterface
self._finished = False
nonPartialInterface.addPartialInterface(self)
def addExtendedAttributes(self, attrs):
for attr in attrs:
identifier = attr.identifier()
if identifier in ["Constructor", "NamedConstructor"]:
self.propagatedExtendedAttrs.append(attr)
elif identifier == "Exposed":
# This just gets propagated to all our members.
for member in self.members:
if len(member._exposureGlobalNames) != 0:
raise WebIDLError("[Exposed] specified on both a "
"partial interface member and on the "
"partial interface itself",
[member.location, attr.location])
member.addExtendedAttributes([attr])
else:
raise WebIDLError("Unknown extended attribute %s on partial "
"interface" % identifier,
[attr.location])
def finish(self, scope):
if self._finished:
return
self._finished = True
# Need to make sure our non-partial interface gets finished so it can
# report cases when we only have partial interfaces.
self._nonPartialInterface.finish(scope)
def validate(self):
pass
def convertExposedAttrToGlobalNameSet(exposedAttr, targetSet):
assert len(targetSet) == 0
if exposedAttr.hasValue():
targetSet.add(exposedAttr.value())
else:
assert exposedAttr.hasArgs()
targetSet.update(exposedAttr.args())
def globalNameSetToExposureSet(globalScope, nameSet, exposureSet):
for name in nameSet:
exposureSet.update(globalScope.globalNameMapping[name])
class IDLInterface(IDLObjectWithScope, IDLExposureMixins):
def __init__(self, location, parentScope, name, parent, members,
isKnownNonPartial):
assert isinstance(parentScope, IDLScope)
assert isinstance(name, IDLUnresolvedIdentifier)
assert isKnownNonPartial or not parent
assert isKnownNonPartial or len(members) == 0
self.parent = None
self._callback = False
self._finished = False
self.members = []
self.maplikeOrSetlike = None
self._partialInterfaces = []
self._extendedAttrDict = {}
# namedConstructors needs deterministic ordering because bindings code
# outputs the constructs in the order that namedConstructors enumerates
# them.
self.namedConstructors = list()
self.implementedInterfaces = set()
self._consequential = False
self._isKnownNonPartial = False
# self.interfacesBasedOnSelf is the set of interfaces that inherit from
# self or have self as a consequential interface, including self itself.
# Used for distinguishability checking.
self.interfacesBasedOnSelf = set([self])
# self.interfacesImplementingSelf is the set of interfaces that directly
# have self as a consequential interface
self.interfacesImplementingSelf = set()
self._hasChildInterfaces = False
self._isOnGlobalProtoChain = False
# Tracking of the number of reserved slots we need for our
# members and those of ancestor interfaces.
self.totalMembersInSlots = 0
# Tracking of the number of own own members we have in slots
self._ownMembersInSlots = 0
IDLObjectWithScope.__init__(self, location, parentScope, name)
IDLExposureMixins.__init__(self, location)
if isKnownNonPartial:
self.setNonPartial(location, parent, members)
def __str__(self):
return "Interface '%s'" % self.identifier.name
def ctor(self):
identifier = IDLUnresolvedIdentifier(self.location, "constructor",
allowForbidden=True)
try:
return self._lookupIdentifier(identifier)
        except KeyError:
return None
def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
assert isinstance(scope, IDLScope)
assert isinstance(originalObject, IDLInterfaceMember)
assert isinstance(newObject, IDLInterfaceMember)
retval = IDLScope.resolveIdentifierConflict(self, scope, identifier,
originalObject, newObject)
# Might be a ctor, which isn't in self.members
if newObject in self.members:
self.members.remove(newObject)
return retval
def finish(self, scope):
if self._finished:
return
self._finished = True
if not self._isKnownNonPartial:
raise WebIDLError("Interface %s does not have a non-partial "
"declaration" % self.identifier.name,
[self.location])
IDLExposureMixins.finish(self, scope)
# Now go ahead and merge in our partial interfaces.
for partial in self._partialInterfaces:
partial.finish(scope)
self.addExtendedAttributes(partial.propagatedExtendedAttrs)
self.members.extend(partial.members)
# Generate maplike/setlike interface members. Since generated members
# need to be treated like regular interface members, do this before
# things like exposure setting.
for member in self.members:
if member.isMaplikeOrSetlike():
# Check that we only have one interface declaration (currently
# there can only be one maplike/setlike declaration per
# interface)
if self.maplikeOrSetlike:
raise WebIDLError("%s declaration used on "
"interface that already has %s "
"declaration" %
(member.maplikeOrSetlikeType,
self.maplikeOrSetlike.maplikeOrSetlikeType),
[self.maplikeOrSetlike.location,
member.location])
self.maplikeOrSetlike = member
# If we've got a maplike or setlike declaration, we'll be building all of
# our required methods in Codegen. Generate members now.
self.maplikeOrSetlike.expand(self.members, self.isJSImplemented())
# Now that we've merged in our partial interfaces, set the
# _exposureGlobalNames on any members that don't have it set yet. Note
# that any partial interfaces that had [Exposed] set have already set up
# _exposureGlobalNames on all the members coming from them, so this is
# just implementing the "members default to interface that defined them"
# and "partial interfaces default to interface they're a partial for"
# rules from the spec.
for m in self.members:
# If m, or the partial interface m came from, had [Exposed]
# specified, it already has a nonempty exposure global names set.
if len(m._exposureGlobalNames) == 0:
m._exposureGlobalNames.update(self._exposureGlobalNames)
assert not self.parent or isinstance(self.parent, IDLIdentifierPlaceholder)
parent = self.parent.finish(scope) if self.parent else None
if parent and isinstance(parent, IDLExternalInterface):
raise WebIDLError("%s inherits from %s which does not have "
"a definition" %
(self.identifier.name,
self.parent.identifier.name),
[self.location])
assert not parent or isinstance(parent, IDLInterface)
self.parent = parent
assert iter(self.members)
if self.parent:
self.parent.finish(scope)
self.parent._hasChildInterfaces = True
self.totalMembersInSlots = self.parent.totalMembersInSlots
# Interfaces with [Global] or [PrimaryGlobal] must not
# have anything inherit from them
if (self.parent.getExtendedAttribute("Global") or
self.parent.getExtendedAttribute("PrimaryGlobal")):
# Note: This is not a self.parent.isOnGlobalProtoChain() check
# because ancestors of a [Global] interface can have other
# descendants.
raise WebIDLError("[Global] interface has another interface "
"inheriting from it",
[self.location, self.parent.location])
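            # Illustrative (hypothetical) IDL rejected here:
            #
            #   [Global] interface MyGlobal {};
            #   interface Derived : MyGlobal {};   // nothing may inherit a [Global]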
# Make sure that we're not exposed in places where our parent is not
if not self.exposureSet.issubset(self.parent.exposureSet):
raise WebIDLError("Interface %s is exposed in globals where its "
"parent interface %s is not exposed." %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
# Callbacks must not inherit from non-callbacks or inherit from
# anything that has consequential interfaces.
# XXXbz Can non-callbacks inherit from callbacks? Spec issue pending.
# XXXbz Can callbacks have consequential interfaces? Spec issue pending
if self.isCallback():
if not self.parent.isCallback():
raise WebIDLError("Callback interface %s inheriting from "
"non-callback interface %s" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
elif self.parent.isCallback():
raise WebIDLError("Non-callback interface %s inheriting from "
"callback interface %s" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
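            # Illustrative (hypothetical) IDL rejected by the checks above:
            #
            #   callback interface Listener {};
            #   interface Widget : Listener {};   // non-callback : callback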
# Interfaces which have interface objects can't inherit
# from [NoInterfaceObject] interfaces.
if (self.parent.getExtendedAttribute("NoInterfaceObject") and
not self.getExtendedAttribute("NoInterfaceObject")):
raise WebIDLError("Interface %s does not have "
"[NoInterfaceObject] but inherits from "
"interface %s which does" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
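            # Illustrative (hypothetical) IDL rejected here:
            #
            #   [NoInterfaceObject] interface Hidden {};
            #   interface Visible : Hidden {};   // Visible would need
            #                                    // [NoInterfaceObject] too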
for iface in self.implementedInterfaces:
iface.finish(scope)
cycleInGraph = self.findInterfaceLoopPoint(self)
if cycleInGraph:
raise WebIDLError("Interface %s has itself as ancestor or "
"implemented interface" % self.identifier.name,
[self.location, cycleInGraph.location])
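        # Illustrative (hypothetical) IDL with such a loop:
        #
        #   interface A : B {};
        #   interface B : A {};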
if self.isCallback():
# "implements" should have made sure we have no
# consequential interfaces.
assert len(self.getConsequentialInterfaces()) == 0
# And that we're not consequential.
assert not self.isConsequential()
# Now resolve() and finish() our members before importing the
# ones from our implemented interfaces.
# resolve() will modify self.members, so we need to iterate
# over a copy of the member list here.
for member in list(self.members):
member.resolve(self)
for member in self.members:
member.finish(scope)
# Now that we've finished our members, which has updated their exposure
# sets, make sure they aren't exposed in places where we are not.
for member in self.members:
if not member.exposureSet.issubset(self.exposureSet):
raise WebIDLError("Interface member has larger exposure set "
"than the interface itself",
[member.location, self.location])
ctor = self.ctor()
if ctor is not None:
assert len(ctor._exposureGlobalNames) == 0
ctor._exposureGlobalNames.update(self._exposureGlobalNames)
ctor.finish(scope)
for ctor in self.namedConstructors:
assert len(ctor._exposureGlobalNames) == 0
ctor._exposureGlobalNames.update(self._exposureGlobalNames)
ctor.finish(scope)
# Make a copy of our member list, so things that implement us
# can get those without all the stuff we implement ourselves
# admixed.
self.originalMembers = list(self.members)
# Import everything from our consequential interfaces into
# self.members. Sort our consequential interfaces by name
# just so we have a consistent order.
for iface in sorted(self.getConsequentialInterfaces(),
cmp=cmp,
key=lambda x: x.identifier.name):
# Flag the interface as being someone's consequential interface
iface.setIsConsequentialInterfaceOf(self)
# Verify that we're not exposed somewhere where iface is not exposed
if not self.exposureSet.issubset(iface.exposureSet):
raise WebIDLError("Interface %s is exposed in globals where its "
"consequential interface %s is not exposed." %
(self.identifier.name, iface.identifier.name),
[self.location, iface.location])
# If we have a maplike or setlike, and the consequential interface
# also does, throw an error.
if iface.maplikeOrSetlike and self.maplikeOrSetlike:
raise WebIDLError("Maplike/setlike interface %s cannot have "
"maplike/setlike interface %s as a "
"consequential interface" %
(self.identifier.name,
iface.identifier.name),
[self.maplikeOrSetlike.location,
iface.maplikeOrSetlike.location])
additionalMembers = iface.originalMembers
for additionalMember in additionalMembers:
for member in self.members:
if additionalMember.identifier.name == member.identifier.name:
raise WebIDLError(
"Multiple definitions of %s on %s coming from 'implements' statements" %
(member.identifier.name, self),
[additionalMember.location, member.location])
self.members.extend(additionalMembers)
iface.interfacesImplementingSelf.add(self)
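            # Illustrative (hypothetical) IDL hitting the collision check above:
            #
            #   interface Mixin { void frob(); };
            #   interface Host { void frob(); };
            #   Host implements Mixin;   // two definitions of frob()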
for ancestor in self.getInheritedInterfaces():
ancestor.interfacesBasedOnSelf.add(self)
if (ancestor.maplikeOrSetlike is not None and
self.maplikeOrSetlike is not None):
raise WebIDLError("Cannot have maplike/setlike on %s that "
"inherits %s, which is already "
"maplike/setlike" %
(self.identifier.name,
ancestor.identifier.name),
[self.maplikeOrSetlike.location,
ancestor.maplikeOrSetlike.location])
for ancestorConsequential in ancestor.getConsequentialInterfaces():
ancestorConsequential.interfacesBasedOnSelf.add(self)
# Deal with interfaces marked [Unforgeable], now that we have our full
# member list, except unforgeables pulled in from parents. We want to
# do this before we set "originatingInterface" on our unforgeable
# members.
if self.getExtendedAttribute("Unforgeable"):
# Check that the interface already has all the things the
# spec would otherwise require us to synthesize and is
# missing the ones we plan to synthesize.
if not any(m.isMethod() and m.isStringifier() for m in self.members):
raise WebIDLError("Unforgeable interface %s does not have a "
"stringifier" % self.identifier.name,
[self.location])
for m in self.members:
if ((m.isMethod() and m.isJsonifier()) or
m.identifier.name == "toJSON"):
raise WebIDLError("Unforgeable interface %s has a "
"jsonifier so we won't be able to add "
"one ourselves" % self.identifier.name,
[self.location, m.location])
if m.identifier.name == "valueOf" and not m.isStatic():
raise WebIDLError("Unforgeable interface %s has a valueOf "
"member so we won't be able to add one "
"ourselves" % self.identifier.name,
[self.location, m.location])
for member in self.members:
if ((member.isAttr() or member.isMethod()) and
member.isUnforgeable() and
not hasattr(member, "originatingInterface")):
member.originatingInterface = self
# Compute slot indices for our members before we pull in unforgeable
# members from our parent. Also, maplike/setlike declarations get a
# slot to hold their backing object.
for member in self.members:
if ((member.isAttr() and
(member.getExtendedAttribute("StoreInSlot") or
member.getExtendedAttribute("Cached"))) or
member.isMaplikeOrSetlike()):
member.slotIndex = self.totalMembersInSlots
self.totalMembersInSlots += 1
if member.getExtendedAttribute("StoreInSlot"):
self._ownMembersInSlots += 1
if self.parent:
# Make sure we don't shadow any of the [Unforgeable] attributes on
# our ancestor interfaces. We don't have to worry about
# consequential interfaces here, because those have already been
# imported into the relevant .members lists. And we don't have to
# worry about anything other than our parent, because it has already
            # imported its ancestors' unforgeable attributes into its member
# list.
for unforgeableMember in (member for member in self.parent.members if
(member.isAttr() or member.isMethod()) and
member.isUnforgeable()):
shadows = [m for m in self.members if
(m.isAttr() or m.isMethod()) and
not m.isStatic() and
m.identifier.name == unforgeableMember.identifier.name]
if len(shadows) != 0:
locs = [unforgeableMember.location] + [s.location for s
in shadows]
raise WebIDLError("Interface %s shadows [Unforgeable] "
"members of %s" %
(self.identifier.name,
ancestor.identifier.name),
locs)
# And now just stick it in our members, since we won't be
# inheriting this down the proto chain. If we really cared we
# could try to do something where we set up the unforgeable
# attributes/methods of ancestor interfaces, with their
# corresponding getters, on our interface, but that gets pretty
# complicated and seems unnecessary.
self.members.append(unforgeableMember)
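            # Illustrative (hypothetical) IDL rejected by the shadowing check:
            #
            #   interface Parent { [Unforgeable] readonly attribute long x; };
            #   interface Child : Parent { attribute long x; };   // shadows x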
# At this point, we have all of our members. If the current interface
# uses maplike/setlike, check for collisions anywhere in the current
# interface or higher in the inheritance chain.
if self.maplikeOrSetlike:
testInterface = self
isAncestor = False
while testInterface:
self.maplikeOrSetlike.checkCollisions(testInterface.members,
isAncestor)
isAncestor = True
testInterface = testInterface.parent
# Ensure that there's at most one of each {named,indexed}
# {getter,setter,creator,deleter}, at most one stringifier,
# and at most one legacycaller. Note that this last is not
# quite per spec, but in practice no one overloads
# legacycallers.
specialMembersSeen = {}
for member in self.members:
if not member.isMethod():
continue
if member.isGetter():
memberType = "getters"
elif member.isSetter():
memberType = "setters"
elif member.isCreator():
memberType = "creators"
elif member.isDeleter():
memberType = "deleters"
elif member.isStringifier():
memberType = "stringifiers"
elif member.isJsonifier():
memberType = "jsonifiers"
elif member.isLegacycaller():
memberType = "legacycallers"
else:
continue
if (memberType != "stringifiers" and memberType != "legacycallers" and
memberType != "jsonifiers"):
if member.isNamed():
memberType = "named " + memberType
else:
assert member.isIndexed()
memberType = "indexed " + memberType
if memberType in specialMembersSeen:
raise WebIDLError("Multiple " + memberType + " on %s" % (self),
[self.location,
specialMembersSeen[memberType].location,
member.location])
specialMembersSeen[memberType] = member
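        # Illustrative (hypothetical) IDL rejected by the bookkeeping above:
        #
        #   interface Collection {
        #     getter any item(unsigned long index);
        #     getter any legacyItem(unsigned long index);  // second indexed getter
        #   };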
if self._isOnGlobalProtoChain:
# Make sure we have no named setters, creators, or deleters
for memberType in ["setter", "creator", "deleter"]:
memberId = "named " + memberType + "s"
if memberId in specialMembersSeen:
raise WebIDLError("Interface with [Global] has a named %s" %
memberType,
[self.location,
specialMembersSeen[memberId].location])
# Make sure we're not [OverrideBuiltins]
if self.getExtendedAttribute("OverrideBuiltins"):
raise WebIDLError("Interface with [Global] also has "
"[OverrideBuiltins]",
[self.location])
# Mark all of our ancestors as being on the global's proto chain too
parent = self.parent
while parent:
# Must not inherit from an interface with [OverrideBuiltins]
if parent.getExtendedAttribute("OverrideBuiltins"):
raise WebIDLError("Interface with [Global] inherits from "
"interface with [OverrideBuiltins]",
[self.location, parent.location])
parent._isOnGlobalProtoChain = True
parent = parent.parent
def validate(self):
# We don't support consequential unforgeable interfaces. Need to check
        # this here, because in finish() an interface might not know yet that
# it's consequential.
if self.getExtendedAttribute("Unforgeable") and self.isConsequential():
raise WebIDLError(
"%s is an unforgeable consequential interface" %
self.identifier.name,
[self.location] +
list(i.location for i in
(self.interfacesBasedOnSelf - {self})))
# We also don't support inheriting from unforgeable interfaces.
if self.getExtendedAttribute("Unforgeable") and self.hasChildInterfaces():
locations = ([self.location] +
list(i.location for i in
self.interfacesBasedOnSelf if i.parent == self))
raise WebIDLError("%s is an unforgeable ancestor interface" %
self.identifier.name,
locations)
for member in self.members:
member.validate()
if self.isCallback() and member.getExtendedAttribute("Replaceable"):
raise WebIDLError("[Replaceable] used on an attribute on "
"interface %s which is a callback interface" %
self.identifier.name,
[self.location, member.location])
# Check that PutForwards refers to another attribute and that no
# cycles exist in forwarded assignments.
if member.isAttr():
iface = self
attr = member
putForwards = attr.getExtendedAttribute("PutForwards")
if putForwards and self.isCallback():
raise WebIDLError("[PutForwards] used on an attribute "
"on interface %s which is a callback "
"interface" % self.identifier.name,
[self.location, member.location])
while putForwards is not None:
forwardIface = attr.type.unroll().inner
                    forwardAttr = None
for forwardedMember in forwardIface.members:
if (not forwardedMember.isAttr() or
forwardedMember.identifier.name != putForwards[0]):
continue
if forwardedMember == member:
raise WebIDLError("Cycle detected in forwarded "
"assignments for attribute %s on "
"%s" %
(member.identifier.name, self),
[member.location])
                        forwardAttr = forwardedMember
break
                    if forwardAttr is None:
raise WebIDLError("Attribute %s on %s forwards to "
"missing attribute %s" %
(attr.identifier.name, iface, putForwards),
[attr.location])
iface = forwardIface
                    attr = forwardAttr
putForwards = attr.getExtendedAttribute("PutForwards")
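                # Illustrative (hypothetical) IDL for such a forwarding cycle:
                #
                #   interface A { [PutForwards=a] readonly attribute B b; };
                #   interface B { [PutForwards=b] readonly attribute A a; };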
# Check that the name of an [Alias] doesn't conflict with an
# interface member.
if member.isMethod():
for alias in member.aliases:
if self.isOnGlobalProtoChain():
raise WebIDLError("[Alias] must not be used on a "
"[Global] interface operation",
[member.location])
if (member.getExtendedAttribute("Exposed") or
member.getExtendedAttribute("ChromeOnly") or
member.getExtendedAttribute("Pref") or
member.getExtendedAttribute("Func") or
member.getExtendedAttribute("AvailableIn") or
member.getExtendedAttribute("CheckAnyPermissions") or
member.getExtendedAttribute("CheckAllPermissions")):
raise WebIDLError("[Alias] must not be used on a "
"conditionally exposed operation",
[member.location])
if member.isStatic():
raise WebIDLError("[Alias] must not be used on a "
"static operation",
[member.location])
if member.isIdentifierLess():
raise WebIDLError("[Alias] must not be used on an "
"identifierless operation",
[member.location])
if member.isUnforgeable():
raise WebIDLError("[Alias] must not be used on an "
"[Unforgeable] operation",
[member.location])
for m in self.members:
if m.identifier.name == alias:
raise WebIDLError("[Alias=%s] has same name as "
"interface member" % alias,
[member.location, m.location])
if m.isMethod() and m != member and alias in m.aliases:
raise WebIDLError("duplicate [Alias=%s] definitions" %
alias,
[member.location, m.location])
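            # Illustrative (hypothetical) IDL rejected by the [Alias] checks above:
            #
            #   interface Thing {
            #     void frob();
            #     [Alias=frob] void frobble();   // alias collides with a member name
            #   };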
if (self.getExtendedAttribute("Pref") and
self._exposureGlobalNames != set([self.parentScope.primaryGlobalName])):
raise WebIDLError("[Pref] used on an interface that is not %s-only" %
self.parentScope.primaryGlobalName,
[self.location])
for attribute in ["CheckAnyPermissions", "CheckAllPermissions"]:
if (self.getExtendedAttribute(attribute) and
self._exposureGlobalNames != set([self.parentScope.primaryGlobalName])):
raise WebIDLError("[%s] used on an interface that is "
"not %s-only" %
(attribute, self.parentScope.primaryGlobalName),
[self.location])
# Conditional exposure makes no sense for interfaces with no
# interface object, unless they're navigator properties.
if (self.isExposedConditionally() and
not self.hasInterfaceObject() and
not self.getNavigatorProperty()):
raise WebIDLError("Interface with no interface object is "
"exposed conditionally",
[self.location])
def isInterface(self):
return True
def isExternal(self):
return False
def setIsConsequentialInterfaceOf(self, other):
self._consequential = True
self.interfacesBasedOnSelf.add(other)
def isConsequential(self):
return self._consequential
def setCallback(self, value):
self._callback = value
def isCallback(self):
return self._callback
def isSingleOperationInterface(self):
assert self.isCallback() or self.isJSImplemented()
return (
# JS-implemented things should never need the
# this-handling weirdness of single-operation interfaces.
not self.isJSImplemented() and
# Not inheriting from another interface
not self.parent and
# No consequential interfaces
len(self.getConsequentialInterfaces()) == 0 and
# No attributes of any kinds
not any(m.isAttr() for m in self.members) and
# There is at least one regular operation, and all regular
# operations have the same identifier
len(set(m.identifier.name for m in self.members if
m.isMethod() and not m.isStatic())) == 1)
def inheritanceDepth(self):
depth = 0
parent = self.parent
while parent:
depth = depth + 1
parent = parent.parent
return depth
def hasConstants(self):
return any(m.isConst() for m in self.members)
def hasInterfaceObject(self):
if self.isCallback():
return self.hasConstants()
return not hasattr(self, "_noInterfaceObject")
def hasInterfacePrototypeObject(self):
return not self.isCallback() and self.getUserData('hasConcreteDescendant', False)
def addExtendedAttributes(self, attrs):
for attr in attrs:
identifier = attr.identifier()
# Special cased attrs
if identifier == "TreatNonCallableAsNull":
raise WebIDLError("TreatNonCallableAsNull cannot be specified on interfaces",
[attr.location, self.location])
if identifier == "TreatNonObjectAsNull":
raise WebIDLError("TreatNonObjectAsNull cannot be specified on interfaces",
[attr.location, self.location])
elif identifier == "NoInterfaceObject":
if not attr.noArguments():
raise WebIDLError("[NoInterfaceObject] must take no arguments",
[attr.location])
if self.ctor():
raise WebIDLError("Constructor and NoInterfaceObject are incompatible",
[self.location])
self._noInterfaceObject = True
elif identifier == "Constructor" or identifier == "NamedConstructor" or identifier == "ChromeConstructor":
if identifier == "Constructor" and not self.hasInterfaceObject():
raise WebIDLError(str(identifier) + " and NoInterfaceObject are incompatible",
[self.location])
if identifier == "NamedConstructor" and not attr.hasValue():
raise WebIDLError("NamedConstructor must either take an identifier or take a named argument list",
[attr.location])
if identifier == "ChromeConstructor" and not self.hasInterfaceObject():
raise WebIDLError(str(identifier) + " and NoInterfaceObject are incompatible",
[self.location])
args = attr.args() if attr.hasArgs() else []
if self.identifier.name == "Promise":
promiseType = BuiltinTypes[IDLBuiltinType.Types.any]
else:
promiseType = None
retType = IDLWrapperType(self.location, self, promiseType)
if identifier == "Constructor" or identifier == "ChromeConstructor":
name = "constructor"
allowForbidden = True
else:
name = attr.value()
allowForbidden = False
methodIdentifier = IDLUnresolvedIdentifier(self.location, name,
allowForbidden=allowForbidden)
method = IDLMethod(self.location, methodIdentifier, retType,
args, static=True)
# Constructors are always NewObject and are always
# assumed to be able to throw (since there's no way to
# indicate otherwise) and never have any other
# extended attributes.
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("NewObject",)),
IDLExtendedAttribute(self.location, ("Throws",))])
if identifier == "ChromeConstructor":
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("ChromeOnly",))])
if identifier == "Constructor" or identifier == "ChromeConstructor":
method.resolve(self)
else:
# We need to detect conflicts for NamedConstructors across
# interfaces. We first call resolve on the parentScope,
# which will merge all NamedConstructors with the same
                    # identifier across interfaces as overloads.
method.resolve(self.parentScope)
# Then we look up the identifier on the parentScope. If the
# result is the same as the method we're adding then it
# hasn't been added as an overload and it's the first time
# we've encountered a NamedConstructor with that identifier.
# If the result is not the same as the method we're adding
# then it has been added as an overload and we need to check
# whether the result is actually one of our existing
# NamedConstructors.
newMethod = self.parentScope.lookupIdentifier(method.identifier)
if newMethod == method:
self.namedConstructors.append(method)
elif newMethod not in self.namedConstructors:
raise WebIDLError("NamedConstructor conflicts with a NamedConstructor of a different interface",
[method.location, newMethod.location])
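                    # Illustrative (hypothetical) IDL for such a conflict:
                    #
                    #   [NamedConstructor=Thing] interface A {};
                    #   [NamedConstructor=Thing] interface B {};   // same named
                    #                                              // constructor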
elif (identifier == "ArrayClass"):
if not attr.noArguments():
raise WebIDLError("[ArrayClass] must take no arguments",
[attr.location])
if self.parent:
raise WebIDLError("[ArrayClass] must not be specified on "
"an interface with inherited interfaces",
[attr.location, self.location])
elif (identifier == "ExceptionClass"):
if not attr.noArguments():
raise WebIDLError("[ExceptionClass] must take no arguments",
[attr.location])
if self.parent:
raise WebIDLError("[ExceptionClass] must not be specified on "
"an interface with inherited interfaces",
[attr.location, self.location])
elif identifier == "Global":
if attr.hasValue():
self.globalNames = [attr.value()]
elif attr.hasArgs():
self.globalNames = attr.args()
else:
self.globalNames = [self.identifier.name]
self.parentScope.globalNames.update(self.globalNames)
for globalName in self.globalNames:
self.parentScope.globalNameMapping[globalName].add(self.identifier.name)
self._isOnGlobalProtoChain = True
elif identifier == "PrimaryGlobal":
if not attr.noArguments():
raise WebIDLError("[PrimaryGlobal] must take no arguments",
[attr.location])
if self.parentScope.primaryGlobalAttr is not None:
raise WebIDLError(
"[PrimaryGlobal] specified twice",
[attr.location,
self.parentScope.primaryGlobalAttr.location])
self.parentScope.primaryGlobalAttr = attr
self.parentScope.primaryGlobalName = self.identifier.name
self.parentScope.globalNames.add(self.identifier.name)
self.parentScope.globalNameMapping[self.identifier.name].add(self.identifier.name)
self._isOnGlobalProtoChain = True
elif (identifier == "NeedResolve" or
identifier == "OverrideBuiltins" or
identifier == "ChromeOnly" or
identifier == "Unforgeable" or
identifier == "UnsafeInPrerendering" or
identifier == "LegacyEventInit" or
identifier == "Abstract"):
# Known extended attributes that do not take values
if not attr.noArguments():
raise WebIDLError("[%s] must take no arguments" % identifier,
[attr.location])
elif identifier == "Exposed":
convertExposedAttrToGlobalNameSet(attr,
self._exposureGlobalNames)
elif (identifier == "Pref" or
identifier == "JSImplementation" or
identifier == "HeaderFile" or
identifier == "NavigatorProperty" or
identifier == "AvailableIn" or
identifier == "Func" or
identifier == "CheckAnyPermissions" or
identifier == "CheckAllPermissions"):
# Known extended attributes that take a string value
if not attr.hasValue():
raise WebIDLError("[%s] must have a value" % identifier,
[attr.location])
else:
raise WebIDLError("Unknown extended attribute %s on interface" % identifier,
[attr.location])
attrlist = attr.listValue()
self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
def addImplementedInterface(self, implementedInterface):
assert(isinstance(implementedInterface, IDLInterface))
self.implementedInterfaces.add(implementedInterface)
def getInheritedInterfaces(self):
"""
Returns a list of the interfaces this interface inherits from
(not including this interface itself). The list is in order
from most derived to least derived.
"""
assert(self._finished)
if not self.parent:
return []
parentInterfaces = self.parent.getInheritedInterfaces()
parentInterfaces.insert(0, self.parent)
return parentInterfaces
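        # For example, with (hypothetical) interfaces "C : B" and "B : A",
        # C.getInheritedInterfaces() returns [B, A].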
def getConsequentialInterfaces(self):
assert(self._finished)
# The interfaces we implement directly
consequentialInterfaces = set(self.implementedInterfaces)
# And their inherited interfaces
for iface in self.implementedInterfaces:
consequentialInterfaces |= set(iface.getInheritedInterfaces())
# And now collect up the consequential interfaces of all of those
temp = set()
for iface in consequentialInterfaces:
temp |= iface.getConsequentialInterfaces()
return consequentialInterfaces | temp
def findInterfaceLoopPoint(self, otherInterface):
"""
Finds an interface, amongst our ancestors and consequential interfaces,
that inherits from otherInterface or implements otherInterface
directly. If there is no such interface, returns None.
"""
if self.parent:
if self.parent == otherInterface:
return self
loopPoint = self.parent.findInterfaceLoopPoint(otherInterface)
if loopPoint:
return loopPoint
if otherInterface in self.implementedInterfaces:
return self
for iface in self.implementedInterfaces:
loopPoint = iface.findInterfaceLoopPoint(otherInterface)
if loopPoint:
return loopPoint
return None
def getExtendedAttribute(self, name):
return self._extendedAttrDict.get(name, None)
def setNonPartial(self, location, parent, members):
assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
if self._isKnownNonPartial:
raise WebIDLError("Two non-partial definitions for the "
"same interface",
[location, self.location])
self._isKnownNonPartial = True
# Now make it look like we were parsed at this new location, since
# that's the place where the interface is "really" defined
self.location = location
assert not self.parent
self.parent = parent
# Put the new members at the beginning
self.members = members + self.members
def addPartialInterface(self, partial):
assert self.identifier.name == partial.identifier.name
self._partialInterfaces.append(partial)
def getJSImplementation(self):
classId = self.getExtendedAttribute("JSImplementation")
if not classId:
return classId
assert isinstance(classId, list)
assert len(classId) == 1
return classId[0]
def isJSImplemented(self):
return bool(self.getJSImplementation())
def getNavigatorProperty(self):
naviProp = self.getExtendedAttribute("NavigatorProperty")
if not naviProp:
return None
assert len(naviProp) == 1
assert isinstance(naviProp, list)
assert len(naviProp[0]) != 0
return naviProp[0]
def hasChildInterfaces(self):
return self._hasChildInterfaces
def isOnGlobalProtoChain(self):
return self._isOnGlobalProtoChain
def _getDependentObjects(self):
deps = set(self.members)
deps.update(self.implementedInterfaces)
if self.parent:
deps.add(self.parent)
return deps
def hasMembersInSlots(self):
return self._ownMembersInSlots != 0
def isExposedConditionally(self):
return (self.getExtendedAttribute("Pref") or
self.getExtendedAttribute("ChromeOnly") or
self.getExtendedAttribute("Func") or
self.getExtendedAttribute("AvailableIn") or
self.getExtendedAttribute("CheckAnyPermissions") or
self.getExtendedAttribute("CheckAllPermissions"))
class IDLDictionary(IDLObjectWithScope):
def __init__(self, location, parentScope, name, parent, members):
assert isinstance(parentScope, IDLScope)
assert isinstance(name, IDLUnresolvedIdentifier)
assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
self.parent = parent
self._finished = False
self.members = list(members)
IDLObjectWithScope.__init__(self, location, parentScope, name)
def __str__(self):
return "Dictionary '%s'" % self.identifier.name
def isDictionary(self):
return True
def canBeEmpty(self):
"""
Returns true if this dictionary can be empty (that is, it has no
required members and neither do any of its ancestors).
"""
return (all(member.optional for member in self.members) and
(not self.parent or self.parent.canBeEmpty()))
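        # For example, a (hypothetical) "dictionary D { long x; };" can be
        # empty, while "dictionary E { required long x; };" cannot.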
def finish(self, scope):
if self._finished:
return
self._finished = True
if self.parent:
assert isinstance(self.parent, IDLIdentifierPlaceholder)
oldParent = self.parent
self.parent = self.parent.finish(scope)
if not isinstance(self.parent, IDLDictionary):
raise WebIDLError("Dictionary %s has parent that is not a dictionary" %
self.identifier.name,
[oldParent.location, self.parent.location])
# Make sure the parent resolves all its members before we start
# looking at them.
self.parent.finish(scope)
for member in self.members:
member.resolve(self)
if not member.isComplete():
member.complete(scope)
assert member.type.isComplete()
# Members of a dictionary are sorted in lexicographic order
self.members.sort(cmp=cmp, key=lambda x: x.identifier.name)
inheritedMembers = []
ancestor = self.parent
while ancestor:
if ancestor == self:
raise WebIDLError("Dictionary %s has itself as an ancestor" %
self.identifier.name,
[self.identifier.location])
inheritedMembers.extend(ancestor.members)
ancestor = ancestor.parent
# Catch name duplication
for inheritedMember in inheritedMembers:
for member in self.members:
if member.identifier.name == inheritedMember.identifier.name:
raise WebIDLError("Dictionary %s has two members with name %s" %
(self.identifier.name, member.identifier.name),
[member.location, inheritedMember.location])
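        # Illustrative (hypothetical) IDL rejected by the check above:
        #
        #   dictionary Base { long size; };
        #   dictionary Derived : Base { long size; };   // duplicates Base.size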
def validate(self):
def typeContainsDictionary(memberType, dictionary):
"""
Returns a tuple whose:
- First element is a Boolean value indicating whether
memberType contains dictionary.
- Second element is:
A list of locations that leads from the type that was passed in
the memberType argument, to the dictionary being validated,
if the boolean value in the first element is True.
None, if the boolean value in the first element is False.
"""
if (memberType.nullable() or
memberType.isArray() or
memberType.isSequence() or
memberType.isMozMap()):
return typeContainsDictionary(memberType.inner, dictionary)
if memberType.isDictionary():
if memberType.inner == dictionary:
return (True, [memberType.location])
(contains, locations) = dictionaryContainsDictionary(memberType.inner,
dictionary)
if contains:
return (True, [memberType.location] + locations)
if memberType.isUnion():
for member in memberType.flatMemberTypes:
(contains, locations) = typeContainsDictionary(member, dictionary)
if contains:
return (True, locations)
return (False, None)
def dictionaryContainsDictionary(dictMember, dictionary):
for member in dictMember.members:
(contains, locations) = typeContainsDictionary(member.type, dictionary)
if contains:
return (True, [member.location] + locations)
if dictMember.parent:
if dictMember.parent == dictionary:
return (True, [dictMember.location])
else:
(contains, locations) = dictionaryContainsDictionary(dictMember.parent, dictionary)
if contains:
return (True, [dictMember.location] + locations)
return (False, None)
for member in self.members:
if member.type.isDictionary() and member.type.nullable():
raise WebIDLError("Dictionary %s has member with nullable "
"dictionary type" % self.identifier.name,
[member.location])
(contains, locations) = typeContainsDictionary(member.type, self)
if contains:
raise WebIDLError("Dictionary %s has member with itself as type." %
self.identifier.name,
[member.location] + locations)
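        # Illustrative (hypothetical) IDL rejected here:
        #
        #   dictionary Node { sequence<Node> children; };   // contains itself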
def module(self):
return self.location.filename().split('/')[-1].split('.webidl')[0] + 'Binding'
def addExtendedAttributes(self, attrs):
assert len(attrs) == 0
def _getDependentObjects(self):
deps = set(self.members)
if (self.parent):
deps.add(self.parent)
return deps
class IDLEnum(IDLObjectWithIdentifier):
def __init__(self, location, parentScope, name, values):
assert isinstance(parentScope, IDLScope)
assert isinstance(name, IDLUnresolvedIdentifier)
if len(values) != len(set(values)):
raise WebIDLError("Enum %s has multiple identical strings" % name.name,
[location])
IDLObjectWithIdentifier.__init__(self, location, parentScope, name)
self._values = values
def values(self):
return self._values
def finish(self, scope):
pass
def validate(self):
pass
def isEnum(self):
return True
def addExtendedAttributes(self, attrs):
assert len(attrs) == 0
def _getDependentObjects(self):
return set()
class IDLType(IDLObject):
Tags = enum(
# The integer types
'int8',
'uint8',
'int16',
'uint16',
'int32',
'uint32',
'int64',
'uint64',
# Additional primitive types
'bool',
'unrestricted_float',
'float',
'unrestricted_double',
# "double" last primitive type to match IDLBuiltinType
'double',
# Other types
'any',
'domstring',
'bytestring',
'usvstring',
'object',
'date',
'void',
# Funny stuff
'interface',
'dictionary',
'enum',
'callback',
'union',
'sequence',
'mozmap',
'array'
)
def __init__(self, location, name):
IDLObject.__init__(self, location)
self.name = name
self.builtin = False
def __eq__(self, other):
return other and self.builtin == other.builtin and self.name == other.name
def __ne__(self, other):
return not self == other
def __str__(self):
return str(self.name)
def isType(self):
return True
def nullable(self):
return False
def isPrimitive(self):
return False
def isBoolean(self):
return False
def isNumeric(self):
return False
def isString(self):
return False
def isByteString(self):
return False
def isDOMString(self):
return False
def isUSVString(self):
return False
def isVoid(self):
return self.name == "Void"
def isSequence(self):
return False
def isMozMap(self):
return False
def isArray(self):
return False
def isArrayBuffer(self):
return False
def isArrayBufferView(self):
return False
def isSharedArrayBuffer(self):
return False
def isSharedArrayBufferView(self):
return False
def isTypedArray(self):
return False
def isSharedTypedArray(self):
return False
def isCallbackInterface(self):
return False
def isNonCallbackInterface(self):
return False
def isGeckoInterface(self):
""" Returns a boolean indicating whether this type is an 'interface'
type that is implemented in Gecko. At the moment, this returns
true for all interface types that are not types from the TypedArray
spec."""
return self.isInterface() and not self.isSpiderMonkeyInterface()
def isSpiderMonkeyInterface(self):
""" Returns a boolean indicating whether this type is an 'interface'
type that is implemented in Spidermonkey. At the moment, this
only returns true for the types from the TypedArray spec. """
return self.isInterface() and (self.isArrayBuffer() or
self.isArrayBufferView() or
self.isSharedArrayBuffer() or
self.isSharedArrayBufferView() or
self.isTypedArray() or
self.isSharedTypedArray())
def isDictionary(self):
return False
def isInterface(self):
return False
def isAny(self):
return self.tag() == IDLType.Tags.any
def isDate(self):
return self.tag() == IDLType.Tags.date
def isObject(self):
return self.tag() == IDLType.Tags.object
def isPromise(self):
return False
def isComplete(self):
return True
def includesRestrictedFloat(self):
return False
def isFloat(self):
return False
def isUnrestricted(self):
# Should only call this on float types
assert self.isFloat()
def isSerializable(self):
return False
def tag(self):
assert False # Override me!
def treatNonCallableAsNull(self):
assert self.tag() == IDLType.Tags.callback
return self.nullable() and self.inner.callback._treatNonCallableAsNull
def treatNonObjectAsNull(self):
assert self.tag() == IDLType.Tags.callback
return self.nullable() and self.inner.callback._treatNonObjectAsNull
def addExtendedAttributes(self, attrs):
assert len(attrs) == 0
def resolveType(self, parentScope):
pass
def unroll(self):
return self
def isDistinguishableFrom(self, other):
raise TypeError("Can't tell whether a generic type is or is not "
"distinguishable from other things")
def isExposedInAllOf(self, exposureSet):
return True
class IDLUnresolvedType(IDLType):
"""
Unresolved types are interface types
"""
def __init__(self, location, name, promiseInnerType=None):
IDLType.__init__(self, location, name)
self._promiseInnerType = promiseInnerType
def isComplete(self):
return False
def complete(self, scope):
obj = None
try:
obj = scope._lookupIdentifier(self.name)
except:
raise WebIDLError("Unresolved type '%s'." % self.name,
[self.location])
assert obj
        assert not obj.isType(), "Unexpected type object %s" % obj
if obj.isTypedef():
assert self.name.name == obj.identifier.name
typedefType = IDLTypedefType(self.location, obj.innerType,
obj.identifier)
assert not typedefType.isComplete()
return typedefType.complete(scope)
elif obj.isCallback() and not obj.isInterface():
assert self.name.name == obj.identifier.name
return IDLCallbackType(self.location, obj)
if self._promiseInnerType and not self._promiseInnerType.isComplete():
self._promiseInnerType = self._promiseInnerType.complete(scope)
name = self.name.resolve(scope, None)
return IDLWrapperType(self.location, obj, self._promiseInnerType)
def isDistinguishableFrom(self, other):
raise TypeError("Can't tell whether an unresolved type is or is not "
"distinguishable from other things")
class IDLNullableType(IDLType):
def __init__(self, location, innerType):
assert not innerType.isVoid()
assert not innerType == BuiltinTypes[IDLBuiltinType.Types.any]
name = innerType.name
if innerType.isComplete():
name += "OrNull"
IDLType.__init__(self, location, name)
self.inner = innerType
self.builtin = False
def __eq__(self, other):
return isinstance(other, IDLNullableType) and self.inner == other.inner
def __str__(self):
return self.inner.__str__() + "OrNull"
def nullable(self):
return True
def isCallback(self):
return self.inner.isCallback()
def isPrimitive(self):
return self.inner.isPrimitive()
def isBoolean(self):
return self.inner.isBoolean()
def isNumeric(self):
return self.inner.isNumeric()
def isString(self):
return self.inner.isString()
def isByteString(self):
return self.inner.isByteString()
def isDOMString(self):
return self.inner.isDOMString()
def isUSVString(self):
return self.inner.isUSVString()
def isFloat(self):
return self.inner.isFloat()
def isUnrestricted(self):
return self.inner.isUnrestricted()
def includesRestrictedFloat(self):
return self.inner.includesRestrictedFloat()
def isInteger(self):
return self.inner.isInteger()
def isVoid(self):
return False
def isSequence(self):
return self.inner.isSequence()
def isMozMap(self):
return self.inner.isMozMap()
def isArray(self):
return self.inner.isArray()
def isArrayBuffer(self):
return self.inner.isArrayBuffer()
def isArrayBufferView(self):
return self.inner.isArrayBufferView()
def isSharedArrayBuffer(self):
return self.inner.isSharedArrayBuffer()
def isSharedArrayBufferView(self):
return self.inner.isSharedArrayBufferView()
def isTypedArray(self):
return self.inner.isTypedArray()
def isSharedTypedArray(self):
return self.inner.isSharedTypedArray()
def isDictionary(self):
return self.inner.isDictionary()
def isInterface(self):
return self.inner.isInterface()
def isPromise(self):
return self.inner.isPromise()
def isCallbackInterface(self):
return self.inner.isCallbackInterface()
def isNonCallbackInterface(self):
return self.inner.isNonCallbackInterface()
def isEnum(self):
return self.inner.isEnum()
def isUnion(self):
return self.inner.isUnion()
def isSerializable(self):
return self.inner.isSerializable()
def tag(self):
return self.inner.tag()
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
self.inner.resolveType(parentScope)
def isComplete(self):
return self.inner.isComplete()
def complete(self, scope):
self.inner = self.inner.complete(scope)
if self.inner.nullable():
raise WebIDLError("The inner type of a nullable type must not be "
"a nullable type",
[self.location, self.inner.location])
if self.inner.isUnion():
if self.inner.hasNullableType:
raise WebIDLError("The inner type of a nullable type must not "
"be a union type that itself has a nullable "
"type as a member type", [self.location])
self.name = self.inner.name + "OrNull"
return self
def unroll(self):
return self.inner.unroll()
def isDistinguishableFrom(self, other):
if (other.nullable() or (other.isUnion() and other.hasNullableType) or
other.isDictionary()):
# Can't tell which type null should become
return False
return self.inner.isDistinguishableFrom(other)
def _getDependentObjects(self):
return self.inner._getDependentObjects()
class IDLSequenceType(IDLType):
def __init__(self, location, parameterType):
assert not parameterType.isVoid()
IDLType.__init__(self, location, parameterType.name)
self.inner = parameterType
self.builtin = False
# Need to set self.name up front if our inner type is already complete,
# since in that case our .complete() won't be called.
if self.inner.isComplete():
self.name = self.inner.name + "Sequence"
def __eq__(self, other):
return isinstance(other, IDLSequenceType) and self.inner == other.inner
def __str__(self):
return self.inner.__str__() + "Sequence"
def nullable(self):
return False
def isPrimitive(self):
return False
def isString(self):
return False
def isByteString(self):
return False
def isDOMString(self):
return False
def isUSVString(self):
return False
def isVoid(self):
return False
def isSequence(self):
return True
def isArray(self):
return False
def isDictionary(self):
return False
def isInterface(self):
return False
def isEnum(self):
return False
def isSerializable(self):
return self.inner.isSerializable()
def includesRestrictedFloat(self):
return self.inner.includesRestrictedFloat()
def tag(self):
return IDLType.Tags.sequence
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
self.inner.resolveType(parentScope)
def isComplete(self):
return self.inner.isComplete()
def complete(self, scope):
self.inner = self.inner.complete(scope)
self.name = self.inner.name + "Sequence"
return self
def unroll(self):
return self.inner.unroll()
def isDistinguishableFrom(self, other):
if other.isPromise():
return False
if other.isUnion():
# Just forward to the union; it'll deal
return other.isDistinguishableFrom(self)
return (other.isPrimitive() or other.isString() or other.isEnum() or
other.isDate() or other.isInterface() or
other.isDictionary() or
other.isCallback() or other.isMozMap())
def _getDependentObjects(self):
return self.inner._getDependentObjects()
class IDLMozMapType(IDLType):
# XXXbz This is pretty similar to IDLSequenceType in various ways.
# And maybe to IDLNullableType. Should we have a superclass for
# "type containing this other type"? Bug 1015318.
def __init__(self, location, parameterType):
assert not parameterType.isVoid()
IDLType.__init__(self, location, parameterType.name)
self.inner = parameterType
self.builtin = False
# Need to set self.name up front if our inner type is already complete,
# since in that case our .complete() won't be called.
if self.inner.isComplete():
self.name = self.inner.name + "MozMap"
def __eq__(self, other):
return isinstance(other, IDLMozMapType) and self.inner == other.inner
def __str__(self):
return self.inner.__str__() + "MozMap"
def isMozMap(self):
return True
def includesRestrictedFloat(self):
return self.inner.includesRestrictedFloat()
def tag(self):
return IDLType.Tags.mozmap
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
self.inner.resolveType(parentScope)
def isComplete(self):
return self.inner.isComplete()
def complete(self, scope):
self.inner = self.inner.complete(scope)
self.name = self.inner.name + "MozMap"
return self
def unroll(self):
# We do not unroll our inner. Just stop at ourselves. That
# lets us add headers for both ourselves and our inner as
# needed.
return self
def isDistinguishableFrom(self, other):
if other.isPromise():
return False
if other.isUnion():
# Just forward to the union; it'll deal
return other.isDistinguishableFrom(self)
return (other.isPrimitive() or other.isString() or other.isEnum() or
other.isDate() or other.isNonCallbackInterface() or other.isSequence())
def isExposedInAllOf(self, exposureSet):
return self.inner.unroll().isExposedInAllOf(exposureSet)
def _getDependentObjects(self):
return self.inner._getDependentObjects()
class IDLUnionType(IDLType):
def __init__(self, location, memberTypes):
IDLType.__init__(self, location, "")
self.memberTypes = memberTypes
self.hasNullableType = False
self._dictionaryType = None
self.flatMemberTypes = None
self.builtin = False
def __eq__(self, other):
return isinstance(other, IDLUnionType) and self.memberTypes == other.memberTypes
def __hash__(self):
assert self.isComplete()
return self.name.__hash__()
def isVoid(self):
return False
def isUnion(self):
return True
def isSerializable(self):
return all(m.isSerializable() for m in self.memberTypes)
def includesRestrictedFloat(self):
return any(t.includesRestrictedFloat() for t in self.memberTypes)
def tag(self):
return IDLType.Tags.union
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
for t in self.memberTypes:
t.resolveType(parentScope)
def isComplete(self):
return self.flatMemberTypes is not None
def complete(self, scope):
def typeName(type):
if isinstance(type, IDLNullableType):
return typeName(type.inner) + "OrNull"
if isinstance(type, IDLWrapperType):
return typeName(type._identifier.object())
if isinstance(type, IDLObjectWithIdentifier):
return typeName(type.identifier)
return type.name
for (i, type) in enumerate(self.memberTypes):
if not type.isComplete():
self.memberTypes[i] = type.complete(scope)
self.name = "Or".join(typeName(type) for type in self.memberTypes)
self.flatMemberTypes = list(self.memberTypes)
i = 0
while i < len(self.flatMemberTypes):
if self.flatMemberTypes[i].nullable():
if self.hasNullableType:
raise WebIDLError("Can't have more than one nullable types in a union",
[nullableType.location, self.flatMemberTypes[i].location])
if self.hasDictionaryType():
raise WebIDLError("Can't have a nullable type and a "
"dictionary type in a union",
[self._dictionaryType.location,
self.flatMemberTypes[i].location])
self.hasNullableType = True
nullableType = self.flatMemberTypes[i]
self.flatMemberTypes[i] = self.flatMemberTypes[i].inner
continue
if self.flatMemberTypes[i].isDictionary():
if self.hasNullableType:
raise WebIDLError("Can't have a nullable type and a "
"dictionary type in a union",
[nullableType.location,
self.flatMemberTypes[i].location])
self._dictionaryType = self.flatMemberTypes[i]
elif self.flatMemberTypes[i].isUnion():
self.flatMemberTypes[i:i + 1] = self.flatMemberTypes[i].memberTypes
continue
i += 1
for (i, t) in enumerate(self.flatMemberTypes[:-1]):
for u in self.flatMemberTypes[i + 1:]:
if not t.isDistinguishableFrom(u):
raise WebIDLError("Flat member types of a union should be "
"distinguishable, " + str(t) + " is not "
"distinguishable from " + str(u),
[self.location, t.location, u.location])
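        # Illustrative (hypothetical) IDL rejected by the distinguishability
        # check above:
        #
        #   void f((DOMString or USVString) arg);   // two string types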
return self
def isDistinguishableFrom(self, other):
if self.hasNullableType and other.nullable():
# Can't tell which type null should become
return False
if other.isUnion():
otherTypes = other.unroll().memberTypes
else:
otherTypes = [other]
# For every type in otherTypes, check that it's distinguishable from
# every type in our types
for u in otherTypes:
if any(not t.isDistinguishableFrom(u) for t in self.memberTypes):
return False
return True
def isExposedInAllOf(self, exposureSet):
        # We could have different member types in different globals.
        # Just make sure that each thing in exposureSet has one of our
        # member types exposed in it.
for globalName in exposureSet:
if not any(t.unroll().isExposedInAllOf(set([globalName])) for t
in self.flatMemberTypes):
return False
return True
def hasDictionaryType(self):
return self._dictionaryType is not None
def hasPossiblyEmptyDictionaryType(self):
return (self._dictionaryType is not None and
self._dictionaryType.inner.canBeEmpty())
def _getDependentObjects(self):
return set(self.memberTypes)
class IDLArrayType(IDLType):
def __init__(self, location, parameterType):
assert not parameterType.isVoid()
if parameterType.isSequence():
raise WebIDLError("Array type cannot parameterize over a sequence type",
[location])
if parameterType.isMozMap():
raise WebIDLError("Array type cannot parameterize over a MozMap type",
[location])
if parameterType.isDictionary():
raise WebIDLError("Array type cannot parameterize over a dictionary type",
[location])
IDLType.__init__(self, location, parameterType.name)
self.inner = parameterType
self.builtin = False
def __eq__(self, other):
return isinstance(other, IDLArrayType) and self.inner == other.inner
def __str__(self):
return self.inner.__str__() + "Array"
def nullable(self):
return False
def isPrimitive(self):
return False
def isString(self):
return False
def isByteString(self):
return False
def isDOMString(self):
return False
def isUSVString(self):
return False
def isVoid(self):
return False
def isSequence(self):
assert not self.inner.isSequence()
return False
def isArray(self):
return True
def isDictionary(self):
assert not self.inner.isDictionary()
return False
def isInterface(self):
return False
def isEnum(self):
return False
def tag(self):
return IDLType.Tags.array
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
self.inner.resolveType(parentScope)
def isComplete(self):
return self.inner.isComplete()
def complete(self, scope):
self.inner = self.inner.complete(scope)
self.name = self.inner.name
if self.inner.isDictionary():
raise WebIDLError("Array type must not contain "
"dictionary as element type.",
[self.inner.location])
assert not self.inner.isSequence()
return self
def unroll(self):
return self.inner.unroll()
def isDistinguishableFrom(self, other):
if other.isPromise():
return False
if other.isUnion():
# Just forward to the union; it'll deal
return other.isDistinguishableFrom(self)
return (other.isPrimitive() or other.isString() or other.isEnum() or
other.isDate() or other.isNonCallbackInterface())
def _getDependentObjects(self):
return self.inner._getDependentObjects()
class IDLTypedefType(IDLType):
def __init__(self, location, innerType, name):
IDLType.__init__(self, location, name)
self.inner = innerType
self.builtin = False
def __eq__(self, other):
return isinstance(other, IDLTypedefType) and self.inner == other.inner
def __str__(self):
return self.name
def nullable(self):
return self.inner.nullable()
def isPrimitive(self):
return self.inner.isPrimitive()
def isBoolean(self):
return self.inner.isBoolean()
def isNumeric(self):
return self.inner.isNumeric()
def isString(self):
return self.inner.isString()
def isByteString(self):
return self.inner.isByteString()
def isDOMString(self):
return self.inner.isDOMString()
def isUSVString(self):
return self.inner.isUSVString()
def isVoid(self):
return self.inner.isVoid()
def isSequence(self):
return self.inner.isSequence()
def isMozMap(self):
return self.inner.isMozMap()
def isArray(self):
return self.inner.isArray()
def isDictionary(self):
return self.inner.isDictionary()
def isArrayBuffer(self):
return self.inner.isArrayBuffer()
def isArrayBufferView(self):
return self.inner.isArrayBufferView()
def isSharedArrayBuffer(self):
return self.inner.isSharedArrayBuffer()
def isSharedArrayBufferView(self):
return self.inner.isSharedArrayBufferView()
def isTypedArray(self):
return self.inner.isTypedArray()
def isSharedTypedArray(self):
return self.inner.isSharedTypedArray()
def isInterface(self):
return self.inner.isInterface()
def isCallbackInterface(self):
return self.inner.isCallbackInterface()
def isNonCallbackInterface(self):
return self.inner.isNonCallbackInterface()
def isComplete(self):
return False
def complete(self, parentScope):
if not self.inner.isComplete():
self.inner = self.inner.complete(parentScope)
assert self.inner.isComplete()
return self.inner
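        # For example, a (hypothetical) "typedef sequence<long> Numbers;"
        # completes to the underlying sequence type, so later passes never
        # see the typedef wrapper itself.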
# Do we need a resolveType impl? I don't think it's particularly useful....
def tag(self):
return self.inner.tag()
def unroll(self):
return self.inner.unroll()
def isDistinguishableFrom(self, other):
return self.inner.isDistinguishableFrom(other)
def _getDependentObjects(self):
return self.inner._getDependentObjects()
class IDLTypedef(IDLObjectWithIdentifier):
def __init__(self, location, parentScope, innerType, name):
identifier = IDLUnresolvedIdentifier(location, name)
IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)
self.innerType = innerType
def __str__(self):
return "Typedef %s %s" % (self.identifier.name, self.innerType)
def finish(self, parentScope):
if not self.innerType.isComplete():
self.innerType = self.innerType.complete(parentScope)
def validate(self):
pass
def isTypedef(self):
return True
def addExtendedAttributes(self, attrs):
assert len(attrs) == 0
def _getDependentObjects(self):
return self.innerType._getDependentObjects()
class IDLWrapperType(IDLType):
def __init__(self, location, inner, promiseInnerType=None):
IDLType.__init__(self, location, inner.identifier.name)
self.inner = inner
self._identifier = inner.identifier
self.builtin = False
assert not promiseInnerType or inner.identifier.name == "Promise"
self._promiseInnerType = promiseInnerType
def __eq__(self, other):
return (isinstance(other, IDLWrapperType) and
self._identifier == other._identifier and
self.builtin == other.builtin)
def __str__(self):
return str(self.name) + " (Wrapper)"
def nullable(self):
return False
def isPrimitive(self):
return False
def isString(self):
return False
def isByteString(self):
return False
def isDOMString(self):
return False
def isUSVString(self):
return False
def isVoid(self):
return False
def isSequence(self):
return False
def isArray(self):
return False
def isDictionary(self):
return isinstance(self.inner, IDLDictionary)
def isInterface(self):
return (isinstance(self.inner, IDLInterface) or
isinstance(self.inner, IDLExternalInterface))
def isCallbackInterface(self):
return self.isInterface() and self.inner.isCallback()
def isNonCallbackInterface(self):
return self.isInterface() and not self.inner.isCallback()
def isEnum(self):
return isinstance(self.inner, IDLEnum)
def isPromise(self):
return (isinstance(self.inner, IDLInterface) and
self.inner.identifier.name == "Promise")
def promiseInnerType(self):
assert self.isPromise()
return self._promiseInnerType
def isSerializable(self):
if self.isInterface():
if self.inner.isExternal():
return False
return any(m.isMethod() and m.isJsonifier() for m in self.inner.members)
elif self.isEnum():
return True
elif self.isDictionary():
return all(m.type.isSerializable() for m in self.inner.members)
else:
raise WebIDLError("IDLWrapperType wraps type %s that we don't know if "
"is serializable" % type(self.inner), [self.location])
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
self.inner.resolve(parentScope)
def isComplete(self):
return True
def tag(self):
if self.isInterface():
return IDLType.Tags.interface
elif self.isEnum():
return IDLType.Tags.enum
elif self.isDictionary():
return IDLType.Tags.dictionary
else:
assert False
def isDistinguishableFrom(self, other):
if self.isPromise():
return False
if other.isPromise():
return False
if other.isUnion():
# Just forward to the union; it'll deal
return other.isDistinguishableFrom(self)
assert self.isInterface() or self.isEnum() or self.isDictionary()
if self.isEnum():
return (other.isPrimitive() or other.isInterface() or other.isObject() or
other.isCallback() or other.isDictionary() or
other.isSequence() or other.isMozMap() or other.isArray() or
other.isDate())
if self.isDictionary() and other.nullable():
return False
if (other.isPrimitive() or other.isString() or other.isEnum() or
other.isDate() or other.isSequence()):
return True
if self.isDictionary():
return other.isNonCallbackInterface()
assert self.isInterface()
if other.isInterface():
if other.isSpiderMonkeyInterface():
# Just let |other| handle things
return other.isDistinguishableFrom(self)
assert self.isGeckoInterface() and other.isGeckoInterface()
if self.inner.isExternal() or other.unroll().inner.isExternal():
return self != other
return (len(self.inner.interfacesBasedOnSelf &
other.unroll().inner.interfacesBasedOnSelf) == 0 and
(self.isNonCallbackInterface() or
other.isNonCallbackInterface()))
if (other.isDictionary() or other.isCallback() or
other.isMozMap() or other.isArray()):
return self.isNonCallbackInterface()
# Not much else |other| can be
assert other.isObject()
return False
def isExposedInAllOf(self, exposureSet):
if not self.isInterface():
return True
iface = self.inner
if iface.isExternal():
# Let's say true, though ideally we'd only do this when
# exposureSet contains the primary global's name.
return True
if (self.isPromise() and
# Check the internal type
not self.promiseInnerType().unroll().isExposedInAllOf(exposureSet)):
return False
return iface.exposureSet.issuperset(exposureSet)
def _getDependentObjects(self):
# NB: The codegen for an interface type depends on
# a) That the identifier is in fact an interface (as opposed to
# a dictionary or something else).
# b) The native type of the interface.
# If we depend on the interface object we will also depend on
# anything the interface depends on which is undesirable. We
# considered implementing a dependency just on the interface type
# file, but then every modification to an interface would cause this
# to be regenerated which is still undesirable. We decided not to
# depend on anything, reasoning that:
# 1) Changing the concrete type of the interface requires modifying
# Bindings.conf, which is still a global dependency.
# 2) Changing an interface to a dictionary (or vice versa) with the
# same identifier should be incredibly rare.
#
# On the other hand, if our type is a dictionary, we should
# depend on it, because the member types of a dictionary
# affect whether a method taking the dictionary as an argument
# takes a JSContext* argument or not.
if self.isDictionary():
return set([self.inner])
return set()
class IDLBuiltinType(IDLType):
Types = enum(
# The integer types
'byte',
'octet',
'short',
'unsigned_short',
'long',
'unsigned_long',
'long_long',
'unsigned_long_long',
# Additional primitive types
'boolean',
'unrestricted_float',
'float',
'unrestricted_double',
# IMPORTANT: "double" must be the last primitive type listed
'double',
# Other types
'any',
'domstring',
'bytestring',
'usvstring',
'object',
'date',
'void',
# Funny stuff
'ArrayBuffer',
'ArrayBufferView',
'SharedArrayBuffer',
'SharedArrayBufferView',
'Int8Array',
'Uint8Array',
'Uint8ClampedArray',
'Int16Array',
'Uint16Array',
'Int32Array',
'Uint32Array',
'Float32Array',
'Float64Array',
'SharedInt8Array',
'SharedUint8Array',
'SharedUint8ClampedArray',
'SharedInt16Array',
'SharedUint16Array',
'SharedInt32Array',
'SharedUint32Array',
'SharedFloat32Array',
'SharedFloat64Array'
)
TagLookup = {
Types.byte: IDLType.Tags.int8,
Types.octet: IDLType.Tags.uint8,
Types.short: IDLType.Tags.int16,
Types.unsigned_short: IDLType.Tags.uint16,
Types.long: IDLType.Tags.int32,
Types.unsigned_long: IDLType.Tags.uint32,
Types.long_long: IDLType.Tags.int64,
Types.unsigned_long_long: IDLType.Tags.uint64,
Types.boolean: IDLType.Tags.bool,
Types.unrestricted_float: IDLType.Tags.unrestricted_float,
Types.float: IDLType.Tags.float,
Types.unrestricted_double: IDLType.Tags.unrestricted_double,
Types.double: IDLType.Tags.double,
Types.any: IDLType.Tags.any,
Types.domstring: IDLType.Tags.domstring,
Types.bytestring: IDLType.Tags.bytestring,
Types.usvstring: IDLType.Tags.usvstring,
Types.object: IDLType.Tags.object,
Types.date: IDLType.Tags.date,
Types.void: IDLType.Tags.void,
Types.ArrayBuffer: IDLType.Tags.interface,
Types.ArrayBufferView: IDLType.Tags.interface,
Types.SharedArrayBuffer: IDLType.Tags.interface,
Types.SharedArrayBufferView: IDLType.Tags.interface,
Types.Int8Array: IDLType.Tags.interface,
Types.Uint8Array: IDLType.Tags.interface,
Types.Uint8ClampedArray: IDLType.Tags.interface,
Types.Int16Array: IDLType.Tags.interface,
Types.Uint16Array: IDLType.Tags.interface,
Types.Int32Array: IDLType.Tags.interface,
Types.Uint32Array: IDLType.Tags.interface,
Types.Float32Array: IDLType.Tags.interface,
Types.Float64Array: IDLType.Tags.interface,
Types.SharedInt8Array: IDLType.Tags.interface,
Types.SharedUint8Array: IDLType.Tags.interface,
Types.SharedUint8ClampedArray: IDLType.Tags.interface,
Types.SharedInt16Array: IDLType.Tags.interface,
Types.SharedUint16Array: IDLType.Tags.interface,
Types.SharedInt32Array: IDLType.Tags.interface,
Types.SharedUint32Array: IDLType.Tags.interface,
Types.SharedFloat32Array: IDLType.Tags.interface,
Types.SharedFloat64Array: IDLType.Tags.interface
}
def __init__(self, location, name, type):
IDLType.__init__(self, location, name)
self.builtin = True
self._typeTag = type
def isPrimitive(self):
return self._typeTag <= IDLBuiltinType.Types.double
def isBoolean(self):
return self._typeTag == IDLBuiltinType.Types.boolean
def isNumeric(self):
return self.isPrimitive() and not self.isBoolean()
def isString(self):
return (self._typeTag == IDLBuiltinType.Types.domstring or
self._typeTag == IDLBuiltinType.Types.bytestring or
self._typeTag == IDLBuiltinType.Types.usvstring)
def isByteString(self):
return self._typeTag == IDLBuiltinType.Types.bytestring
def isDOMString(self):
return self._typeTag == IDLBuiltinType.Types.domstring
def isUSVString(self):
return self._typeTag == IDLBuiltinType.Types.usvstring
def isInteger(self):
return self._typeTag <= IDLBuiltinType.Types.unsigned_long_long
def isArrayBuffer(self):
return self._typeTag == IDLBuiltinType.Types.ArrayBuffer
def isArrayBufferView(self):
return self._typeTag == IDLBuiltinType.Types.ArrayBufferView
def isSharedArrayBuffer(self):
return self._typeTag == IDLBuiltinType.Types.SharedArrayBuffer
def isSharedArrayBufferView(self):
return self._typeTag == IDLBuiltinType.Types.SharedArrayBufferView
def isTypedArray(self):
return (self._typeTag >= IDLBuiltinType.Types.Int8Array and
self._typeTag <= IDLBuiltinType.Types.Float64Array)
def isSharedTypedArray(self):
return (self._typeTag >= IDLBuiltinType.Types.SharedInt8Array and
self._typeTag <= IDLBuiltinType.Types.SharedFloat64Array)
def isInterface(self):
# TypedArray things are interface types per the TypedArray spec,
# but we handle them as builtins because SpiderMonkey implements
# all of it internally.
return (self.isArrayBuffer() or
self.isArrayBufferView() or
self.isSharedArrayBuffer() or
self.isSharedArrayBufferView() or
self.isTypedArray() or
self.isSharedTypedArray())
def isNonCallbackInterface(self):
# All the interfaces we can be are non-callback
return self.isInterface()
def isFloat(self):
return (self._typeTag == IDLBuiltinType.Types.float or
self._typeTag == IDLBuiltinType.Types.double or
self._typeTag == IDLBuiltinType.Types.unrestricted_float or
self._typeTag == IDLBuiltinType.Types.unrestricted_double)
def isUnrestricted(self):
assert self.isFloat()
return (self._typeTag == IDLBuiltinType.Types.unrestricted_float or
self._typeTag == IDLBuiltinType.Types.unrestricted_double)
def isSerializable(self):
return self.isPrimitive() or self.isString() or self.isDate()
def includesRestrictedFloat(self):
return self.isFloat() and not self.isUnrestricted()
def tag(self):
return IDLBuiltinType.TagLookup[self._typeTag]
def isDistinguishableFrom(self, other):
if other.isPromise():
return False
if other.isUnion():
# Just forward to the union; it'll deal
return other.isDistinguishableFrom(self)
if self.isBoolean():
return (other.isNumeric() or other.isString() or other.isEnum() or
other.isInterface() or other.isObject() or
other.isCallback() or other.isDictionary() or
other.isSequence() or other.isMozMap() or other.isArray() or
other.isDate())
if self.isNumeric():
return (other.isBoolean() or other.isString() or other.isEnum() or
other.isInterface() or other.isObject() or
other.isCallback() or other.isDictionary() or
other.isSequence() or other.isMozMap() or other.isArray() or
other.isDate())
if self.isString():
return (other.isPrimitive() or other.isInterface() or
other.isObject() or
other.isCallback() or other.isDictionary() or
other.isSequence() or other.isMozMap() or other.isArray() or
other.isDate())
if self.isAny():
# Can't tell "any" apart from anything
return False
if self.isObject():
return other.isPrimitive() or other.isString() or other.isEnum()
if self.isDate():
return (other.isPrimitive() or other.isString() or other.isEnum() or
other.isInterface() or other.isCallback() or
other.isDictionary() or other.isSequence() or
other.isMozMap() or other.isArray())
if self.isVoid():
return not other.isVoid()
# Not much else we could be!
assert self.isSpiderMonkeyInterface()
# Like interfaces, but we know we're not a callback
return (other.isPrimitive() or other.isString() or other.isEnum() or
other.isCallback() or other.isDictionary() or
other.isSequence() or other.isMozMap() or other.isArray() or
other.isDate() or
(other.isInterface() and (
# ArrayBuffer is distinguishable from everything
# that's not an ArrayBuffer or a callback interface
(self.isArrayBuffer() and not other.isArrayBuffer()) or
(self.isSharedArrayBuffer() and not other.isSharedArrayBuffer()) or
# ArrayBufferView is distinguishable from everything
# that's not an ArrayBufferView or typed array.
(self.isArrayBufferView() and not other.isArrayBufferView() and
not other.isTypedArray()) or
(self.isSharedArrayBufferView() and not other.isSharedArrayBufferView() and
not other.isSharedTypedArray()) or
# Typed arrays are distinguishable from everything
# except ArrayBufferView and the same type of typed
# array
(self.isTypedArray() and not other.isArrayBufferView() and not
(other.isTypedArray() and other.name == self.name)) or
(self.isSharedTypedArray() and not other.isSharedArrayBufferView() and not
(other.isSharedTypedArray() and other.name == self.name)))))
def _getDependentObjects(self):
return set()
BuiltinTypes = {
IDLBuiltinType.Types.byte:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Byte",
IDLBuiltinType.Types.byte),
IDLBuiltinType.Types.octet:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Octet",
IDLBuiltinType.Types.octet),
IDLBuiltinType.Types.short:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Short",
IDLBuiltinType.Types.short),
IDLBuiltinType.Types.unsigned_short:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "UnsignedShort",
IDLBuiltinType.Types.unsigned_short),
IDLBuiltinType.Types.long:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Long",
IDLBuiltinType.Types.long),
IDLBuiltinType.Types.unsigned_long:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "UnsignedLong",
IDLBuiltinType.Types.unsigned_long),
IDLBuiltinType.Types.long_long:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "LongLong",
IDLBuiltinType.Types.long_long),
IDLBuiltinType.Types.unsigned_long_long:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "UnsignedLongLong",
IDLBuiltinType.Types.unsigned_long_long),
IDLBuiltinType.Types.boolean:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Boolean",
IDLBuiltinType.Types.boolean),
IDLBuiltinType.Types.float:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Float",
IDLBuiltinType.Types.float),
IDLBuiltinType.Types.unrestricted_float:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "UnrestrictedFloat",
IDLBuiltinType.Types.unrestricted_float),
IDLBuiltinType.Types.double:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Double",
IDLBuiltinType.Types.double),
IDLBuiltinType.Types.unrestricted_double:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "UnrestrictedDouble",
IDLBuiltinType.Types.unrestricted_double),
IDLBuiltinType.Types.any:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Any",
IDLBuiltinType.Types.any),
IDLBuiltinType.Types.domstring:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "String",
IDLBuiltinType.Types.domstring),
IDLBuiltinType.Types.bytestring:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "ByteString",
IDLBuiltinType.Types.bytestring),
IDLBuiltinType.Types.usvstring:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "USVString",
IDLBuiltinType.Types.usvstring),
IDLBuiltinType.Types.object:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Object",
IDLBuiltinType.Types.object),
IDLBuiltinType.Types.date:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Date",
IDLBuiltinType.Types.date),
IDLBuiltinType.Types.void:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Void",
IDLBuiltinType.Types.void),
IDLBuiltinType.Types.ArrayBuffer:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "ArrayBuffer",
IDLBuiltinType.Types.ArrayBuffer),
IDLBuiltinType.Types.ArrayBufferView:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "ArrayBufferView",
IDLBuiltinType.Types.ArrayBufferView),
IDLBuiltinType.Types.SharedArrayBuffer:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedArrayBuffer",
IDLBuiltinType.Types.SharedArrayBuffer),
IDLBuiltinType.Types.SharedArrayBufferView:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedArrayBufferView",
IDLBuiltinType.Types.SharedArrayBufferView),
IDLBuiltinType.Types.Int8Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Int8Array",
IDLBuiltinType.Types.Int8Array),
IDLBuiltinType.Types.Uint8Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Uint8Array",
IDLBuiltinType.Types.Uint8Array),
IDLBuiltinType.Types.Uint8ClampedArray:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Uint8ClampedArray",
IDLBuiltinType.Types.Uint8ClampedArray),
IDLBuiltinType.Types.Int16Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Int16Array",
IDLBuiltinType.Types.Int16Array),
IDLBuiltinType.Types.Uint16Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Uint16Array",
IDLBuiltinType.Types.Uint16Array),
IDLBuiltinType.Types.Int32Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Int32Array",
IDLBuiltinType.Types.Int32Array),
IDLBuiltinType.Types.Uint32Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Uint32Array",
IDLBuiltinType.Types.Uint32Array),
IDLBuiltinType.Types.Float32Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Float32Array",
IDLBuiltinType.Types.Float32Array),
IDLBuiltinType.Types.Float64Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "Float64Array",
IDLBuiltinType.Types.Float64Array),
IDLBuiltinType.Types.SharedInt8Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedInt8Array",
IDLBuiltinType.Types.SharedInt8Array),
IDLBuiltinType.Types.SharedUint8Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedUint8Array",
IDLBuiltinType.Types.SharedUint8Array),
IDLBuiltinType.Types.SharedUint8ClampedArray:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedUint8ClampedArray",
IDLBuiltinType.Types.SharedUint8ClampedArray),
IDLBuiltinType.Types.SharedInt16Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedInt16Array",
IDLBuiltinType.Types.SharedInt16Array),
IDLBuiltinType.Types.SharedUint16Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedUint16Array",
IDLBuiltinType.Types.SharedUint16Array),
IDLBuiltinType.Types.SharedInt32Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedInt32Array",
IDLBuiltinType.Types.SharedInt32Array),
IDLBuiltinType.Types.SharedUint32Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedUint32Array",
IDLBuiltinType.Types.SharedUint32Array),
IDLBuiltinType.Types.SharedFloat32Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedFloat32Array",
IDLBuiltinType.Types.SharedFloat32Array),
IDLBuiltinType.Types.SharedFloat64Array:
IDLBuiltinType(BuiltinLocation("<builtin type>"), "SharedFloat64Array",
IDLBuiltinType.Types.SharedFloat64Array)
}
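# A minimal illustrative sketch (hypothetical helper, never called by the
# parser): a few concrete consequences of the distinguishability rules in
# IDLBuiltinType.isDistinguishableFrom above. Typed arrays with different
# element types are distinguishable from each other, but no typed array is
# distinguishable from ArrayBufferView, since an ArrayBufferView argument
# accepts any typed array.
def _exampleBuiltinDistinguishability():
    int8 = BuiltinTypes[IDLBuiltinType.Types.Int8Array]
    uint8 = BuiltinTypes[IDLBuiltinType.Types.Uint8Array]
    view = BuiltinTypes[IDLBuiltinType.Types.ArrayBufferView]
    assert int8.isDistinguishableFrom(uint8)     # different element types
    assert not int8.isDistinguishableFrom(view)  # a view subsumes typed arrays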
integerTypeSizes = {
IDLBuiltinType.Types.byte: (-128, 127),
IDLBuiltinType.Types.octet: (0, 255),
IDLBuiltinType.Types.short: (-32768, 32767),
IDLBuiltinType.Types.unsigned_short: (0, 65535),
IDLBuiltinType.Types.long: (-2147483648, 2147483647),
IDLBuiltinType.Types.unsigned_long: (0, 4294967295),
IDLBuiltinType.Types.long_long: (-9223372036854775808, 9223372036854775807),
IDLBuiltinType.Types.unsigned_long_long: (0, 18446744073709551615)
}
def matchIntegerValueToType(value):
    for type, (minValue, maxValue) in integerTypeSizes.items():
        if minValue <= value <= maxValue:
            return BuiltinTypes[type]
    return None
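# A minimal illustrative sketch (hypothetical helper, never called by the
# parser): matchIntegerValueToType returns whichever entry of
# integerTypeSizes it encounters first whose range contains the value, so
# callers can only rely on getting *some* integer type that fits, or None
# when nothing does.
def _exampleIntegerMatching():
    assert matchIntegerValueToType(2**40) is not None  # fits the 64-bit types
    assert matchIntegerValueToType(2**64) is None      # too large for any type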
class IDLValue(IDLObject):
def __init__(self, location, type, value):
IDLObject.__init__(self, location)
self.type = type
assert isinstance(type, IDLType)
self.value = value
def coerceToType(self, type, location):
if type == self.type:
return self # Nothing to do
# We first check for unions to ensure that even if the union is nullable
# we end up with the right flat member type, not the union's type.
if type.isUnion():
# We use the flat member types here, because if we have a nullable
# member type, or a nested union, we want the type the value
# actually coerces to, not the nullable or nested union type.
for subtype in type.unroll().flatMemberTypes:
try:
coercedValue = self.coerceToType(subtype, location)
# Create a new IDLValue to make sure that we have the
# correct float/double type. This is necessary because we
# use the value's type when it is a default value of a
# union, and the union cares about the exact float type.
return IDLValue(self.location, subtype, coercedValue.value)
                except WebIDLError:
                    # This subtype did not accept the value; try the next one.
                    pass
# If the type allows null, rerun this matching on the inner type, except
# nullable enums. We handle those specially, because we want our
# default string values to stay strings even when assigned to a nullable
# enum.
elif type.nullable() and not type.isEnum():
innerValue = self.coerceToType(type.inner, location)
return IDLValue(self.location, type, innerValue.value)
elif self.type.isInteger() and type.isInteger():
# We're both integer types. See if we fit.
            (minValue, maxValue) = integerTypeSizes[type._typeTag]
            if minValue <= self.value <= maxValue:
# Promote
return IDLValue(self.location, type, self.value)
else:
raise WebIDLError("Value %s is out of range for type %s." %
(self.value, type), [location])
elif self.type.isInteger() and type.isFloat():
# Convert an integer literal into float
if -2**24 <= self.value <= 2**24:
return IDLValue(self.location, type, float(self.value))
else:
raise WebIDLError("Converting value %s to %s will lose precision." %
(self.value, type), [location])
elif self.type.isString() and type.isEnum():
# Just keep our string, but make sure it's a valid value for this enum
enum = type.unroll().inner
if self.value not in enum.values():
raise WebIDLError("'%s' is not a valid default value for enum %s"
% (self.value, enum.identifier.name),
[location, enum.location])
return self
elif self.type.isFloat() and type.isFloat():
if (not type.isUnrestricted() and
(self.value == float("inf") or self.value == float("-inf") or
math.isnan(self.value))):
raise WebIDLError("Trying to convert unrestricted value %s to non-unrestricted"
% self.value, [location]);
return IDLValue(self.location, type, self.value)
elif self.type.isString() and type.isUSVString():
# Allow USVStrings to use default value just like
# DOMString. No coercion is required in this case as Codegen.py
# treats USVString just like DOMString, but with an
# extra normalization step.
assert self.type.isDOMString()
return self
raise WebIDLError("Cannot coerce type %s to type %s." %
(self.type, type), [location])
def _getDependentObjects(self):
return set()
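# A minimal illustrative sketch (hypothetical helper, never called by the
# parser): integer coercion in IDLValue.coerceToType above either promotes a
# literal that fits the target range or raises a WebIDLError.
def _exampleIntegerCoercion():
    loc = BuiltinLocation("<example>")
    v = IDLValue(loc, BuiltinTypes[IDLBuiltinType.Types.long], 200)
    # 200 fits octet's range (0, 255), so this coercion succeeds.
    v.coerceToType(BuiltinTypes[IDLBuiltinType.Types.octet], loc)
    try:
        # 200 is outside byte's range (-128, 127), so this one raises.
        v.coerceToType(BuiltinTypes[IDLBuiltinType.Types.byte], loc)
        assert False, "expected a WebIDLError"
    except WebIDLError:
        pass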
class IDLNullValue(IDLObject):
def __init__(self, location):
IDLObject.__init__(self, location)
self.type = None
self.value = None
def coerceToType(self, type, location):
if (not isinstance(type, IDLNullableType) and
not (type.isUnion() and type.hasNullableType) and
not (type.isUnion() and type.hasDictionaryType()) and
not type.isDictionary() and
not type.isAny()):
raise WebIDLError("Cannot coerce null value to type %s." % type,
[location])
nullValue = IDLNullValue(self.location)
if type.isUnion() and not type.nullable() and type.hasDictionaryType():
# We're actually a default value for the union's dictionary member.
# Use its type.
for t in type.flatMemberTypes:
if t.isDictionary():
nullValue.type = t
return nullValue
nullValue.type = type
return nullValue
def _getDependentObjects(self):
return set()
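# Illustrative note on coerceToType above: a null default coerces only to
# nullable types, dictionary types, "any", and unions that are nullable or
# contain a dictionary. For a non-nullable union with a dictionary member,
# the value's type is retargeted at that dictionary member, so later codegen
# sees the dictionary type rather than the union.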
class IDLEmptySequenceValue(IDLObject):
def __init__(self, location):
IDLObject.__init__(self, location)
self.type = None
self.value = None
def coerceToType(self, type, location):
if type.isUnion():
# We use the flat member types here, because if we have a nullable
# member type, or a nested union, we want the type the value
# actually coerces to, not the nullable or nested union type.
for subtype in type.unroll().flatMemberTypes:
try:
return self.coerceToType(subtype, location)
                except WebIDLError:
                    # This subtype did not accept the value; try the next one.
                    pass
if not type.isSequence():
raise WebIDLError("Cannot coerce empty sequence value to type %s." % type,
[location])
emptySequenceValue = IDLEmptySequenceValue(self.location)
emptySequenceValue.type = type
return emptySequenceValue
def _getDependentObjects(self):
return set()
class IDLUndefinedValue(IDLObject):
def __init__(self, location):
IDLObject.__init__(self, location)
self.type = None
self.value = None
def coerceToType(self, type, location):
if not type.isAny():
raise WebIDLError("Cannot coerce undefined value to type %s." % type,
[location])
undefinedValue = IDLUndefinedValue(self.location)
undefinedValue.type = type
return undefinedValue
def _getDependentObjects(self):
return set()
class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins):
Tags = enum(
'Const',
'Attr',
'Method',
'MaplikeOrSetlike'
)
Special = enum(
'Static',
'Stringifier'
)
AffectsValues = ("Nothing", "Everything")
DependsOnValues = ("Nothing", "DOMState", "DeviceState", "Everything")
def __init__(self, location, identifier, tag):
IDLObjectWithIdentifier.__init__(self, location, None, identifier)
IDLExposureMixins.__init__(self, location)
self.tag = tag
self._extendedAttrDict = {}
def isMethod(self):
return self.tag == IDLInterfaceMember.Tags.Method
def isAttr(self):
return self.tag == IDLInterfaceMember.Tags.Attr
def isConst(self):
return self.tag == IDLInterfaceMember.Tags.Const
def isMaplikeOrSetlike(self):
return self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike
def addExtendedAttributes(self, attrs):
for attr in attrs:
self.handleExtendedAttribute(attr)
attrlist = attr.listValue()
self._extendedAttrDict[attr.identifier()] = attrlist if len(attrlist) else True
def handleExtendedAttribute(self, attr):
pass
def getExtendedAttribute(self, name):
return self._extendedAttrDict.get(name, None)
def finish(self, scope):
        # We had better be exposed _somewhere_.
        if len(self._exposureGlobalNames) == 0:
            print self.identifier.name
        assert len(self._exposureGlobalNames) != 0
IDLExposureMixins.finish(self, scope)
def validate(self):
if (self.getExtendedAttribute("Pref") and
self.exposureSet != set([self._globalScope.primaryGlobalName])):
raise WebIDLError("[Pref] used on an interface member that is not "
"%s-only" % self._globalScope.primaryGlobalName,
[self.location])
for attribute in ["CheckAnyPermissions", "CheckAllPermissions"]:
if (self.getExtendedAttribute(attribute) and
self.exposureSet != set([self._globalScope.primaryGlobalName])):
raise WebIDLError("[%s] used on an interface member that is "
"not %s-only" %
                              (attribute, self._globalScope.primaryGlobalName),
[self.location])
if self.isAttr() or self.isMethod():
if self.affects == "Everything" and self.dependsOn != "Everything":
raise WebIDLError("Interface member is flagged as affecting "
"everything but not depending on everything. "
"That seems rather unlikely.",
[self.location])
if self.getExtendedAttribute("NewObject"):
if self.dependsOn == "Nothing" or self.dependsOn == "DOMState":
raise WebIDLError("A [NewObject] method is not idempotent, "
"so it has to depend on something other than DOM state.",
[self.location])
def _setDependsOn(self, dependsOn):
if self.dependsOn != "Everything":
raise WebIDLError("Trying to specify multiple different DependsOn, "
"Pure, or Constant extended attributes for "
"attribute", [self.location])
if dependsOn not in IDLInterfaceMember.DependsOnValues:
raise WebIDLError("Invalid [DependsOn=%s] on attribute" % dependsOn,
[self.location])
self.dependsOn = dependsOn
def _setAffects(self, affects):
if self.affects != "Everything":
raise WebIDLError("Trying to specify multiple different Affects, "
"Pure, or Constant extended attributes for "
"attribute", [self.location])
if affects not in IDLInterfaceMember.AffectsValues:
raise WebIDLError("Invalid [Affects=%s] on attribute" % dependsOn,
[self.location])
self.affects = affects
def _addAlias(self, alias):
if alias in self.aliases:
raise WebIDLError("Duplicate [Alias=%s] on attribute" % alias,
[self.location])
self.aliases.append(alias)
# MaplikeOrSetlike adds a trait to an interface, like map or iteration
# functions. To handle them while still getting all of the generated binding
# code taken care of, we treat them as macros that are expanded into members
# based on parsed values.
class IDLMaplikeOrSetlike(IDLInterfaceMember):
MaplikeOrSetlikeTypes = enum(
'maplike',
'setlike'
)
def __init__(self, location, identifier, maplikeOrSetlikeType,
readonly, keyType, valueType):
IDLInterfaceMember.__init__(self, location, identifier,
IDLInterfaceMember.Tags.MaplikeOrSetlike)
assert isinstance(keyType, IDLType)
assert isinstance(valueType, IDLType)
self.maplikeOrSetlikeType = maplikeOrSetlikeType
self.readonly = readonly
self.keyType = keyType
self.valueType = valueType
self.slotIndex = None
self.disallowedMemberNames = []
self.disallowedNonMethodNames = []
# When generating JSAPI access code, we need to know the backing object
# type prefix to create the correct function. Generate here for reuse.
if self.isMaplike():
self.prefix = 'Map'
elif self.isSetlike():
self.prefix = 'Set'
def __str__(self):
return "declared '%s' with key '%s'" % (self.maplikeOrSetlikeType, self.keyType)
def isMaplike(self):
return self.maplikeOrSetlikeType == "maplike"
def isSetlike(self):
return self.maplikeOrSetlikeType == "setlike"
def checkCollisions(self, members, isAncestor):
for member in members:
# Check that there are no disallowed members
if (member.identifier.name in self.disallowedMemberNames and
not ((member.isMethod() and member.isMaplikeOrSetlikeMethod()) or
(member.isAttr() and member.isMaplikeOrSetlikeAttr()))):
raise WebIDLError("Member '%s' conflicts "
"with reserved %s name." %
(member.identifier.name,
self.maplikeOrSetlikeType),
[self.location, member.location])
# Check that there are no disallowed non-method members
            if ((isAncestor or member.isAttr() or member.isConst()) and
                member.identifier.name in self.disallowedNonMethodNames):
raise WebIDLError("Member '%s' conflicts "
"with reserved %s method." %
(member.identifier.name,
self.maplikeOrSetlikeType),
[self.location, member.location])
def expand(self, members, isJSImplemented):
"""
In order to take advantage of all of the method machinery in Codegen,
we generate our functions as if they were part of the interface
specification during parsing.
"""
def addMethod(name, allowExistingOperations, returnType, args=[],
chromeOnly=False, isPure=False, affectsNothing=False):
"""
Create an IDLMethod based on the parameters passed in. chromeOnly is only
True for read-only js implemented classes, to implement underscore
prefixed convenience functions would otherwise not be available,
unlike the case of C++ bindings. isPure is only True for
idempotent functions, so it is not valid for things like keys,
values, etc. that return a new object every time.
"""
# Only add name to lists for collision checks if it's not chrome
# only.
if chromeOnly:
name = "__" + name
else:
if not allowExistingOperations:
self.disallowedMemberNames.append(name)
else:
self.disallowedNonMethodNames.append(name)
# If allowExistingOperations is True, and another operation exists
# with the same name as the one we're trying to add, don't add the
# maplike/setlike operation. However, if the operation is static,
# then fail by way of creating the function, which will cause a
# naming conflict, per the spec.
if allowExistingOperations:
for m in members:
if m.identifier.name == name and m.isMethod() and not m.isStatic():
return
method = IDLMethod(self.location,
IDLUnresolvedIdentifier(self.location, name, allowDoubleUnderscore=chromeOnly),
returnType, args, maplikeOrSetlike=self)
# We need to be able to throw from declaration methods
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("Throws",))])
if chromeOnly:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("ChromeOnly",))])
if isPure:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("Pure",))])
            # The following extended attributes are used for keys/values/
            # entries. We can't mark those methods pure, since they return a
            # new object each time they are run.
if affectsNothing:
method.addExtendedAttributes(
[IDLExtendedAttribute(self.location, ("DependsOn", "Everything")),
IDLExtendedAttribute(self.location, ("Affects", "Nothing"))])
members.append(method)
# Both maplike and setlike have a size attribute
members.append(IDLAttribute(self.location,
IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"), "size"),
BuiltinTypes[IDLBuiltinType.Types.unsigned_long],
True,
maplikeOrSetlike=self))
self.reserved_ro_names = ["size"]
# object entries()
addMethod("entries", False, BuiltinTypes[IDLBuiltinType.Types.object],
affectsNothing=True)
# object keys()
addMethod("keys", False, BuiltinTypes[IDLBuiltinType.Types.object],
affectsNothing=True)
# object values()
addMethod("values", False, BuiltinTypes[IDLBuiltinType.Types.object],
affectsNothing=True)
        # void forEach(callback(valueType, keyType), optional any thisArg)
foreachArguments = [IDLArgument(self.location,
IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"callback"),
BuiltinTypes[IDLBuiltinType.Types.object]),
IDLArgument(self.location,
IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"thisArg"),
BuiltinTypes[IDLBuiltinType.Types.any],
optional=True)]
addMethod("forEach", False, BuiltinTypes[IDLBuiltinType.Types.void],
foreachArguments)
def getKeyArg():
return IDLArgument(self.location,
IDLUnresolvedIdentifier(self.location, "key"),
self.keyType)
# boolean has(keyType key)
addMethod("has", False, BuiltinTypes[IDLBuiltinType.Types.boolean],
[getKeyArg()], isPure=True)
if not self.readonly:
# void clear()
addMethod("clear", True, BuiltinTypes[IDLBuiltinType.Types.void],
[])
# boolean delete(keyType key)
addMethod("delete", True,
BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()])
# Always generate underscored functions (e.g. __add, __clear) for js
# implemented interfaces as convenience functions.
if isJSImplemented:
# void clear()
addMethod("clear", True, BuiltinTypes[IDLBuiltinType.Types.void],
[], chromeOnly=True)
# boolean delete(keyType key)
addMethod("delete", True,
BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()],
chromeOnly=True)
if self.isSetlike():
if not self.readonly:
# Add returns the set object it just added to.
# object add(keyType key)
addMethod("add", True,
BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()])
if isJSImplemented:
addMethod("add", True,
BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()],
chromeOnly=True)
return
# If we get this far, we're a maplike declaration.
# valueType get(keyType key)
#
# Note that instead of the value type, we're using any here. The
# validity checks should happen as things are inserted into the map,
# and using any as the return type makes code generation much simpler.
#
# TODO: Bug 1155340 may change this to use specific type to provide
# more info to JIT.
addMethod("get", False, BuiltinTypes[IDLBuiltinType.Types.any],
[getKeyArg()], isPure=True)
def getValueArg():
return IDLArgument(self.location,
IDLUnresolvedIdentifier(self.location, "value"),
self.valueType)
if not self.readonly:
addMethod("set", True, BuiltinTypes[IDLBuiltinType.Types.object],
[getKeyArg(), getValueArg()])
if isJSImplemented:
addMethod("set", True, BuiltinTypes[IDLBuiltinType.Types.object],
[getKeyArg(), getValueArg()], chromeOnly=True)
def resolve(self, parentScope):
self.keyType.resolveType(parentScope)
self.valueType.resolveType(parentScope)
def finish(self, scope):
IDLInterfaceMember.finish(self, scope)
if not self.keyType.isComplete():
t = self.keyType.complete(scope)
assert not isinstance(t, IDLUnresolvedType)
assert not isinstance(t, IDLTypedefType)
assert not isinstance(t.name, IDLUnresolvedIdentifier)
self.keyType = t
if not self.valueType.isComplete():
t = self.valueType.complete(scope)
assert not isinstance(t, IDLUnresolvedType)
assert not isinstance(t, IDLTypedefType)
assert not isinstance(t.name, IDLUnresolvedIdentifier)
self.valueType = t
def validate(self):
IDLInterfaceMember.validate(self)
def handleExtendedAttribute(self, attr):
IDLInterfaceMember.handleExtendedAttribute(self, attr)
def _getDependentObjects(self):
return set([self.keyType, self.valueType])
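# Illustrative note on expand() above: a declaration such as
#     readonly maplike<DOMString, long>;
# expands into a "size" attribute plus entries(), keys(), values(),
# forEach(), has() and get() methods, while a non-readonly maplike
# additionally grows clear(), delete() and set() (and, for JS-implemented
# interfaces, chrome-only "__"-prefixed variants of the mutators).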
class IDLConst(IDLInterfaceMember):
def __init__(self, location, identifier, type, value):
IDLInterfaceMember.__init__(self, location, identifier,
IDLInterfaceMember.Tags.Const)
assert isinstance(type, IDLType)
if type.isDictionary():
raise WebIDLError("A constant cannot be of a dictionary type",
[self.location])
self.type = type
self.value = value
if identifier.name == "prototype":
raise WebIDLError("The identifier of a constant must not be 'prototype'",
[location])
def __str__(self):
return "'%s' const '%s'" % (self.type, self.identifier)
def finish(self, scope):
IDLInterfaceMember.finish(self, scope)
if not self.type.isComplete():
type = self.type.complete(scope)
if not type.isPrimitive() and not type.isString():
locations = [self.type.location, type.location]
try:
locations.append(type.inner.location)
                except AttributeError:
                    # Not every type has an inner type with a location.
                    pass
raise WebIDLError("Incorrect type for constant", locations)
self.type = type
# The value might not match the type
coercedValue = self.value.coerceToType(self.type, self.location)
assert coercedValue
self.value = coercedValue
def validate(self):
IDLInterfaceMember.validate(self)
def handleExtendedAttribute(self, attr):
identifier = attr.identifier()
if identifier == "Exposed":
convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
elif (identifier == "Pref" or
identifier == "ChromeOnly" or
identifier == "Func" or
identifier == "AvailableIn" or
identifier == "CheckAnyPermissions" or
identifier == "CheckAllPermissions"):
# Known attributes that we don't need to do anything with here
pass
else:
raise WebIDLError("Unknown extended attribute %s on constant" % identifier,
[attr.location])
IDLInterfaceMember.handleExtendedAttribute(self, attr)
def _getDependentObjects(self):
return set([self.type, self.value])
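# Illustrative note on IDLConst above: constants are restricted to primitive
# and string types; a dictionary-typed constant is rejected already in
# __init__, and anything else non-primitive/non-string (e.g. an interface
# type) fails in finish() with "Incorrect type for constant".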
class IDLAttribute(IDLInterfaceMember):
def __init__(self, location, identifier, type, readonly, inherit=False,
static=False, stringifier=False, maplikeOrSetlike=None):
IDLInterfaceMember.__init__(self, location, identifier,
IDLInterfaceMember.Tags.Attr)
assert isinstance(type, IDLType)
self.type = type
self.readonly = readonly
self.inherit = inherit
self.static = static
self.lenientThis = False
self._unforgeable = False
self.stringifier = stringifier
self.enforceRange = False
self.clamp = False
self.slotIndex = None
assert maplikeOrSetlike is None or isinstance(maplikeOrSetlike, IDLMaplikeOrSetlike)
self.maplikeOrSetlike = maplikeOrSetlike
self.dependsOn = "Everything"
self.affects = "Everything"
if static and identifier.name == "prototype":
raise WebIDLError("The identifier of a static attribute must not be 'prototype'",
[location])
if readonly and inherit:
raise WebIDLError("An attribute cannot be both 'readonly' and 'inherit'",
[self.location])
def isStatic(self):
return self.static
def __str__(self):
return "'%s' attribute '%s'" % (self.type, self.identifier)
def finish(self, scope):
IDLInterfaceMember.finish(self, scope)
if not self.type.isComplete():
t = self.type.complete(scope)
assert not isinstance(t, IDLUnresolvedType)
assert not isinstance(t, IDLTypedefType)
assert not isinstance(t.name, IDLUnresolvedIdentifier)
self.type = t
if self.type.isDictionary() and not self.getExtendedAttribute("Cached"):
raise WebIDLError("An attribute cannot be of a dictionary type",
[self.location])
if self.type.isSequence() and not self.getExtendedAttribute("Cached"):
raise WebIDLError("A non-cached attribute cannot be of a sequence "
"type", [self.location])
if self.type.isMozMap() and not self.getExtendedAttribute("Cached"):
raise WebIDLError("A non-cached attribute cannot be of a MozMap "
"type", [self.location])
if self.type.isUnion():
for f in self.type.unroll().flatMemberTypes:
if f.isDictionary():
raise WebIDLError("An attribute cannot be of a union "
"type if one of its member types (or "
"one of its member types's member "
"types, and so on) is a dictionary "
"type", [self.location, f.location])
if f.isSequence():
raise WebIDLError("An attribute cannot be of a union "
"type if one of its member types (or "
"one of its member types's member "
"types, and so on) is a sequence "
"type", [self.location, f.location])
if f.isMozMap():
raise WebIDLError("An attribute cannot be of a union "
"type if one of its member types (or "
"one of its member types's member "
"types, and so on) is a MozMap "
"type", [self.location, f.location])
if not self.type.isInterface() and self.getExtendedAttribute("PutForwards"):
raise WebIDLError("An attribute with [PutForwards] must have an "
"interface type as its type", [self.location])
if not self.type.isInterface() and self.getExtendedAttribute("SameObject"):
raise WebIDLError("An attribute with [SameObject] must have an "
"interface type as its type", [self.location])
def validate(self):
IDLInterfaceMember.validate(self)
if ((self.getExtendedAttribute("Cached") or
self.getExtendedAttribute("StoreInSlot")) and
not self.affects == "Nothing"):
raise WebIDLError("Cached attributes and attributes stored in "
"slots must be Constant or Pure or "
"Affects=Nothing, since the getter won't always "
"be called.",
[self.location])
if self.getExtendedAttribute("Frozen"):
if (not self.type.isSequence() and not self.type.isDictionary() and
not self.type.isMozMap()):
raise WebIDLError("[Frozen] is only allowed on "
"sequence-valued, dictionary-valued, and "
"MozMap-valued attributes",
[self.location])
if not self.type.unroll().isExposedInAllOf(self.exposureSet):
raise WebIDLError("Attribute returns a type that is not exposed "
"everywhere where the attribute is exposed",
[self.location])
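    # Illustrative note on validate() above: [Cached] on a sequence-valued
    # attribute is only accepted together with something that sets affects
    # to "Nothing" (e.g. [Pure] or [Affects=Nothing]); [Cached] alone keeps
    # the default affects == "Everything" and is rejected.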
def handleExtendedAttribute(self, attr):
identifier = attr.identifier()
if identifier == "SetterThrows" and self.readonly:
raise WebIDLError("Readonly attributes must not be flagged as "
"[SetterThrows]",
[self.location])
elif (((identifier == "Throws" or identifier == "GetterThrows") and
self.getExtendedAttribute("StoreInSlot")) or
(identifier == "StoreInSlot" and
(self.getExtendedAttribute("Throws") or
self.getExtendedAttribute("GetterThrows")))):
raise WebIDLError("Throwing things can't be [StoreInSlot]",
[attr.location])
elif identifier == "LenientThis":
if not attr.noArguments():
raise WebIDLError("[LenientThis] must take no arguments",
[attr.location])
if self.isStatic():
raise WebIDLError("[LenientThis] is only allowed on non-static "
"attributes", [attr.location, self.location])
if self.getExtendedAttribute("CrossOriginReadable"):
raise WebIDLError("[LenientThis] is not allowed in combination "
"with [CrossOriginReadable]",
[attr.location, self.location])
if self.getExtendedAttribute("CrossOriginWritable"):
raise WebIDLError("[LenientThis] is not allowed in combination "
"with [CrossOriginWritable]",
[attr.location, self.location])
self.lenientThis = True
elif identifier == "Unforgeable":
if self.isStatic():
raise WebIDLError("[Unforgeable] is only allowed on non-static "
"attributes", [attr.location, self.location])
self._unforgeable = True
elif identifier == "SameObject" and not self.readonly:
raise WebIDLError("[SameObject] only allowed on readonly attributes",
[attr.location, self.location])
elif identifier == "Constant" and not self.readonly:
raise WebIDLError("[Constant] only allowed on readonly attributes",
[attr.location, self.location])
elif identifier == "PutForwards":
if not self.readonly:
raise WebIDLError("[PutForwards] is only allowed on readonly "
"attributes", [attr.location, self.location])
if self.isStatic():
raise WebIDLError("[PutForwards] is only allowed on non-static "
"attributes", [attr.location, self.location])
if self.getExtendedAttribute("Replaceable") is not None:
raise WebIDLError("[PutForwards] and [Replaceable] can't both "
"appear on the same attribute",
[attr.location, self.location])
if not attr.hasValue():
raise WebIDLError("[PutForwards] takes an identifier",
[attr.location, self.location])
elif identifier == "Replaceable":
if not attr.noArguments():
raise WebIDLError("[Replaceable] must take no arguments",
[attr.location])
if not self.readonly:
raise WebIDLError("[Replaceable] is only allowed on readonly "
"attributes", [attr.location, self.location])
if self.isStatic():
raise WebIDLError("[Replaceable] is only allowed on non-static "
"attributes", [attr.location, self.location])
if self.getExtendedAttribute("PutForwards") is not None:
raise WebIDLError("[PutForwards] and [Replaceable] can't both "
"appear on the same attribute",
[attr.location, self.location])
elif identifier == "LenientFloat":
if self.readonly:
raise WebIDLError("[LenientFloat] used on a readonly attribute",
[attr.location, self.location])
if not self.type.includesRestrictedFloat():
raise WebIDLError("[LenientFloat] used on an attribute with a "
"non-restricted-float type",
[attr.location, self.location])
elif identifier == "EnforceRange":
if self.readonly:
raise WebIDLError("[EnforceRange] used on a readonly attribute",
[attr.location, self.location])
self.enforceRange = True
elif identifier == "Clamp":
if self.readonly:
raise WebIDLError("[Clamp] used on a readonly attribute",
[attr.location, self.location])
self.clamp = True
elif identifier == "StoreInSlot":
if self.getExtendedAttribute("Cached"):
raise WebIDLError("[StoreInSlot] and [Cached] must not be "
"specified on the same attribute",
[attr.location, self.location])
elif identifier == "Cached":
if self.getExtendedAttribute("StoreInSlot"):
raise WebIDLError("[Cached] and [StoreInSlot] must not be "
"specified on the same attribute",
[attr.location, self.location])
elif (identifier == "CrossOriginReadable" or
identifier == "CrossOriginWritable"):
if not attr.noArguments() and identifier == "CrossOriginReadable":
raise WebIDLError("[%s] must take no arguments" % identifier,
[attr.location])
if self.isStatic():
raise WebIDLError("[%s] is only allowed on non-static "
"attributes" % identifier,
[attr.location, self.location])
if self.getExtendedAttribute("LenientThis"):
raise WebIDLError("[LenientThis] is not allowed in combination "
"with [%s]" % identifier,
[attr.location, self.location])
elif identifier == "Exposed":
convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
elif identifier == "Pure":
if not attr.noArguments():
raise WebIDLError("[Pure] must take no arguments",
[attr.location])
self._setDependsOn("DOMState")
self._setAffects("Nothing")
elif identifier == "Constant" or identifier == "SameObject":
if not attr.noArguments():
raise WebIDLError("[%s] must take no arguments" % identifier,
[attr.location])
self._setDependsOn("Nothing")
self._setAffects("Nothing")
elif identifier == "Affects":
if not attr.hasValue():
raise WebIDLError("[Affects] takes an identifier",
[attr.location])
self._setAffects(attr.value())
elif identifier == "DependsOn":
if not attr.hasValue():
raise WebIDLError("[DependsOn] takes an identifier",
[attr.location])
if (attr.value() != "Everything" and attr.value() != "DOMState" and
not self.readonly):
raise WebIDLError("[DependsOn=%s] only allowed on "
"readonly attributes" % attr.value(),
[attr.location, self.location])
self._setDependsOn(attr.value())
elif (identifier == "Pref" or
identifier == "Deprecated" or
identifier == "SetterThrows" or
identifier == "Throws" or
identifier == "GetterThrows" or
identifier == "ChromeOnly" or
identifier == "Func" or
identifier == "Frozen" or
identifier == "AvailableIn" or
identifier == "NewObject" or
identifier == "UnsafeInPrerendering" or
identifier == "CheckAnyPermissions" or
identifier == "CheckAllPermissions" or
identifier == "BinaryName"):
# Known attributes that we don't need to do anything with here
pass
else:
raise WebIDLError("Unknown extended attribute %s on attribute" % identifier,
[attr.location])
IDLInterfaceMember.handleExtendedAttribute(self, attr)
def resolve(self, parentScope):
assert isinstance(parentScope, IDLScope)
self.type.resolveType(parentScope)
IDLObjectWithIdentifier.resolve(self, parentScope)
def addExtendedAttributes(self, attrs):
attrs = self.checkForStringHandlingExtendedAttributes(attrs)
IDLInterfaceMember.addExtendedAttributes(self, attrs)
def hasLenientThis(self):
return self.lenientThis
def isMaplikeOrSetlikeAttr(self):
"""
True if this attribute was generated from an interface with
maplike/setlike (e.g. this is the size attribute for
maplike/setlike)
"""
return self.maplikeOrSetlike is not None
def isUnforgeable(self):
return self._unforgeable
def _getDependentObjects(self):
return set([self.type])
class IDLArgument(IDLObjectWithIdentifier):
    def __init__(self, location, identifier, type, optional=False,
                 defaultValue=None, variadic=False, dictionaryMember=False):
IDLObjectWithIdentifier.__init__(self, location, None, identifier)
assert isinstance(type, IDLType)
self.type = type
self.optional = optional
self.defaultValue = defaultValue
self.variadic = variadic
self.dictionaryMember = dictionaryMember
self._isComplete = False
self.enforceRange = False
self.clamp = False
self._allowTreatNonCallableAsNull = False
assert not variadic or optional
assert not variadic or not defaultValue
def addExtendedAttributes(self, attrs):
attrs = self.checkForStringHandlingExtendedAttributes(
attrs,
isDictionaryMember=self.dictionaryMember,
isOptional=self.optional)
for attribute in attrs:
identifier = attribute.identifier()
if identifier == "Clamp":
if not attribute.noArguments():
raise WebIDLError("[Clamp] must take no arguments",
[attribute.location])
if self.enforceRange:
raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive",
[self.location])
self.clamp = True
elif identifier == "EnforceRange":
if not attribute.noArguments():
raise WebIDLError("[EnforceRange] must take no arguments",
[attribute.location])
if self.clamp:
raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive",
[self.location])
self.enforceRange = True
elif identifier == "TreatNonCallableAsNull":
self._allowTreatNonCallableAsNull = True
else:
raise WebIDLError("Unhandled extended attribute on %s" %
("a dictionary member" if self.dictionaryMember else
"an argument"),
[attribute.location])
def isComplete(self):
return self._isComplete
def complete(self, scope):
if self._isComplete:
return
self._isComplete = True
if not self.type.isComplete():
type = self.type.complete(scope)
assert not isinstance(type, IDLUnresolvedType)
assert not isinstance(type, IDLTypedefType)
assert not isinstance(type.name, IDLUnresolvedIdentifier)
self.type = type
        if ((self.type.isDictionary() or
             (self.type.isUnion() and
              self.type.unroll().hasDictionaryType())) and
            self.optional and not self.defaultValue and not self.variadic):
# Default optional non-variadic dictionaries to null,
# for simplicity, so the codegen doesn't have to special-case this.
self.defaultValue = IDLNullValue(self.location)
elif self.type.isAny():
assert (self.defaultValue is None or
isinstance(self.defaultValue, IDLNullValue))
# optional 'any' values always have a default value
if self.optional and not self.defaultValue and not self.variadic:
# Set the default value to undefined, for simplicity, so the
# codegen doesn't have to special-case this.
self.defaultValue = IDLUndefinedValue(self.location)
# Now do the coercing thing; this needs to happen after the
# above creation of a default value.
if self.defaultValue:
self.defaultValue = self.defaultValue.coerceToType(self.type,
self.location)
assert self.defaultValue
def allowTreatNonCallableAsNull(self):
return self._allowTreatNonCallableAsNull
def _getDependentObjects(self):
deps = set([self.type])
if self.defaultValue:
deps.add(self.defaultValue)
return deps
def canHaveMissingValue(self):
return self.optional and not self.defaultValue
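# Illustrative note on complete() above: an "optional MyDict arg" (MyDict
# standing in for any dictionary type) with no explicit default receives an
# IDLNullValue default, and an "optional any arg" receives an
# IDLUndefinedValue default, so codegen never has to special-case
# optional-without-default for those types.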
class IDLCallback(IDLObjectWithScope):
def __init__(self, location, parentScope, identifier, returnType, arguments):
assert isinstance(returnType, IDLType)
self._returnType = returnType
# Clone the list
self._arguments = list(arguments)
IDLObjectWithScope.__init__(self, location, parentScope, identifier)
for (returnType, arguments) in self.signatures():
for argument in arguments:
argument.resolve(self)
self._treatNonCallableAsNull = False
self._treatNonObjectAsNull = False
def module(self):
return self.location.filename().split('/')[-1].split('.webidl')[0] + 'Binding'
def isCallback(self):
return True
def signatures(self):
return [(self._returnType, self._arguments)]
def finish(self, scope):
if not self._returnType.isComplete():
type = self._returnType.complete(scope)
assert not isinstance(type, IDLUnresolvedType)
assert not isinstance(type, IDLTypedefType)
assert not isinstance(type.name, IDLUnresolvedIdentifier)
self._returnType = type
for argument in self._arguments:
if argument.type.isComplete():
continue
type = argument.type.complete(scope)
assert not isinstance(type, IDLUnresolvedType)
assert not isinstance(type, IDLTypedefType)
assert not isinstance(type.name, IDLUnresolvedIdentifier)
argument.type = type
def validate(self):
pass
def addExtendedAttributes(self, attrs):
unhandledAttrs = []
for attr in attrs:
if attr.identifier() == "TreatNonCallableAsNull":
self._treatNonCallableAsNull = True
elif attr.identifier() == "TreatNonObjectAsNull":
self._treatNonObjectAsNull = True
else:
unhandledAttrs.append(attr)
if self._treatNonCallableAsNull and self._treatNonObjectAsNull:
raise WebIDLError("Cannot specify both [TreatNonCallableAsNull] "
"and [TreatNonObjectAsNull]", [self.location])
if len(unhandledAttrs) != 0:
IDLType.addExtendedAttributes(self, unhandledAttrs)
def _getDependentObjects(self):
return set([self._returnType] + self._arguments)
class IDLCallbackType(IDLType):
def __init__(self, location, callback):
IDLType.__init__(self, location, callback.identifier.name)
self.callback = callback
def isCallback(self):
return True
def tag(self):
return IDLType.Tags.callback
def isDistinguishableFrom(self, other):
if other.isPromise():
return False
if other.isUnion():
# Just forward to the union; it'll deal
return other.isDistinguishableFrom(self)
return (other.isPrimitive() or other.isString() or other.isEnum() or
other.isNonCallbackInterface() or other.isDate() or
other.isSequence())
def _getDependentObjects(self):
return self.callback._getDependentObjects()
class IDLMethodOverload:
"""
A class that represents a single overload of a WebIDL method. This is not
quite the same as an element of the "effective overload set" in the spec,
because separate IDLMethodOverloads are not created based on arguments being
optional. Rather, when multiple methods have the same name, there is an
IDLMethodOverload for each one, all hanging off an IDLMethod representing
the full set of overloads.
"""
def __init__(self, returnType, arguments, location):
self.returnType = returnType
# Clone the list of arguments, just in case
self.arguments = list(arguments)
self.location = location
def _getDependentObjects(self):
deps = set(self.arguments)
deps.add(self.returnType)
return deps
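# Illustrative note: for WebIDL such as
#     void f(long x);
#     void f(DOMString x, DOMString y);
# the parser keeps a single IDLMethod for "f" holding two IDLMethodOverload
# entries, one per declared signature; optional arguments do not create
# extra overloads.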
class IDLMethod(IDLInterfaceMember, IDLScope):
Special = enum(
'Getter',
'Setter',
'Creator',
'Deleter',
'LegacyCaller',
base=IDLInterfaceMember.Special
)
TypeSuffixModifier = enum(
'None',
'QMark',
'Brackets'
)
NamedOrIndexed = enum(
'Neither',
'Named',
'Indexed'
)
def __init__(self, location, identifier, returnType, arguments,
static=False, getter=False, setter=False, creator=False,
deleter=False, specialType=NamedOrIndexed.Neither,
legacycaller=False, stringifier=False, jsonifier=False,
maplikeOrSetlike=None):
# REVIEW: specialType is NamedOrIndexed -- wow, this is messed up.
IDLInterfaceMember.__init__(self, location, identifier,
IDLInterfaceMember.Tags.Method)
self._hasOverloads = False
assert isinstance(returnType, IDLType)
# self._overloads is a list of IDLMethodOverloads
self._overloads = [IDLMethodOverload(returnType, arguments, location)]
assert isinstance(static, bool)
self._static = static
assert isinstance(getter, bool)
self._getter = getter
assert isinstance(setter, bool)
self._setter = setter
assert isinstance(creator, bool)
self._creator = creator
assert isinstance(deleter, bool)
self._deleter = deleter
assert isinstance(legacycaller, bool)
self._legacycaller = legacycaller
assert isinstance(stringifier, bool)
self._stringifier = stringifier
assert isinstance(jsonifier, bool)
self._jsonifier = jsonifier
assert maplikeOrSetlike is None or isinstance(maplikeOrSetlike, IDLMaplikeOrSetlike)
self.maplikeOrSetlike = maplikeOrSetlike
self._specialType = specialType
self._unforgeable = False
self.dependsOn = "Everything"
self.affects = "Everything"
self.aliases = []
if static and identifier.name == "prototype":
raise WebIDLError("The identifier of a static operation must not be 'prototype'",
[location])
self.assertSignatureConstraints()
def __str__(self):
return "Method '%s'" % self.identifier
def assertSignatureConstraints(self):
if self._getter or self._deleter:
assert len(self._overloads) == 1
overload = self._overloads[0]
arguments = overload.arguments
assert len(arguments) == 1
assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or
arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long])
assert not arguments[0].optional and not arguments[0].variadic
assert not self._getter or not overload.returnType.isVoid()
if self._setter or self._creator:
assert len(self._overloads) == 1
arguments = self._overloads[0].arguments
assert len(arguments) == 2
assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or
arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long])
assert not arguments[0].optional and not arguments[0].variadic
assert not arguments[1].optional and not arguments[1].variadic
if self._stringifier:
assert len(self._overloads) == 1
overload = self._overloads[0]
assert len(overload.arguments) == 0
assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring]
if self._jsonifier:
assert len(self._overloads) == 1
overload = self._overloads[0]
assert len(overload.arguments) == 0
assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.object]
def isStatic(self):
return self._static
def isGetter(self):
return self._getter
def isSetter(self):
return self._setter
def isCreator(self):
return self._creator
def isDeleter(self):
return self._deleter
def isNamed(self):
assert (self._specialType == IDLMethod.NamedOrIndexed.Named or
self._specialType == IDLMethod.NamedOrIndexed.Indexed)
return self._specialType == IDLMethod.NamedOrIndexed.Named
def isIndexed(self):
assert (self._specialType == IDLMethod.NamedOrIndexed.Named or
self._specialType == IDLMethod.NamedOrIndexed.Indexed)
return self._specialType == IDLMethod.NamedOrIndexed.Indexed
def isLegacycaller(self):
return self._legacycaller
def isStringifier(self):
return self._stringifier
def isJsonifier(self):
return self._jsonifier
def isMaplikeOrSetlikeMethod(self):
"""
True if this method was generated as part of a
maplike/setlike/etc interface (e.g. has/get methods)
"""
return self.maplikeOrSetlike is not None
def hasOverloads(self):
return self._hasOverloads
def isIdentifierLess(self):
"""
True if the method name started with __, and if the method is not a
maplike/setlike method. Interfaces with maplike/setlike will generate
methods starting with __ for chrome only backing object access in JS
implemented interfaces, so while these functions use what is considered
an non-identifier name, they actually DO have an identifier.
"""
return (self.identifier.name[:2] == "__" and
not self.isMaplikeOrSetlikeMethod())
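    # Illustrative note: the chrome-only "__clear" that IDLMaplikeOrSetlike
    # generates for JS-implemented interfaces starts with "__" but still
    # reports isIdentifierLess() == False, because it is a maplike/setlike
    # method with a real identifier.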
def resolve(self, parentScope):
assert isinstance(parentScope, IDLScope)
IDLObjectWithIdentifier.resolve(self, parentScope)
IDLScope.__init__(self, self.location, parentScope, self.identifier)
for (returnType, arguments) in self.signatures():
for argument in arguments:
argument.resolve(self)
def addOverload(self, method):
assert len(method._overloads) == 1
        if self._extendedAttrDict != method._extendedAttrDict:
raise WebIDLError("Extended attributes differ on different "
"overloads of %s" % method.identifier,
[self.location, method.location])
self._overloads.extend(method._overloads)
self._hasOverloads = True
        if self.isStatic() != method.isStatic():
            raise WebIDLError("Overloaded identifier %s appears with different "
                              "values of the 'static' attribute" % method.identifier,
                              [method.location])
        if self.isLegacycaller() != method.isLegacycaller():
            raise WebIDLError("Overloaded identifier %s appears with different "
                              "values of the 'legacycaller' attribute" % method.identifier,
                              [method.location])
# Can't overload special things!
assert not self.isGetter()
assert not method.isGetter()
assert not self.isSetter()
assert not method.isSetter()
assert not self.isCreator()
assert not method.isCreator()
assert not self.isDeleter()
assert not method.isDeleter()
assert not self.isStringifier()
assert not method.isStringifier()
assert not self.isJsonifier()
assert not method.isJsonifier()
return self
def signatures(self):
return [(overload.returnType, overload.arguments) for overload in
self._overloads]
def finish(self, scope):
IDLInterfaceMember.finish(self, scope)
for overload in self._overloads:
returnType = overload.returnType
if not returnType.isComplete():
returnType = returnType.complete(scope)
assert not isinstance(returnType, IDLUnresolvedType)
assert not isinstance(returnType, IDLTypedefType)
assert not isinstance(returnType.name, IDLUnresolvedIdentifier)
overload.returnType = returnType
for argument in overload.arguments:
if not argument.isComplete():
argument.complete(scope)
assert argument.type.isComplete()
# Now compute various information that will be used by the
# WebIDL overload resolution algorithm.
self.maxArgCount = max(len(s[1]) for s in self.signatures())
self.allowedArgCounts = [i for i in range(self.maxArgCount+1)
if len(self.signaturesForArgCount(i)) != 0]
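        # Illustrative note: given the overloads
        #     void f(long a);
        #     void f(long a, optional long b, optional long c);
        # maxArgCount is 3 and allowedArgCounts is [1, 2, 3]; zero arguments
        # is not allowed because neither signature accepts an empty call.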
def validate(self):
IDLInterfaceMember.validate(self)
# Make sure our overloads are properly distinguishable and don't have
# different argument types before the distinguishing args.
for argCount in self.allowedArgCounts:
possibleOverloads = self.overloadsForArgCount(argCount)
if len(possibleOverloads) == 1:
continue
distinguishingIndex = self.distinguishingIndexForArgCount(argCount)
for idx in range(distinguishingIndex):
firstSigType = possibleOverloads[0].arguments[idx].type
for overload in possibleOverloads[1:]:
if overload.arguments[idx].type != firstSigType:
raise WebIDLError(
"Signatures for method '%s' with %d arguments have "
"different types of arguments at index %d, which "
"is before distinguishing index %d" %
(self.identifier.name, argCount, idx,
distinguishingIndex),
[self.location, overload.location])
overloadWithPromiseReturnType = None
overloadWithoutPromiseReturnType = None
for overload in self._overloads:
returnType = overload.returnType
if not returnType.unroll().isExposedInAllOf(self.exposureSet):
raise WebIDLError("Overload returns a type that is not exposed "
"everywhere where the method is exposed",
[overload.location])
variadicArgument = None
arguments = overload.arguments
for (idx, argument) in enumerate(arguments):
assert argument.type.isComplete()
if ((argument.type.isDictionary() and
argument.type.inner.canBeEmpty()) or
(argument.type.isUnion() and
argument.type.unroll().hasPossiblyEmptyDictionaryType())):
# Optional dictionaries and unions containing optional
# dictionaries at the end of the list or followed by
# optional arguments must be optional.
if (not argument.optional and
all(arg.optional for arg in arguments[idx+1:])):
raise WebIDLError("Dictionary argument or union "
"argument containing a dictionary "
"not followed by a required argument "
"must be optional",
[argument.location])
# An argument cannot be a nullable dictionary, or a nullable
# union containing a dictionary
if argument.type.nullable():
raise WebIDLError("An argument cannot be a nullable "
"dictionary or nullable union "
"containing a dictionary",
[argument.location])
# Only the last argument can be variadic
if variadicArgument:
raise WebIDLError("Variadic argument is not last argument",
[variadicArgument.location])
if argument.variadic:
variadicArgument = argument
if returnType.isPromise():
overloadWithPromiseReturnType = overload
else:
overloadWithoutPromiseReturnType = overload
# Make sure either all our overloads return Promises or none do
if overloadWithPromiseReturnType and overloadWithoutPromiseReturnType:
raise WebIDLError("We have overloads with both Promise and "
"non-Promise return types",
[overloadWithPromiseReturnType.location,
overloadWithoutPromiseReturnType.location])
if overloadWithPromiseReturnType and self._legacycaller:
raise WebIDLError("May not have a Promise return type for a "
"legacycaller.",
[overloadWithPromiseReturnType.location])
if self.getExtendedAttribute("StaticClassOverride") and not \
(self.identifier.scope.isJSImplemented() and self.isStatic()):
raise WebIDLError("StaticClassOverride can be applied to static"
" methods on JS-implemented classes only.",
[self.location])
def overloadsForArgCount(self, argc):
return [overload for overload in self._overloads if
len(overload.arguments) == argc or
(len(overload.arguments) > argc and
all(arg.optional for arg in overload.arguments[argc:])) or
(len(overload.arguments) < argc and
len(overload.arguments) > 0 and
overload.arguments[-1].variadic)]
def signaturesForArgCount(self, argc):
return [(overload.returnType, overload.arguments) for overload
in self.overloadsForArgCount(argc)]
def locationsForArgCount(self, argc):
return [overload.location for overload in self.overloadsForArgCount(argc)]
def distinguishingIndexForArgCount(self, argc):
def isValidDistinguishingIndex(idx, signatures):
for (firstSigIndex, (firstRetval, firstArgs)) in enumerate(signatures[:-1]):
for (secondRetval, secondArgs) in signatures[firstSigIndex+1:]:
if idx < len(firstArgs):
firstType = firstArgs[idx].type
else:
assert(firstArgs[-1].variadic)
firstType = firstArgs[-1].type
if idx < len(secondArgs):
secondType = secondArgs[idx].type
else:
assert(secondArgs[-1].variadic)
secondType = secondArgs[-1].type
if not firstType.isDistinguishableFrom(secondType):
return False
return True
signatures = self.signaturesForArgCount(argc)
for idx in range(argc):
if isValidDistinguishingIndex(idx, signatures):
return idx
# No valid distinguishing index. Time to throw
locations = self.locationsForArgCount(argc)
raise WebIDLError("Signatures with %d arguments for method '%s' are not "
"distinguishable" % (argc, self.identifier.name),
locations)
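# Illustrative sketch (not from the original source): given the overloads
#   void f(long x, DOMString y);
#   void f(Node x, DOMString y);
# the two signatures for argc == 2 first differ at index 0, and since a
# numeric type and an interface type are distinguishable,
# distinguishingIndexForArgCount(2) returns 0. If both signatures began
# with long instead, no index would be valid and we would throw here.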
def handleExtendedAttribute(self, attr):
identifier = attr.identifier()
if identifier == "GetterThrows":
raise WebIDLError("Methods must not be flagged as "
"[GetterThrows]",
[attr.location, self.location])
elif identifier == "SetterThrows":
raise WebIDLError("Methods must not be flagged as "
"[SetterThrows]",
[attr.location, self.location])
elif identifier == "Unforgeable":
if self.isStatic():
raise WebIDLError("[Unforgeable] is only allowed on non-static "
"methods", [attr.location, self.location])
self._unforgeable = True
elif identifier == "SameObject":
raise WebIDLError("Methods must not be flagged as [SameObject]",
[attr.location, self.location])
elif identifier == "Constant":
raise WebIDLError("Methods must not be flagged as [Constant]",
[attr.location, self.location])
elif identifier == "PutForwards":
raise WebIDLError("Only attributes support [PutForwards]",
[attr.location, self.location])
elif identifier == "LenientFloat":
# This is called before we've done overload resolution
assert len(self.signatures()) == 1
sig = self.signatures()[0]
if not sig[0].isVoid():
raise WebIDLError("[LenientFloat] used on a non-void method",
[attr.location, self.location])
if not any(arg.type.includesRestrictedFloat() for arg in sig[1]):
raise WebIDLError("[LenientFloat] used on an operation with no "
"restricted float type arguments",
[attr.location, self.location])
elif identifier == "Exposed":
convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
elif (identifier == "CrossOriginCallable" or
identifier == "WebGLHandlesContextLoss"):
# Known no-argument attributes.
if not attr.noArguments():
raise WebIDLError("[%s] must take no arguments" % identifier,
[attr.location])
elif identifier == "Pure":
if not attr.noArguments():
raise WebIDLError("[Pure] must take no arguments",
[attr.location])
self._setDependsOn("DOMState")
self._setAffects("Nothing")
elif identifier == "Affects":
if not attr.hasValue():
raise WebIDLError("[Affects] takes an identifier",
[attr.location])
self._setAffects(attr.value())
elif identifier == "DependsOn":
if not attr.hasValue():
raise WebIDLError("[DependsOn] takes an identifier",
[attr.location])
self._setDependsOn(attr.value())
elif identifier == "Alias":
if not attr.hasValue():
raise WebIDLError("[Alias] takes an identifier or string",
[attr.location])
self._addAlias(attr.value())
elif (identifier == "Throws" or
identifier == "NewObject" or
identifier == "ChromeOnly" or
identifier == "UnsafeInPrerendering" or
identifier == "Pref" or
identifier == "Deprecated" or
identifier == "Func" or
identifier == "AvailableIn" or
identifier == "CheckAnyPermissions" or
identifier == "CheckAllPermissions" or
identifier == "BinaryName" or
identifier == "MethodIdentityTestable" or
identifier == "StaticClassOverride"):
# Known attributes that we don't need to do anything with here
pass
else:
raise WebIDLError("Unknown extended attribute %s on method" % identifier,
[attr.location])
IDLInterfaceMember.handleExtendedAttribute(self, attr)
def returnsPromise(self):
return self._overloads[0].returnType.isPromise()
def isUnforgeable(self):
return self._unforgeable
def _getDependentObjects(self):
deps = set()
for overload in self._overloads:
deps.update(overload._getDependentObjects())
return deps
class IDLImplementsStatement(IDLObject):
def __init__(self, location, implementor, implementee):
IDLObject.__init__(self, location)
self.implementor = implementor
self.implementee = implementee
self._finished = False
def finish(self, scope):
if self._finished:
return
assert(isinstance(self.implementor, IDLIdentifierPlaceholder))
assert(isinstance(self.implementee, IDLIdentifierPlaceholder))
implementor = self.implementor.finish(scope)
implementee = self.implementee.finish(scope)
# NOTE: we depend on not setting self.implementor and
# self.implementee here to keep track of the original
# locations.
if not isinstance(implementor, IDLInterface):
raise WebIDLError("Left-hand side of 'implements' is not an "
"interface",
[self.implementor.location])
if implementor.isCallback():
raise WebIDLError("Left-hand side of 'implements' is a callback "
"interface",
[self.implementor.location])
if not isinstance(implementee, IDLInterface):
raise WebIDLError("Right-hand side of 'implements' is not an "
"interface",
[self.implementee.location])
if implementee.isCallback():
raise WebIDLError("Right-hand side of 'implements' is a callback "
"interface",
[self.implementee.location])
implementor.addImplementedInterface(implementee)
self.implementor = implementor
self.implementee = implementee
def validate(self):
pass
def addExtendedAttributes(self, attrs):
assert len(attrs) == 0
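# Hedged usage note (not from the original source): an IDL fragment such as
#   interface A {};
#   interface B {};
#   A implements B;
# yields an IDLImplementsStatement whose finish() resolves both placeholders
# and calls A.addImplementedInterface(B).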
class IDLExtendedAttribute(IDLObject):
"""
A class to represent IDL extended attributes so we can give them locations
"""
def __init__(self, location, tuple):
IDLObject.__init__(self, location)
self._tuple = tuple
def identifier(self):
return self._tuple[0]
def noArguments(self):
return len(self._tuple) == 1
def hasValue(self):
return len(self._tuple) >= 2 and isinstance(self._tuple[1], str)
def value(self):
assert(self.hasValue())
return self._tuple[1]
def hasArgs(self):
return (len(self._tuple) == 2 and isinstance(self._tuple[1], list) or
len(self._tuple) == 3)
def args(self):
assert(self.hasArgs())
# Our args are our last element
return self._tuple[-1]
def listValue(self):
"""
Backdoor for storing random data in _extendedAttrDict
"""
return list(self._tuple)[1:]
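# Illustrative mapping (inferred from the extended-attribute grammar below,
# not original text):
#   [Throws]                   -> ("Throws",)                  noArguments()
#   [Pref="dom.example.pref"]  -> ("Pref", "dom.example.pref") hasValue()
#   [Func(ArgumentList)]       -> ("Func", [arguments])        hasArgs()
#   [Ident=Name(ArgumentList)] -> ("Ident", "Name", [arguments])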
# Parser
class Tokenizer(object):
tokens = [
"INTEGER",
"FLOATLITERAL",
"IDENTIFIER",
"STRING",
"WHITESPACE",
"OTHER"
]
def t_FLOATLITERAL(self, t):
r'(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN'
t.value = float(t.value)
return t
def t_INTEGER(self, t):
r'-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)'
try:
# Can't use int(), because that doesn't handle octal properly.
t.value = parseInt(t.value)
except:
raise WebIDLError("Invalid integer literal",
[Location(lexer=self.lexer,
lineno=self.lexer.lineno,
lexpos=self.lexer.lexpos,
filename=self._filename)])
return t
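# Illustrative note (not from the original source): this rule accepts
# decimal ("42"), octal ("052"), and hex ("0x2A") spellings; parseInt
# (defined elsewhere in this file) normalizes all three to the int 42.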
def t_IDENTIFIER(self, t):
r'[A-Z_a-z][0-9A-Z_a-z-]*'
t.type = self.keywords.get(t.value, 'IDENTIFIER')
return t
def t_STRING(self, t):
r'"[^"]*"'
t.value = t.value[1:-1]
return t
def t_WHITESPACE(self, t):
r'[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+'
pass
def t_ELLIPSIS(self, t):
r'\.\.\.'
t.type = self.keywords.get(t.value)
return t
def t_OTHER(self, t):
r'[^\t\n\r 0-9A-Z_a-z]'
t.type = self.keywords.get(t.value, 'OTHER')
return t
keywords = {
"module": "MODULE",
"interface": "INTERFACE",
"partial": "PARTIAL",
"dictionary": "DICTIONARY",
"exception": "EXCEPTION",
"enum": "ENUM",
"callback": "CALLBACK",
"typedef": "TYPEDEF",
"implements": "IMPLEMENTS",
"const": "CONST",
"null": "NULL",
"true": "TRUE",
"false": "FALSE",
"serializer": "SERIALIZER",
"stringifier": "STRINGIFIER",
"jsonifier": "JSONIFIER",
"unrestricted": "UNRESTRICTED",
"attribute": "ATTRIBUTE",
"readonly": "READONLY",
"inherit": "INHERIT",
"static": "STATIC",
"getter": "GETTER",
"setter": "SETTER",
"creator": "CREATOR",
"deleter": "DELETER",
"legacycaller": "LEGACYCALLER",
"optional": "OPTIONAL",
"...": "ELLIPSIS",
"::": "SCOPE",
"Date": "DATE",
"DOMString": "DOMSTRING",
"ByteString": "BYTESTRING",
"USVString": "USVSTRING",
"any": "ANY",
"boolean": "BOOLEAN",
"byte": "BYTE",
"double": "DOUBLE",
"float": "FLOAT",
"long": "LONG",
"object": "OBJECT",
"octet": "OCTET",
"Promise": "PROMISE",
"required": "REQUIRED",
"sequence": "SEQUENCE",
"MozMap": "MOZMAP",
"short": "SHORT",
"unsigned": "UNSIGNED",
"void": "VOID",
":": "COLON",
";": "SEMICOLON",
"{": "LBRACE",
"}": "RBRACE",
"(": "LPAREN",
")": "RPAREN",
"[": "LBRACKET",
"]": "RBRACKET",
"?": "QUESTIONMARK",
",": "COMMA",
"=": "EQUALS",
"<": "LT",
">": "GT",
"ArrayBuffer": "ARRAYBUFFER",
"SharedArrayBuffer": "SHAREDARRAYBUFFER",
"or": "OR",
"maplike": "MAPLIKE",
"setlike": "SETLIKE"
}
tokens.extend(keywords.values())
def t_error(self, t):
raise WebIDLError("Unrecognized Input",
[Location(lexer=self.lexer,
lineno=self.lexer.lineno,
lexpos=self.lexer.lexpos,
filename=self._filename)])
def __init__(self, outputdir, lexer=None):
if lexer:
self.lexer = lexer
else:
self.lexer = lex.lex(object=self,
outputdir=outputdir,
lextab='webidllex',
reflags=re.DOTALL)
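# Hedged tokenizing sketch (not part of the original module; the helper name
# is hypothetical): feed a fragment through the lexer and collect token types.
def _exampleTokenize(outputdir=''):
    tok = Tokenizer(outputdir)
    tok.lexer.input("interface Foo : Bar { attribute long quux; };")
    # WHITESPACE matches return nothing, so only real tokens come back:
    # INTERFACE, IDENTIFIER, COLON, IDENTIFIER, LBRACE, ATTRIBUTE, LONG,
    # IDENTIFIER, SEMICOLON, RBRACE, SEMICOLON
    return [token.type for token in iter(tok.lexer.token, None)]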
class SqueakyCleanLogger(object):
errorWhitelist = [
# Web IDL defines the WHITESPACE token, but doesn't actually
# use it ... so far.
"Token 'WHITESPACE' defined, but not used",
# And that means we have an unused token
"There is 1 unused token",
# Web IDL defines a OtherOrComma rule that's only used in
# ExtendedAttributeInner, which we don't use yet.
"Rule 'OtherOrComma' defined, but not used",
# And an unused rule
"There is 1 unused rule",
# And the OtherOrComma grammar symbol is unreachable.
"Symbol 'OtherOrComma' is unreachable",
# Which means the Other symbol is unreachable.
"Symbol 'Other' is unreachable",
]
def __init__(self):
self.errors = []
def debug(self, msg, *args, **kwargs):
pass
info = debug
def warning(self, msg, *args, **kwargs):
if msg == "%s:%d: Rule '%s' defined, but not used":
# Munge things so we don't have to hardcode filenames and
# line numbers in our whitelist.
whitelistmsg = "Rule '%s' defined, but not used"
whitelistargs = args[2:]
else:
whitelistmsg = msg
whitelistargs = args
if (whitelistmsg % whitelistargs) not in SqueakyCleanLogger.errorWhitelist:
self.errors.append(msg % args)
error = warning
def reportGrammarErrors(self):
if self.errors:
raise WebIDLError("\n".join(self.errors), [])
class Parser(Tokenizer):
def getLocation(self, p, i):
return Location(self.lexer, p.lineno(i), p.lexpos(i), self._filename)
def globalScope(self):
return self._globalScope
# The p_Foo functions here must match the WebIDL spec's grammar.
# It's acceptable to split things at '|' boundaries.
def p_Definitions(self, p):
"""
Definitions : ExtendedAttributeList Definition Definitions
"""
if p[2]:
p[0] = [p[2]]
p[2].addExtendedAttributes(p[1])
else:
assert not p[1]
p[0] = []
p[0].extend(p[3])
def p_DefinitionsEmpty(self, p):
"""
Definitions :
"""
p[0] = []
def p_Definition(self, p):
"""
Definition : CallbackOrInterface
| PartialInterface
| Dictionary
| Exception
| Enum
| Typedef
| ImplementsStatement
"""
p[0] = p[1]
assert p[1] # We might not have implemented something ...
def p_CallbackOrInterfaceCallback(self, p):
"""
CallbackOrInterface : CALLBACK CallbackRestOrInterface
"""
if p[2].isInterface():
assert isinstance(p[2], IDLInterface)
p[2].setCallback(True)
p[0] = p[2]
def p_CallbackOrInterfaceInterface(self, p):
"""
CallbackOrInterface : Interface
"""
p[0] = p[1]
def p_CallbackRestOrInterface(self, p):
"""
CallbackRestOrInterface : CallbackRest
| Interface
"""
assert p[1]
p[0] = p[1]
def p_Interface(self, p):
"""
Interface : INTERFACE IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON
"""
location = self.getLocation(p, 1)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
members = p[5]
parent = p[3]
try:
existingObj = self.globalScope()._lookupIdentifier(identifier)
if existingObj:
p[0] = existingObj
if not isinstance(p[0], IDLInterface):
raise WebIDLError("Interface has the same name as "
"non-interface object",
[location, p[0].location])
p[0].setNonPartial(location, parent, members)
return
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
p[0] = IDLInterface(location, self.globalScope(), identifier, parent,
members, isKnownNonPartial=True)
def p_InterfaceForwardDecl(self, p):
"""
Interface : INTERFACE IDENTIFIER SEMICOLON
"""
location = self.getLocation(p, 1)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
try:
if self.globalScope()._lookupIdentifier(identifier):
p[0] = self.globalScope()._lookupIdentifier(identifier)
if not isinstance(p[0], IDLExternalInterface):
raise WebIDLError("Name collision between external "
"interface declaration for identifier "
"%s and %s" % (identifier.name, p[0]),
[location, p[0].location])
return
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
p[0] = IDLExternalInterface(location, self.globalScope(), identifier)
def p_PartialInterface(self, p):
"""
PartialInterface : PARTIAL INTERFACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON
"""
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
members = p[5]
nonPartialInterface = None
try:
nonPartialInterface = self.globalScope()._lookupIdentifier(identifier)
if nonPartialInterface:
if not isinstance(nonPartialInterface, IDLInterface):
raise WebIDLError("Partial interface has the same name as "
"non-interface object",
[location, nonPartialInterface.location])
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
if not nonPartialInterface:
nonPartialInterface = IDLInterface(location, self.globalScope(),
identifier, None,
[], isKnownNonPartial=False)
partialInterface = IDLPartialInterface(location, identifier, members,
nonPartialInterface)
p[0] = partialInterface
def p_Inheritance(self, p):
"""
Inheritance : COLON ScopedName
"""
p[0] = IDLIdentifierPlaceholder(self.getLocation(p, 2), p[2])
def p_InheritanceEmpty(self, p):
"""
Inheritance :
"""
pass
def p_InterfaceMembers(self, p):
"""
InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers
"""
p[0] = [p[2]] if p[2] else []
assert not p[1] or p[2]
p[2].addExtendedAttributes(p[1])
p[0].extend(p[3])
def p_InterfaceMembersEmpty(self, p):
"""
InterfaceMembers :
"""
p[0] = []
def p_InterfaceMember(self, p):
"""
InterfaceMember : Const
| AttributeOrOperationOrMaplikeOrSetlike
"""
p[0] = p[1]
def p_Dictionary(self, p):
"""
Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON
"""
location = self.getLocation(p, 1)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
members = p[5]
p[0] = IDLDictionary(location, self.globalScope(), identifier, p[3], members)
def p_DictionaryMembers(self, p):
"""
DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers
|
"""
if len(p) == 1:
# We're at the end of the list
p[0] = []
return
# Add our extended attributes
p[2].addExtendedAttributes(p[1])
p[0] = [p[2]]
p[0].extend(p[3])
def p_DictionaryMember(self, p):
"""
DictionaryMember : Required Type IDENTIFIER Default SEMICOLON
"""
# These quack a lot like optional arguments, so just treat them that way.
t = p[2]
assert isinstance(t, IDLType)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
defaultValue = p[4]
optional = not p[1]
if not optional and defaultValue:
raise WebIDLError("Required dictionary members can't have a default value.",
[self.getLocation(p, 4)])
p[0] = IDLArgument(self.getLocation(p, 3), identifier, t,
optional=optional,
defaultValue=defaultValue, variadic=False,
dictionaryMember=True)
def p_Default(self, p):
"""
Default : EQUALS DefaultValue
|
"""
if len(p) > 1:
p[0] = p[2]
else:
p[0] = None
def p_DefaultValue(self, p):
"""
DefaultValue : ConstValue
| LBRACKET RBRACKET
"""
if len(p) == 2:
p[0] = p[1]
else:
assert len(p) == 3 # Must be []
p[0] = IDLEmptySequenceValue(self.getLocation(p, 1))
def p_Exception(self, p):
"""
Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON
"""
pass
def p_Enum(self, p):
"""
Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON
"""
location = self.getLocation(p, 1)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
values = p[4]
assert values
p[0] = IDLEnum(location, self.globalScope(), identifier, values)
def p_EnumValueList(self, p):
"""
EnumValueList : STRING EnumValueListComma
"""
p[0] = [p[1]]
p[0].extend(p[2])
def p_EnumValueListComma(self, p):
"""
EnumValueListComma : COMMA EnumValueListString
"""
p[0] = p[2]
def p_EnumValueListCommaEmpty(self, p):
"""
EnumValueListComma :
"""
p[0] = []
def p_EnumValueListString(self, p):
"""
EnumValueListString : STRING EnumValueListComma
"""
p[0] = [p[1]]
p[0].extend(p[2])
def p_EnumValueListStringEmpty(self, p):
"""
EnumValueListString :
"""
p[0] = []
def p_CallbackRest(self, p):
"""
CallbackRest : IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON
"""
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
p[0] = IDLCallback(self.getLocation(p, 1), self.globalScope(),
identifier, p[3], p[5])
def p_ExceptionMembers(self, p):
"""
ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers
|
"""
pass
def p_Typedef(self, p):
"""
Typedef : TYPEDEF Type IDENTIFIER SEMICOLON
"""
typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(),
p[2], p[3])
p[0] = typedef
def p_ImplementsStatement(self, p):
"""
ImplementsStatement : ScopedName IMPLEMENTS ScopedName SEMICOLON
"""
assert(p[2] == "implements")
implementor = IDLIdentifierPlaceholder(self.getLocation(p, 1), p[1])
implementee = IDLIdentifierPlaceholder(self.getLocation(p, 3), p[3])
p[0] = IDLImplementsStatement(self.getLocation(p, 1), implementor,
implementee)
def p_Const(self, p):
"""
Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON
"""
location = self.getLocation(p, 1)
type = p[2]
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
value = p[5]
p[0] = IDLConst(location, identifier, type, value)
def p_ConstValueBoolean(self, p):
"""
ConstValue : BooleanLiteral
"""
location = self.getLocation(p, 1)
booleanType = BuiltinTypes[IDLBuiltinType.Types.boolean]
p[0] = IDLValue(location, booleanType, p[1])
def p_ConstValueInteger(self, p):
"""
ConstValue : INTEGER
"""
location = self.getLocation(p, 1)
# We don't know ahead of time what type the integer literal is.
# Determine the smallest type it could possibly fit in and use that.
integerType = matchIntegerValueToType(p[1])
if integerType is None:
raise WebIDLError("Integer literal out of range", [location])
p[0] = IDLValue(location, integerType, p[1])
def p_ConstValueFloat(self, p):
"""
ConstValue : FLOATLITERAL
"""
location = self.getLocation(p, 1)
p[0] = IDLValue(location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1])
def p_ConstValueString(self, p):
"""
ConstValue : STRING
"""
location = self.getLocation(p, 1)
stringType = BuiltinTypes[IDLBuiltinType.Types.domstring]
p[0] = IDLValue(location, stringType, p[1])
def p_ConstValueNull(self, p):
"""
ConstValue : NULL
"""
p[0] = IDLNullValue(self.getLocation(p, 1))
def p_BooleanLiteralTrue(self, p):
"""
BooleanLiteral : TRUE
"""
p[0] = True
def p_BooleanLiteralFalse(self, p):
"""
BooleanLiteral : FALSE
"""
p[0] = False
def p_AttributeOrOperationOrMaplikeOrSetlike(self, p):
"""
AttributeOrOperationOrMaplikeOrSetlike : Attribute
| Maplike
| Setlike
| Operation
"""
p[0] = p[1]
def p_Setlike(self, p):
"""
Setlike : ReadOnly SETLIKE LT Type GT SEMICOLON
"""
readonly = p[1]
maplikeOrSetlikeType = p[2]
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(location, "__setlike",
allowDoubleUnderscore=True)
keyType = p[4]
valueType = keyType
p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType,
readonly, keyType, valueType)
def p_Maplike(self, p):
"""
Maplike : ReadOnly MAPLIKE LT Type COMMA Type GT SEMICOLON
"""
readonly = p[1]
maplikeOrSetlikeType = p[2]
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(location, "__maplike",
allowDoubleUnderscore=True)
keyType = p[4]
valueType = p[6]
p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType,
readonly, keyType, valueType)
def p_AttributeWithQualifier(self, p):
"""
Attribute : Qualifier AttributeRest
"""
static = IDLInterfaceMember.Special.Static in p[1]
stringifier = IDLInterfaceMember.Special.Stringifier in p[1]
(location, identifier, type, readonly) = p[2]
p[0] = IDLAttribute(location, identifier, type, readonly,
static=static, stringifier=stringifier)
def p_AttributeInherited(self, p):
"""
Attribute : INHERIT AttributeRest
"""
(location, identifier, type, readonly) = p[2]
p[0] = IDLAttribute(location, identifier, type, readonly, inherit=True)
def p_Attribute(self, p):
"""
Attribute : AttributeRest
"""
(location, identifier, type, readonly) = p[1]
p[0] = IDLAttribute(location, identifier, type, readonly, inherit=False)
def p_AttributeRest(self, p):
"""
AttributeRest : ReadOnly ATTRIBUTE Type AttributeName SEMICOLON
"""
location = self.getLocation(p, 2)
readonly = p[1]
t = p[3]
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 4), p[4])
p[0] = (location, identifier, t, readonly)
def p_ReadOnly(self, p):
"""
ReadOnly : READONLY
"""
p[0] = True
def p_ReadOnlyEmpty(self, p):
"""
ReadOnly :
"""
p[0] = False
def p_Operation(self, p):
"""
Operation : Qualifiers OperationRest
"""
qualifiers = p[1]
# Disallow duplicates in the qualifier set
if not len(set(qualifiers)) == len(qualifiers):
raise WebIDLError("Duplicate qualifiers are not allowed",
[self.getLocation(p, 1)])
static = IDLInterfaceMember.Special.Static in p[1]
# If "static" is present it must be the only qualifier; the grammar
# already enforces this, so we can assert here.
assert not static or len(qualifiers) == 1
stringifier = IDLInterfaceMember.Special.Stringifier in p[1]
# Likewise, if "stringifier" is present it must be the only qualifier;
# the grammar already enforces this, so we can assert here.
assert not stringifier or len(qualifiers) == 1
getter = True if IDLMethod.Special.Getter in p[1] else False
setter = True if IDLMethod.Special.Setter in p[1] else False
creator = True if IDLMethod.Special.Creator in p[1] else False
deleter = True if IDLMethod.Special.Deleter in p[1] else False
legacycaller = True if IDLMethod.Special.LegacyCaller in p[1] else False
if getter or deleter:
if setter or creator:
raise WebIDLError("getter and deleter are incompatible with setter and creator",
[self.getLocation(p, 1)])
(returnType, identifier, arguments) = p[2]
assert isinstance(returnType, IDLType)
specialType = IDLMethod.NamedOrIndexed.Neither
if getter or deleter:
if len(arguments) != 1:
raise WebIDLError("%s has wrong number of arguments" %
("getter" if getter else "deleter"),
[self.getLocation(p, 2)])
argType = arguments[0].type
if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
specialType = IDLMethod.NamedOrIndexed.Named
elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
specialType = IDLMethod.NamedOrIndexed.Indexed
else:
raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
("getter" if getter else "deleter"),
[arguments[0].location])
if arguments[0].optional or arguments[0].variadic:
raise WebIDLError("%s cannot have %s argument" %
("getter" if getter else "deleter",
"optional" if arguments[0].optional else "variadic"),
[arguments[0].location])
if getter:
if returnType.isVoid():
raise WebIDLError("getter cannot have void return type",
[self.getLocation(p, 2)])
if setter or creator:
if len(arguments) != 2:
raise WebIDLError("%s has wrong number of arguments" %
("setter" if setter else "creator"),
[self.getLocation(p, 2)])
argType = arguments[0].type
if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
specialType = IDLMethod.NamedOrIndexed.Named
elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
specialType = IDLMethod.NamedOrIndexed.Indexed
else:
raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
("setter" if setter else "creator"),
[arguments[0].location])
if arguments[0].optional or arguments[0].variadic:
raise WebIDLError("%s cannot have %s argument" %
("setter" if setter else "creator",
"optional" if arguments[0].optional else "variadic"),
[arguments[0].location])
if arguments[1].optional or arguments[1].variadic:
raise WebIDLError("%s cannot have %s argument" %
("setter" if setter else "creator",
"optional" if arguments[1].optional else "variadic"),
[arguments[1].location])
if stringifier:
if len(arguments) != 0:
raise WebIDLError("stringifier has wrong number of arguments",
[self.getLocation(p, 2)])
if not returnType.isDOMString():
raise WebIDLError("stringifier must have DOMString return type",
[self.getLocation(p, 2)])
# identifier might be None. This is only permitted for special methods.
if not identifier:
if (not getter and not setter and not creator and
not deleter and not legacycaller and not stringifier):
raise WebIDLError("Identifier required for non-special methods",
[self.getLocation(p, 2)])
location = BuiltinLocation("<auto-generated-identifier>")
identifier = IDLUnresolvedIdentifier(
location,
"__%s%s%s%s%s%s%s" %
("named" if specialType == IDLMethod.NamedOrIndexed.Named else
"indexed" if specialType == IDLMethod.NamedOrIndexed.Indexed else "",
"getter" if getter else "",
"setter" if setter else "",
"deleter" if deleter else "",
"creator" if creator else "",
"legacycaller" if legacycaller else "",
"stringifier" if stringifier else ""),
allowDoubleUnderscore=True)
method = IDLMethod(self.getLocation(p, 2), identifier, returnType, arguments,
static=static, getter=getter, setter=setter, creator=creator,
deleter=deleter, specialType=specialType,
legacycaller=legacycaller, stringifier=stringifier)
p[0] = method
def p_Stringifier(self, p):
"""
Operation : STRINGIFIER SEMICOLON
"""
identifier = IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"__stringifier",
allowDoubleUnderscore=True)
method = IDLMethod(self.getLocation(p, 1),
identifier,
returnType=BuiltinTypes[IDLBuiltinType.Types.domstring],
arguments=[],
stringifier=True)
p[0] = method
def p_Jsonifier(self, p):
"""
Operation : JSONIFIER SEMICOLON
"""
identifier = IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"__jsonifier", allowDoubleUnderscore=True)
method = IDLMethod(self.getLocation(p, 1),
identifier,
returnType=BuiltinTypes[IDLBuiltinType.Types.object],
arguments=[],
jsonifier=True)
p[0] = method
def p_QualifierStatic(self, p):
"""
Qualifier : STATIC
"""
p[0] = [IDLInterfaceMember.Special.Static]
def p_QualifierStringifier(self, p):
"""
Qualifier : STRINGIFIER
"""
p[0] = [IDLInterfaceMember.Special.Stringifier]
def p_Qualifiers(self, p):
"""
Qualifiers : Qualifier
| Specials
"""
p[0] = p[1]
def p_Specials(self, p):
"""
Specials : Special Specials
"""
p[0] = [p[1]]
p[0].extend(p[2])
def p_SpecialsEmpty(self, p):
"""
Specials :
"""
p[0] = []
def p_SpecialGetter(self, p):
"""
Special : GETTER
"""
p[0] = IDLMethod.Special.Getter
def p_SpecialSetter(self, p):
"""
Special : SETTER
"""
p[0] = IDLMethod.Special.Setter
def p_SpecialCreator(self, p):
"""
Special : CREATOR
"""
p[0] = IDLMethod.Special.Creator
def p_SpecialDeleter(self, p):
"""
Special : DELETER
"""
p[0] = IDLMethod.Special.Deleter
def p_SpecialLegacyCaller(self, p):
"""
Special : LEGACYCALLER
"""
p[0] = IDLMethod.Special.LegacyCaller
def p_OperationRest(self, p):
"""
OperationRest : ReturnType OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON
"""
p[0] = (p[1], p[2], p[4])
def p_OptionalIdentifier(self, p):
"""
OptionalIdentifier : IDENTIFIER
"""
p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
def p_OptionalIdentifierEmpty(self, p):
"""
OptionalIdentifier :
"""
pass
def p_ArgumentList(self, p):
"""
ArgumentList : Argument Arguments
"""
p[0] = [p[1]] if p[1] else []
p[0].extend(p[2])
def p_ArgumentListEmpty(self, p):
"""
ArgumentList :
"""
p[0] = []
def p_Arguments(self, p):
"""
Arguments : COMMA Argument Arguments
"""
p[0] = [p[2]] if p[2] else []
p[0].extend(p[3])
def p_ArgumentsEmpty(self, p):
"""
Arguments :
"""
p[0] = []
def p_Argument(self, p):
"""
Argument : ExtendedAttributeList Optional Type Ellipsis ArgumentName Default
"""
t = p[3]
assert isinstance(t, IDLType)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 5), p[5])
optional = p[2]
variadic = p[4]
defaultValue = p[6]
if not optional and defaultValue:
raise WebIDLError("Mandatory arguments can't have a default value.",
[self.getLocation(p, 6)])
# We can't test t.isAny() here and give it a default value as needed,
# since at this point t is not a fully resolved type yet (e.g. it might
# be a typedef). We'll handle the 'any' case in IDLArgument.complete.
if variadic:
if optional:
raise WebIDLError("Variadic arguments should not be marked optional.",
[self.getLocation(p, 2)])
optional = variadic
p[0] = IDLArgument(self.getLocation(p, 5), identifier, t, optional, defaultValue, variadic)
p[0].addExtendedAttributes(p[1])
def p_ArgumentName(self, p):
"""
ArgumentName : IDENTIFIER
| ATTRIBUTE
| CALLBACK
| CONST
| CREATOR
| DELETER
| DICTIONARY
| ENUM
| EXCEPTION
| GETTER
| IMPLEMENTS
| INHERIT
| INTERFACE
| LEGACYCALLER
| MAPLIKE
| PARTIAL
| REQUIRED
| SERIALIZER
| SETLIKE
| SETTER
| STATIC
| STRINGIFIER
| JSONIFIER
| TYPEDEF
| UNRESTRICTED
"""
p[0] = p[1]
def p_AttributeName(self, p):
"""
AttributeName : IDENTIFIER
| REQUIRED
"""
p[0] = p[1]
def p_Optional(self, p):
"""
Optional : OPTIONAL
"""
p[0] = True
def p_OptionalEmpty(self, p):
"""
Optional :
"""
p[0] = False
def p_Required(self, p):
"""
Required : REQUIRED
"""
p[0] = True
def p_RequiredEmpty(self, p):
"""
Required :
"""
p[0] = False
def p_Ellipsis(self, p):
"""
Ellipsis : ELLIPSIS
"""
p[0] = True
def p_EllipsisEmpty(self, p):
"""
Ellipsis :
"""
p[0] = False
def p_ExceptionMember(self, p):
"""
ExceptionMember : Const
| ExceptionField
"""
pass
def p_ExceptionField(self, p):
"""
ExceptionField : Type IDENTIFIER SEMICOLON
"""
pass
def p_ExtendedAttributeList(self, p):
"""
ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET
"""
p[0] = [p[2]]
if p[3]:
p[0].extend(p[3])
def p_ExtendedAttributeListEmpty(self, p):
"""
ExtendedAttributeList :
"""
p[0] = []
def p_ExtendedAttribute(self, p):
"""
ExtendedAttribute : ExtendedAttributeNoArgs
| ExtendedAttributeArgList
| ExtendedAttributeIdent
| ExtendedAttributeNamedArgList
| ExtendedAttributeIdentList
"""
p[0] = IDLExtendedAttribute(self.getLocation(p, 1), p[1])
def p_ExtendedAttributeEmpty(self, p):
"""
ExtendedAttribute :
"""
pass
def p_ExtendedAttributes(self, p):
"""
ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes
"""
p[0] = [p[2]] if p[2] else []
p[0].extend(p[3])
def p_ExtendedAttributesEmpty(self, p):
"""
ExtendedAttributes :
"""
p[0] = []
def p_Other(self, p):
"""
Other : INTEGER
| FLOATLITERAL
| IDENTIFIER
| STRING
| OTHER
| ELLIPSIS
| COLON
| SCOPE
| SEMICOLON
| LT
| EQUALS
| GT
| QUESTIONMARK
| DATE
| DOMSTRING
| BYTESTRING
| USVSTRING
| ANY
| ATTRIBUTE
| BOOLEAN
| BYTE
| LEGACYCALLER
| CONST
| CREATOR
| DELETER
| DOUBLE
| EXCEPTION
| FALSE
| FLOAT
| GETTER
| IMPLEMENTS
| INHERIT
| INTERFACE
| LONG
| MODULE
| NULL
| OBJECT
| OCTET
| OPTIONAL
| SEQUENCE
| MOZMAP
| SETTER
| SHORT
| STATIC
| STRINGIFIER
| JSONIFIER
| TRUE
| TYPEDEF
| UNSIGNED
| VOID
"""
pass
def p_OtherOrComma(self, p):
"""
OtherOrComma : Other
| COMMA
"""
pass
def p_TypeSingleType(self, p):
"""
Type : SingleType
"""
p[0] = p[1]
def p_TypeUnionType(self, p):
"""
Type : UnionType TypeSuffix
"""
p[0] = self.handleModifiers(p[1], p[2])
def p_SingleTypeNonAnyType(self, p):
"""
SingleType : NonAnyType
"""
p[0] = p[1]
def p_SingleTypeAnyType(self, p):
"""
SingleType : ANY TypeSuffixStartingWithArray
"""
p[0] = self.handleModifiers(BuiltinTypes[IDLBuiltinType.Types.any], p[2])
def p_UnionType(self, p):
"""
UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN
"""
types = [p[2], p[4]]
types.extend(p[5])
p[0] = IDLUnionType(self.getLocation(p, 1), types)
def p_UnionMemberTypeNonAnyType(self, p):
"""
UnionMemberType : NonAnyType
"""
p[0] = p[1]
def p_UnionMemberTypeArrayOfAny(self, p):
"""
UnionMemberTypeArrayOfAny : ANY LBRACKET RBRACKET
"""
p[0] = IDLArrayType(self.getLocation(p, 2),
BuiltinTypes[IDLBuiltinType.Types.any])
def p_UnionMemberType(self, p):
"""
UnionMemberType : UnionType TypeSuffix
| UnionMemberTypeArrayOfAny TypeSuffix
"""
p[0] = self.handleModifiers(p[1], p[2])
def p_UnionMemberTypes(self, p):
"""
UnionMemberTypes : OR UnionMemberType UnionMemberTypes
"""
p[0] = [p[2]]
p[0].extend(p[3])
def p_UnionMemberTypesEmpty(self, p):
"""
UnionMemberTypes :
"""
p[0] = []
def p_NonAnyType(self, p):
"""
NonAnyType : PrimitiveOrStringType TypeSuffix
| ARRAYBUFFER TypeSuffix
| SHAREDARRAYBUFFER TypeSuffix
| OBJECT TypeSuffix
"""
if p[1] == "object":
type = BuiltinTypes[IDLBuiltinType.Types.object]
elif p[1] == "ArrayBuffer":
type = BuiltinTypes[IDLBuiltinType.Types.ArrayBuffer]
elif p[1] == "SharedArrayBuffer":
type = BuiltinTypes[IDLBuiltinType.Types.SharedArrayBuffer]
else:
type = BuiltinTypes[p[1]]
p[0] = self.handleModifiers(type, p[2])
def p_NonAnyTypeSequenceType(self, p):
"""
NonAnyType : SEQUENCE LT Type GT Null
"""
innerType = p[3]
type = IDLSequenceType(self.getLocation(p, 1), innerType)
if p[5]:
type = IDLNullableType(self.getLocation(p, 5), type)
p[0] = type
# Note: Promise<void> is allowed, so we want to parametrize on
# ReturnType, not Type. Also, we want this to end up picking up
# the Promise interface for now, hence the games with IDLUnresolvedType.
def p_NonAnyTypePromiseType(self, p):
"""
NonAnyType : PROMISE LT ReturnType GT Null
"""
innerType = p[3]
promiseIdent = IDLUnresolvedIdentifier(self.getLocation(p, 1), "Promise")
type = IDLUnresolvedType(self.getLocation(p, 1), promiseIdent, p[3])
if p[5]:
type = IDLNullableType(self.getLocation(p, 5), type)
p[0] = type
def p_NonAnyTypeMozMapType(self, p):
"""
NonAnyType : MOZMAP LT Type GT Null
"""
innerType = p[3]
type = IDLMozMapType(self.getLocation(p, 1), innerType)
if p[5]:
type = IDLNullableType(self.getLocation(p, 5), type)
p[0] = type
def p_NonAnyTypeScopedName(self, p):
"""
NonAnyType : ScopedName TypeSuffix
"""
assert isinstance(p[1], IDLUnresolvedIdentifier)
if p[1].name == "Promise":
raise WebIDLError("Promise used without saying what it's "
"parametrized over",
[self.getLocation(p, 1)])
type = None
try:
if self.globalScope()._lookupIdentifier(p[1]):
obj = self.globalScope()._lookupIdentifier(p[1])
assert not obj.isType()
if obj.isTypedef():
type = IDLTypedefType(self.getLocation(p, 1), obj.innerType,
obj.identifier.name)
elif obj.isCallback() and not obj.isInterface():
type = IDLCallbackType(self.getLocation(p, 1), obj)
else:
type = IDLWrapperType(self.getLocation(p, 1), p[1])
p[0] = self.handleModifiers(type, p[2])
return
except:
pass
type = IDLUnresolvedType(self.getLocation(p, 1), p[1])
p[0] = self.handleModifiers(type, p[2])
def p_NonAnyTypeDate(self, p):
"""
NonAnyType : DATE TypeSuffix
"""
p[0] = self.handleModifiers(BuiltinTypes[IDLBuiltinType.Types.date],
p[2])
def p_ConstType(self, p):
"""
ConstType : PrimitiveOrStringType Null
"""
type = BuiltinTypes[p[1]]
if p[2]:
type = IDLNullableType(self.getLocation(p, 1), type)
p[0] = type
def p_ConstTypeIdentifier(self, p):
"""
ConstType : IDENTIFIER Null
"""
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
type = IDLUnresolvedType(self.getLocation(p, 1), identifier)
if p[2]:
type = IDLNullableType(self.getLocation(p, 1), type)
p[0] = type
def p_PrimitiveOrStringTypeUint(self, p):
"""
PrimitiveOrStringType : UnsignedIntegerType
"""
p[0] = p[1]
def p_PrimitiveOrStringTypeBoolean(self, p):
"""
PrimitiveOrStringType : BOOLEAN
"""
p[0] = IDLBuiltinType.Types.boolean
def p_PrimitiveOrStringTypeByte(self, p):
"""
PrimitiveOrStringType : BYTE
"""
p[0] = IDLBuiltinType.Types.byte
def p_PrimitiveOrStringTypeOctet(self, p):
"""
PrimitiveOrStringType : OCTET
"""
p[0] = IDLBuiltinType.Types.octet
def p_PrimitiveOrStringTypeFloat(self, p):
"""
PrimitiveOrStringType : FLOAT
"""
p[0] = IDLBuiltinType.Types.float
def p_PrimitiveOrStringTypeUnrestictedFloat(self, p):
"""
PrimitiveOrStringType : UNRESTRICTED FLOAT
"""
p[0] = IDLBuiltinType.Types.unrestricted_float
def p_PrimitiveOrStringTypeDouble(self, p):
"""
PrimitiveOrStringType : DOUBLE
"""
p[0] = IDLBuiltinType.Types.double
def p_PrimitiveOrStringTypeUnrestictedDouble(self, p):
"""
PrimitiveOrStringType : UNRESTRICTED DOUBLE
"""
p[0] = IDLBuiltinType.Types.unrestricted_double
def p_PrimitiveOrStringTypeDOMString(self, p):
"""
PrimitiveOrStringType : DOMSTRING
"""
p[0] = IDLBuiltinType.Types.domstring
def p_PrimitiveOrStringTypeBytestring(self, p):
"""
PrimitiveOrStringType : BYTESTRING
"""
p[0] = IDLBuiltinType.Types.bytestring
def p_PrimitiveOrStringTypeUSVString(self, p):
"""
PrimitiveOrStringType : USVSTRING
"""
p[0] = IDLBuiltinType.Types.usvstring
def p_UnsignedIntegerTypeUnsigned(self, p):
"""
UnsignedIntegerType : UNSIGNED IntegerType
"""
# Adding one to a given signed integer type gets you the unsigned type:
p[0] = p[2] + 1
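# Illustrative note (not from the original source): this relies on the
# IDLBuiltinType.Types numbering placing each unsigned variant immediately
# after its signed counterpart (e.g. short, then unsigned short), so adding
# one flips short -> unsigned short and long_long -> unsigned_long_long.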
def p_UnsignedIntegerType(self, p):
"""
UnsignedIntegerType : IntegerType
"""
p[0] = p[1]
def p_IntegerTypeShort(self, p):
"""
IntegerType : SHORT
"""
p[0] = IDLBuiltinType.Types.short
def p_IntegerTypeLong(self, p):
"""
IntegerType : LONG OptionalLong
"""
if p[2]:
p[0] = IDLBuiltinType.Types.long_long
else:
p[0] = IDLBuiltinType.Types.long
def p_OptionalLong(self, p):
"""
OptionalLong : LONG
"""
p[0] = True
def p_OptionalLongEmpty(self, p):
"""
OptionalLong :
"""
p[0] = False
def p_TypeSuffixBrackets(self, p):
"""
TypeSuffix : LBRACKET RBRACKET TypeSuffix
"""
p[0] = [(IDLMethod.TypeSuffixModifier.Brackets, self.getLocation(p, 1))]
p[0].extend(p[3])
def p_TypeSuffixQMark(self, p):
"""
TypeSuffix : QUESTIONMARK TypeSuffixStartingWithArray
"""
p[0] = [(IDLMethod.TypeSuffixModifier.QMark, self.getLocation(p, 1))]
p[0].extend(p[2])
def p_TypeSuffixEmpty(self, p):
"""
TypeSuffix :
"""
p[0] = []
def p_TypeSuffixStartingWithArray(self, p):
"""
TypeSuffixStartingWithArray : LBRACKET RBRACKET TypeSuffix
"""
p[0] = [(IDLMethod.TypeSuffixModifier.Brackets, self.getLocation(p, 1))]
p[0].extend(p[3])
def p_TypeSuffixStartingWithArrayEmpty(self, p):
"""
TypeSuffixStartingWithArray :
"""
p[0] = []
def p_Null(self, p):
"""
Null : QUESTIONMARK
|
"""
if len(p) > 1:
p[0] = True
else:
p[0] = False
def p_ReturnTypeType(self, p):
"""
ReturnType : Type
"""
p[0] = p[1]
def p_ReturnTypeVoid(self, p):
"""
ReturnType : VOID
"""
p[0] = BuiltinTypes[IDLBuiltinType.Types.void]
def p_ScopedName(self, p):
"""
ScopedName : AbsoluteScopedName
| RelativeScopedName
"""
p[0] = p[1]
def p_AbsoluteScopedName(self, p):
"""
AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts
"""
assert False
pass
def p_RelativeScopedName(self, p):
"""
RelativeScopedName : IDENTIFIER ScopedNameParts
"""
assert not p[2] # Not implemented!
p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
def p_ScopedNameParts(self, p):
"""
ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts
"""
assert False
pass
def p_ScopedNamePartsEmpty(self, p):
"""
ScopedNameParts :
"""
p[0] = None
def p_ExtendedAttributeNoArgs(self, p):
"""
ExtendedAttributeNoArgs : IDENTIFIER
"""
p[0] = (p[1],)
def p_ExtendedAttributeArgList(self, p):
"""
ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN
"""
p[0] = (p[1], p[3])
def p_ExtendedAttributeIdent(self, p):
"""
ExtendedAttributeIdent : IDENTIFIER EQUALS STRING
| IDENTIFIER EQUALS IDENTIFIER
"""
p[0] = (p[1], p[3])
def p_ExtendedAttributeNamedArgList(self, p):
"""
ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN
"""
p[0] = (p[1], p[3], p[5])
def p_ExtendedAttributeIdentList(self, p):
"""
ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN
"""
p[0] = (p[1], p[4])
def p_IdentifierList(self, p):
"""
IdentifierList : IDENTIFIER Identifiers
"""
idents = list(p[2])
idents.insert(0, p[1])
p[0] = idents
def p_IdentifiersList(self, p):
"""
Identifiers : COMMA IDENTIFIER Identifiers
"""
idents = list(p[3])
idents.insert(0, p[2])
p[0] = idents
def p_IdentifiersEmpty(self, p):
"""
Identifiers :
"""
p[0] = []
def p_error(self, p):
if not p:
raise WebIDLError("Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both",
[self._filename])
else:
raise WebIDLError("invalid syntax", [Location(self.lexer, p.lineno, p.lexpos, self._filename)])
def __init__(self, outputdir='', lexer=None):
Tokenizer.__init__(self, outputdir, lexer)
logger = SqueakyCleanLogger()
self.parser = yacc.yacc(module=self,
outputdir=outputdir,
tabmodule='webidlyacc',
errorlog=logger
# Pickling the grammar is a speedup in
# some cases (older Python?) but a
# significant slowdown in others.
# We're not pickling for now, until it
# becomes a speedup again.
# , picklefile='WebIDLGrammar.pkl'
)
logger.reportGrammarErrors()
self._globalScope = IDLScope(BuiltinLocation("<Global Scope>"), None, None)
# To make our test harness work, pretend like we have a primary global already.
# Note that we _don't_ set _globalScope.primaryGlobalAttr,
# so we'll still be able to detect multiple PrimaryGlobal extended attributes.
self._globalScope.primaryGlobalName = "FakeTestPrimaryGlobal"
self._globalScope.globalNames.add("FakeTestPrimaryGlobal")
self._globalScope.globalNameMapping["FakeTestPrimaryGlobal"].add("FakeTestPrimaryGlobal")
# And we add the special-cased "System" global name, which
# doesn't have any corresponding interfaces.
self._globalScope.globalNames.add("System")
self._globalScope.globalNameMapping["System"].add("BackstagePass")
self._installBuiltins(self._globalScope)
self._productions = []
self._filename = "<builtin>"
self.lexer.input(Parser._builtins)
self._filename = None
self.parser.parse(lexer=self.lexer, tracking=True)
def _installBuiltins(self, scope):
assert isinstance(scope, IDLScope)
# xrange omits the last value.
for x in xrange(IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.SharedFloat64Array + 1):
builtin = BuiltinTypes[x]
name = builtin.name
typedef = IDLTypedef(BuiltinLocation("<builtin type>"), scope, builtin, name)
@staticmethod
def handleModifiers(type, modifiers):
for (modifier, modifierLocation) in modifiers:
assert (modifier == IDLMethod.TypeSuffixModifier.QMark or
modifier == IDLMethod.TypeSuffixModifier.Brackets)
if modifier == IDLMethod.TypeSuffixModifier.QMark:
type = IDLNullableType(modifierLocation, type)
elif modifier == IDLMethod.TypeSuffixModifier.Brackets:
type = IDLArrayType(modifierLocation, type)
return type
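# Illustrative note (not from the original source): for the WebIDL type
# "long[]?" the suffix productions yield [(Brackets, loc1), (QMark, loc2)],
# so handleModifiers wraps the builtin long first in IDLArrayType and then
# in IDLNullableType, i.e. a nullable array of longs.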
def parse(self, t, filename=None):
self.lexer.input(t)
# for tok in iter(self.lexer.token, None):
# print tok
self._filename = filename
self._productions.extend(self.parser.parse(lexer=self.lexer, tracking=True))
self._filename = None
def finish(self):
# First, finish all the IDLImplementsStatements. In particular, we
# have to make sure we do those before we do the IDLInterfaces.
# XXX khuey hates this bit and wants to nuke it from orbit.
implementsStatements = [p for p in self._productions if
isinstance(p, IDLImplementsStatement)]
otherStatements = [p for p in self._productions if
not isinstance(p, IDLImplementsStatement)]
for production in implementsStatements:
production.finish(self.globalScope())
for production in otherStatements:
production.finish(self.globalScope())
# Do any post-finish validation we need to do
for production in self._productions:
production.validate()
# De-duplicate self._productions, without modifying its order.
seen = set()
result = []
for p in self._productions:
if p not in seen:
seen.add(p)
result.append(p)
return result
def reset(self):
return Parser(lexer=self.lexer)
# Builtin IDL defined by WebIDL
_builtins = """
typedef unsigned long long DOMTimeStamp;
typedef (ArrayBufferView or ArrayBuffer) BufferSource;
typedef (SharedArrayBufferView or SharedArrayBuffer) SharedBufferSource;
"""
def main():
# Parse arguments.
from optparse import OptionParser
usageString = "usage: %prog [options] files"
o = OptionParser(usage=usageString)
o.add_option("--cachedir", dest='cachedir', default=None,
help="Directory in which to cache lex/parse tables.")
o.add_option("--verbose-errors", action='store_true', default=False,
help="When an error happens, display the Python traceback.")
(options, args) = o.parse_args()
if len(args) < 1:
o.error(usageString)
fileList = args
baseDir = os.getcwd()
# Parse the WebIDL.
parser = Parser(options.cachedir)
try:
for filename in fileList:
fullPath = os.path.normpath(os.path.join(baseDir, filename))
f = open(fullPath, 'rb')
lines = f.readlines()
f.close()
print fullPath
parser.parse(''.join(lines), fullPath)
parser.finish()
except WebIDLError, e:
if options.verbose_errors:
traceback.print_exc()
else:
print e
if __name__ == '__main__':
main()
|
maljac/odoo-addons
|
refs/heads/8.0
|
analytic_surveyor/analytic.py
|
11
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class res_country_state_town(models.Model):
_name = 'res.country.state.town'
# TODO: move this to another module
name = fields.Char('Name', required=True)
state_id = fields.Many2one('res.country.state', 'State', required=True)
class account_analytic_account_lot(models.Model):
_name = 'account.analytic.account.lot'
analytic_account_id = fields.Many2one(
'account.analytic.account', 'Project/Contract', required=True)
lot_number = fields.Char('Lot Number', required=True)
registration_number = fields.Char('Registration Number', required=True)
class account_analytic_account(models.Model):
_inherit = 'account.analytic.account'
file_number = fields.Char('File Number')
plane_number = fields.Char('Plane Number')
entry_date = fields.Date('Entry Date')
order_date = fields.Date('Order Date')
registration_date = fields.Date('Registration Date')
town_id = fields.Many2one('res.country.state.town', 'Town')
is_ccu = fields.Boolean('Is CCU?')
lot_ids = fields.One2many(
'account.analytic.account.lot', 'analytic_account_id', 'Lots')
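# Hedged usage sketch (not part of the original module), assuming a standard
# Odoo 8 environment is available as self.env:
#   town = self.env['res.country.state.town'].create(
#       {'name': 'Springfield', 'state_id': state.id})
#   account.write({'town_id': town.id, 'is_ccu': True})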
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
dbmi-pitt/dbmi-annotator
|
refs/heads/master
|
translation/mp-evidence-base-ETL/deprecated/mpEvidenceQry.py
|
1
|
# Copyright 2016-2017 University of Pittsburgh
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, uuid, datetime
from sets import Set
from model.micropublication import Annotation, DataMaterialRow, DMItem, DataRatioItem, MaterialDoseItem, MaterialParticipants, MaterialPhenotypeItem, DataReviewer, DataDips
######################### QUERY MP Annotation ##########################
# query all mp annotations
# return annotations with claim, data and material
def queryAllMpAnnotation(conn):
mpAnnotations = []
claimAnns = queryAllMpClaim(conn)
for claimId,claimAnn in claimAnns.items():
claimDataAnno = queryMpData(conn, claimAnn, claimId)
claimDataMatAnno = queryMpMaterial(conn, claimDataAnno, claimId)
mpAnnotations.append(claimDataMatAnno)
return mpAnnotations
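# Hedged usage sketch (not part of the original module; the helper name is
# hypothetical): with an open DB-API connection, e.g. psycopg2.connect(...),
# fetch every annotation and print its claim label.
def exampleQueryAll(conn):
    for ann in queryAllMpAnnotation(conn):
        print ann.label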
# query all mp annotations
# return annotations with claim, data and material
def queryMpAnnotationByUrn(conn, annotationUrn):
claimAnn = queryMpClaimByUrn(conn, annotationUrn)
claimDataAnn = queryMpData(conn, claimAnn, claimAnn.claimid)
claimDataMatAnn = queryMpMaterial(conn, claimDataAnn, claimAnn.claimid)
return claimDataMatAnn
######################### QUERY MP Claim ##########################
## query all claim annotation by document URL
## return {{key: id-1, value: Ann-1"}, {key: id-2, value: Ann-2"}, ...}
def queryAllMpClaim(conn):
annotations = {} # key: id, value obj Annotation
cur = conn.cursor()
qry = """
select cann.id, t.has_source, cann.creator, cann.date_created, s.exact, s.prefix, s.suffix, cbody.label, qualifierrole(q.subject, q.predicate, q.object) as qtype, qvalue, cann.rejected_statement, cann.rejected_statement_reason, cann.rejected_statement_comment, met.entered_value, cann.negation, q.enantiomer, q.metabolite
from mp_claim_annotation cann join oa_claim_body cbody on cann.has_body = cbody.id
join qualifier q on cbody.id = q.claim_body_id
join method met on cann.id = met.mp_claim_id
join oa_target t on cann.has_target = t.id
join oa_selector s on t.has_selector = s.id;
"""
cur.execute(qry)
for row in cur.fetchall():
id = row[0]
if id not in annotations: ## create a new annotation; reuse the existing one otherwise
annotation = Annotation()
annotations[id] = annotation
else:
annotation = annotations[id]
drugPC = "" ## define parent compound string
if row[15] and not row[16]:
drugPC = "enantiomer|"
elif row[16] and not row[15]:
drugPC = "|metabolite"
elif row[15] and row[16]:
drugPC = "enantiomer|metabolite"
## claim qualifiers
if row[8] == "subject":
annotation.csubject = row[9]
annotation.setSubjectPC(drugPC) # parent compound for subject
elif row[8] == "predicate":
annotation.cpredicate = row[9]
elif row[8] == "object":
annotation.cobject = row[9]
annotation.setObjectPC(drugPC) # parent compound for object
elif row[8] == "qualifer":
annotation.qualifier = row[9]
annotation.setQualifierPC(drugPC) # parent compound for qualifier
else:
print "[ERROR] qualifier role unidentified qvalue: %s (claimid %s)" % (row[8], id)
## claim source and label
if annotation.source == None:
annotation.source = row[1]
if annotation.label == None:
annotation.label = row[7]
## claim text selector
if annotation.exact == None:
annotation.setOaSelector(row[5], row[4], row[6])
## user entered method
if annotation.method == None:
annotation.method = row[13]
## rejected reason
if annotation.rejected == None and row[10] == True:
annotation.rejected = row[11] + "|" + row[12]
## assertion negation
if annotation.negation == None and row[14] != None:
annotation.negation = row[14]
return annotations
def queryMpClaimByUrn(conn, urn):
"""
query claim annotation by annotationId
return Annotation
"""
cur = conn.cursor()
qry = """
select cann.id, t.has_source, cann.creator, cann.date_created, s.exact, s.prefix, s.suffix, cbody.label, qualifierrole(q.subject, q.predicate, q.object) as qtype, qvalue, cann.rejected_statement, cann.rejected_statement_reason, cann.rejected_statement_comment, met.entered_value, cann.negation, q.enantiomer, q.metabolite
from mp_claim_annotation cann join oa_claim_body cbody on cann.has_body = cbody.id
join qualifier q on cbody.id = q.claim_body_id
join method met on cann.id = met.mp_claim_id
join oa_target t on cann.has_target = t.id
join oa_selector s on t.has_selector = s.id
where cann.urn = '%s'; """ % (urn)
cur.execute(qry)
annotation = Annotation()
for row in cur.fetchall():
annotation.claimid = row[0]
annotation.urn = urn
drugPC = "" ## define parent compound string
if row[15] and not row[16]:
drugPC = "enantiomer|"
elif row[16] and not row[15]:
drugPC = "|metabolite"
elif row[15] and row[16]:
drugPC = "enantiomer|metabolite"
## claim qualifiers
if row[8] == "subject":
annotation.csubject = row[9]
annotation.setSubjectPC(drugPC) # parent compound for subject
elif row[8] == "predicate":
annotation.cpredicate = row[9]
elif row[8] == "object":
annotation.cobject = row[9]
annotation.setObjectPC(drugPC) # parent compound for object
elif row[8] == "qualifer":
annotation.qualifier = row[9]
annotation.setQualifierPC(drugPC) # parent compound for qualifier
else:
print "[ERROR] qualifier role unidentified qvalue: %s (claimid %s)" % (row[8], annotation.claimid)
## claim source and label
if annotation.source == None:
annotation.source = row[1]
if annotation.label == None:
annotation.label = row[7]
## claim text selector
if annotation.exact == None:
annotation.setOaSelector(row[5], row[4], row[6])
## rejected reason
if annotation.rejected == None and row[10] == True:
annotation.rejected = row[11] + "|" + row[12]
## user entered method
if annotation.method == None:
annotation.method = row[13]
## assertion negation
if annotation.negation == None and row[14] != None:
annotation.negation = row[14]
return annotation
######################### QUERY MP Data ##########################
# query data items for claim annotation
# return list of annotation with data items attached
def queryMpData(conn, annotation, claimid):
qry = """
select dann.type, df.data_field_type, df.value_as_string, df.value_as_number, s.exact, s.prefix, s.suffix, dann.mp_data_index, dann.ev_supports, dann.rejected, dann.rejected_reason, dann.rejected_comment, met.entered_value, met.inferred_value, eq.question, eq.value_as_string
from mp_data_annotation dann
join oa_data_body dbody on dann.has_body = dbody.id
join data_field df on df.data_body_id = dbody.id
left join oa_target t on dann.has_target = t.id
left join oa_selector s on t.has_selector = s.id
join method met on dann.mp_claim_id = met.mp_claim_id and met.mp_data_index = dann.mp_data_index
left join evidence_question eq on met.id = eq.method_id
where dann.mp_claim_id = %s
""" % (claimid)
cur = conn.cursor()
cur.execute(qry)
for row in cur.fetchall():
dType = row[0] # data type
dfType = row[1] # data field
exact = row[4]; value = str(row[2] or row[3]) # value as string or number
index = row[7] # data index
evRelationship = row[8] # EV supports or refutes
dmRow = None
if annotation.getSpecificDataMaterial(index) == None:
dmRow = DataMaterialRow() # create new row of data & material
annotation.setSpecificDataMaterial(dmRow, index)
else: # current row of data & material exists
dmRow = annotation.getSpecificDataMaterial(index)
if dType in ["auc", "cmax" , "clearance", "halflife"]:
if dmRow.getDataRatioItemInRow(dType): # DataRatioItem exists
dataRatioItem = dmRow.getDataRatioItemInRow(dType)
else: # create new dataRatioItem
dataRatioItem = DataRatioItem(dType)
dataRatioItem.setSelector("", exact, "")
dataRatioItem.setAttribute(dfType, value) # add value
dmRow.setDataRatioItem(dataRatioItem)
if dType == "reviewer":
if dmRow.getDataReviewer(): # DataReviewer exists
dataReviewer = dmRow.getDataReviewer()
else:
dataReviewer = DataReviewer()
dataReviewer.setAttribute(dfType, value)
dmRow.setDataReviewer(dataReviewer)
if dType == "dipsquestion": # DataDips exists
if dmRow.getDataDips():
dips = dmRow.getDataDips()
else:
dips = DataDips()
dips.setQuestion(dfType, value)
dmRow.setDataDips(dips)
if not dmRow.getEvRelationship(): # add evidence relationship to dmRow
if evRelationship is True:
dmRow.setEvRelationship("supports")
elif evRelationship is False:
dmRow.setEvRelationship("refutes")
evqs = row[14]; evqsVal = row[15] # add evidence type questions
if evqs and evqsVal:
if evqs == "grouprandom" and not dmRow.getGroupRandom():
dmRow.setGroupRandom(evqsVal)
elif evqs == "parallelgroup" and not dmRow.getParallelGroup():
dmRow.setParallelGroup(evqsVal)
return annotation
######################### QUERY MP Material ##########################
# query material items for claim annotation
# return the annotation with its material items attached
def queryMpMaterial(conn, annotation, claimid):
qry = """
select mann.type, mf.material_field_type, mf.value_as_string, mf.value_as_number, s.exact, s.prefix, s.suffix, mann.mp_data_index, mann.ev_supports
from mp_material_annotation mann join oa_material_body mbody on mann.has_body = mbody.id
join material_field mf on mf.material_body_id = mbody.id
left join oa_target t on mann.has_target = t.id
left join oa_selector s on t.has_selector = s.id
where mann.mp_claim_id = %s
""" % (claimid)
results = []
cur = conn.cursor()
cur.execute(qry)
for row in cur.fetchall():
mType = row[0] # material type
mfType = row[1] # material field
exact = row[4]; value = str(row[2] or row[3]) # value as string or number
index = row[7] # data & material index
evRelationship = row[8] # EV supports or refutes
if annotation.getSpecificDataMaterial(index) == None:
dmRow = DataMaterialRow() # create new row of data & material
if evRelationship:
dmRow.setEvRelationship("supports")
else:
dmRow.setEvRelationship("refutes")
if mType in ["object_dose","subject_dose"]: # dose
doseItem = MaterialDoseItem(mType)
doseItem.setAttribute(mfType, value)
doseItem.setSelector("", exact, "")
dmRow.setMaterialDoseItem(doseItem)
elif mType == "participants":
partItem = MaterialParticipants(value)
partItem.setSelector("", exact, "")
dmRow.setParticipants(partItem)
elif mType == "phenotype":
phenoItem = MaterialPhenotypeItem()
phenoItem.setAttribute(mfType, value)
dmRow.setPhenotype(phenoItem)
annotation.setSpecificDataMaterial(dmRow, index)
        else: # current row of data & material exists
dmRow = annotation.getSpecificDataMaterial(index)
if dmRow.getEvRelationship() == None and evRelationship is True:
dmRow.setEvRelationship("supports")
elif dmRow.getEvRelationship() == None and evRelationship is False:
dmRow.setEvRelationship("refutes")
if mType in ["object_dose","subject_dose"]:
if dmRow.getMaterialDoseInRow(mType): # current MaterialItem exists
doseItem = dmRow.getMaterialDoseInRow(mType)
else:
doseItem = MaterialDoseItem(mType)
doseItem.setAttribute(mfType, value)
doseItem.setSelector("", exact, "")
dmRow.setMaterialDoseItem(doseItem)
elif mType == "participants":
if dmRow.getParticipantsInRow(): # participants exists
partItem = dmRow.getParticipantsInRow()
partItem.setValue(value)
else:
partItem = MaterialParticipants(value)
dmRow.setParticipants(partItem)
partItem.setSelector("", exact, "")
elif mType == "phenotype":
if dmRow.getPhenotype():
phenoItem = dmRow.getPhenotype()
else:
phenoItem = MaterialPhenotypeItem()
phenoItem.setAttribute(mfType, value)
dmRow.setPhenotype(phenoItem)
return annotation
######################### QUERY Highlight Annotation ##########################
# query all highlight annotations
# return dict mapping each document URL to its set of highlighted drug names
def queryHighlightAnns(conn):
highlightD = {}
qry = """SELECT h.id, t.has_source, s.exact
FROM highlight_annotation h, oa_target t, oa_selector s
WHERE h.has_target = t.id
AND t.has_selector = s.id;"""
cur = conn.cursor()
cur.execute(qry)
for row in cur.fetchall():
source = row[1]; drugname = row[2]
if source in highlightD:
highlightD[source].add(drugname)
else:
highlightD[source] = Set([drugname])
return highlightD
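# A minimal usage sketch (not part of the original module): chaining the
# query helpers above against a micropublication database. The psycopg2
# driver, the connection parameters, and the example URN are assumptions
# made purely for illustration.
#
# import psycopg2
# conn = psycopg2.connect(host="localhost", dbname="mpdb",
#                         user="mpuser", password="secret")
# ann = queryMpClaimByUrn(conn, "urn:example:claim-1")
# ann = queryMpData(conn, ann, ann.claimid)
# ann = queryMpMaterial(conn, ann, ann.claimid)
# highlights = queryHighlightAnns(conn)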
|
GaloisInc/hacrypto
|
refs/heads/master
|
src/C++/Mozilla/old_snapshots/manager/ssl/tests/unit/test_cert_signatures/generate.py
|
1
|
#!/usr/bin/python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import tempfile, os, sys
import random
libpath = os.path.abspath('../psm_common_py')
sys.path.append(libpath)
import CertUtils
srcdir = os.getcwd()
db = tempfile.mkdtemp()
CA_basic_constraints = "basicConstraints=critical,CA:TRUE\n"
CA_limited_basic_constraints = "basicConstraints=critical,CA:TRUE, pathlen:0\n"
EE_basic_constraints = "basicConstraints=CA:FALSE\n"
CA_min_ku = "keyUsage=critical, keyCertSign\n"
CA_bad_ku = ("keyUsage=digitalSignature, nonRepudiation, keyEncipherment," +
" dataEncipherment, keyAgreement, cRLSign\n")
EE_full_ku = ("keyUsage=digitalSignature, nonRepudiation, keyEncipherment," +
" dataEncipherment, keyAgreement, keyCertSign, cRLSign\n")
Server_eku= "extendedKeyUsage=critical,serverAuth,clientAuth\n"
pk_name = {'rsa': 'rsa', 'dsa': 'dsa', 'p384': 'secp384r1'}
def tamper_cert(cert_name):
f = open(cert_name, 'r+b')
    f.seek(-3, 2) # third byte from the end to ensure we only touch the
    # signature value. The location of the perturbation ensures that we
    # modify just the signature, not the tbsCertificate, without the need
    # of parsing the certificate. It also guarantees that if a failure
    # occurs it is because of an invalid signature and not another field
    # that might have become invalid.
b = bytearray(f.read(1))
for i in range(len(b)):
b[i] ^= 0x77
f.seek(-1, 1)
f.write(b)
f.close()
return 1
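# A small illustration (not part of the original script) of the bit flip
# above: XOR with 0x77 is self-inverse, so applying the same perturbation
# twice restores the original byte.
#
# >>> b = bytearray(b'\x42')
# >>> b[0] ^= 0x77
# >>> hex(b[0])
# '0x35'
# >>> b[0] ^= 0x77
# >>> hex(b[0])
# '0x42'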
def generate_certs():
CertUtils.init_dsa(db)
ee_ext_text = EE_basic_constraints + EE_full_ku
for name, key_type in pk_name.iteritems():
ca_name = "ca-" + name
[ca_key, ca_cert] = CertUtils.generate_cert_generic(db,
srcdir,
random.randint(100,4000000),
key_type,
ca_name,
CA_basic_constraints + CA_min_ku)
[valid_int_key, valid_int_cert, ee_key, ee_cert] = (
CertUtils.generate_int_and_ee(db,
srcdir,
ca_key,
ca_cert,
name + "-valid",
CA_basic_constraints,
ee_ext_text,
key_type) )
[int_key, int_cert] = CertUtils.generate_cert_generic(db,
srcdir,
random.randint(100,4000000),
key_type,
"int-" + name + "-tampered",
ee_ext_text,
ca_key,
ca_cert)
[ee_key, ee_cert] = CertUtils.generate_cert_generic(db,
srcdir,
random.randint(100,4000000),
key_type,
name + "-tampered-int-valid-ee",
ee_ext_text,
int_key,
int_cert)
        # only tamper after the EE cert has been generated
        tamper_cert(int_cert)
[ee_key, ee_cert] = CertUtils.generate_cert_generic(db,
srcdir,
random.randint(100,4000000),
key_type,
name + "-valid-int-tampered-ee",
ee_ext_text,
valid_int_key,
valid_int_cert)
        tamper_cert(ee_cert)
generate_certs()
|
edxnercel/edx-platform
|
refs/heads/master
|
lms/djangoapps/shoppingcart/migrations/0007_auto__add_field_orderitem_service_fee.py
|
114
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'OrderItem.service_fee'
db.add_column('shoppingcart_orderitem', 'service_fee',
self.gf('django.db.models.fields.DecimalField')(default=0.0, max_digits=30, decimal_places=2),
keep_default=False)
# Adding index on 'OrderItem', fields ['status']
db.create_index('shoppingcart_orderitem', ['status'])
# Adding index on 'OrderItem', fields ['fulfilled_time']
db.create_index('shoppingcart_orderitem', ['fulfilled_time'])
# Adding index on 'OrderItem', fields ['refund_requested_time']
db.create_index('shoppingcart_orderitem', ['refund_requested_time'])
def backwards(self, orm):
# Removing index on 'OrderItem', fields ['refund_requested_time']
db.delete_index('shoppingcart_orderitem', ['refund_requested_time'])
# Removing index on 'OrderItem', fields ['fulfilled_time']
db.delete_index('shoppingcart_orderitem', ['fulfilled_time'])
# Removing index on 'OrderItem', fields ['status']
db.delete_index('shoppingcart_orderitem', ['status'])
# Deleting field 'OrderItem.service_fee'
db.delete_column('shoppingcart_orderitem', 'service_fee')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.paidcourseregistrationannotation': {
'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart']
|
gorkinovich/DefendersOfMankind
|
refs/heads/master
|
dependencies/boost-1.46.0/boost-build/test/boostbook.py
|
4
|
#!/usr/bin/python
# Copyright 2004, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
from BoostBuild import Tester, List
import string
# Create a temporary working directory
t = Tester()
t.set_tree("boostbook")
# For some reason, the messages are sent to stderr.
t.run_build_system()
t.fail_test(string.find(t.stdout(), """Writing boost/A.html for refentry(boost.A)
Writing library/reference.html for section(library.reference)
Writing index.html for chapter(library)
Writing docs_HTML.manifest
""") == -1)
t.expect_addition(["html/boost/A.html", "html/index.html"])
t.cleanup()
|
piquadrat/django
|
refs/heads/master
|
django/test/selenium.py
|
56
|
import sys
import unittest
from contextlib import contextmanager
from django.test import LiveServerTestCase, tag
from django.utils.module_loading import import_string
from django.utils.text import capfirst
class SeleniumTestCaseBase(type(LiveServerTestCase)):
# List of browsers to dynamically create test classes for.
browsers = []
# Sentinel value to differentiate browser-specific instances.
browser = None
def __new__(cls, name, bases, attrs):
"""
Dynamically create new classes and add them to the test module when
multiple browsers specs are provided (e.g. --selenium=firefox,chrome).
"""
test_class = super().__new__(cls, name, bases, attrs)
# If the test class is either browser-specific or a test base, return it.
if test_class.browser or not any(name.startswith('test') and callable(value) for name, value in attrs.items()):
return test_class
elif test_class.browsers:
# Reuse the created test class to make it browser-specific.
# We can't rename it to include the browser name or create a
# subclass like we do with the remaining browsers as it would
# either duplicate tests or prevent pickling of its instances.
first_browser = test_class.browsers[0]
test_class.browser = first_browser
# Create subclasses for each of the remaining browsers and expose
# them through the test's module namespace.
module = sys.modules[test_class.__module__]
for browser in test_class.browsers[1:]:
browser_test_class = cls.__new__(
cls,
"%s%s" % (capfirst(browser), name),
(test_class,),
{'browser': browser, '__module__': test_class.__module__}
)
setattr(module, browser_test_class.__name__, browser_test_class)
return test_class
# If no browsers were specified, skip this class (it'll still be discovered).
return unittest.skip('No browsers specified.')(test_class)
@classmethod
def import_webdriver(cls, browser):
return import_string("selenium.webdriver.%s.webdriver.WebDriver" % browser)
def create_webdriver(self):
return self.import_webdriver(self.browser)()
@tag('selenium')
class SeleniumTestCase(LiveServerTestCase, metaclass=SeleniumTestCaseBase):
implicit_wait = 10
@classmethod
def setUpClass(cls):
cls.selenium = cls.create_webdriver()
cls.selenium.implicitly_wait(cls.implicit_wait)
super().setUpClass()
@classmethod
def _tearDownClassInternal(cls):
        # quit() the WebDriver before attempting to terminate and join the
        # single-threaded LiveServerThread to avoid a deadlock if the browser
        # kept a connection alive.
if hasattr(cls, 'selenium'):
cls.selenium.quit()
super()._tearDownClassInternal()
@contextmanager
def disable_implicit_wait(self):
"""Disable the default implicit wait."""
self.selenium.implicitly_wait(0)
try:
yield
finally:
self.selenium.implicitly_wait(self.implicit_wait)
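# A minimal usage sketch (not part of this module): a project's tests would
# subclass SeleniumTestCase, and the metaclass above then multiplies the
# class once per entry in `browsers`. The class name and URL below are
# hypothetical.
#
# class MySeleniumTests(SeleniumTestCase):
#     browsers = ['firefox', 'chrome']
#
#     def test_index(self):
#         self.selenium.get('%s%s' % (self.live_server_url, '/'))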
|
le9i0nx/ansible
|
refs/heads/devel
|
test/units/modules/network/f5/test_bigip_configsync_action.py
|
7
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.f5_utils import AnsibleF5Client
try:
from library.bigip_configsync_actions import Parameters
from library.bigip_configsync_actions import ModuleManager
from library.bigip_configsync_actions import ArgumentSpec
from ansible.module_utils.f5_utils import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_configsync_actions import Parameters
from ansible.modules.network.f5.bigip_configsync_actions import ModuleManager
from ansible.modules.network.f5.bigip_configsync_actions import ArgumentSpec
from ansible.module_utils.f5_utils import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
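# e.g. (hypothetical fixture name, for illustration only): a second call
# with the same name returns the cached, already-parsed JSON.
# params = load_fixture('device_group.json')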
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
sync_device_to_group=True,
sync_group_to_device=True,
overwrite_config=True,
device_group="foo"
)
p = Parameters(args)
assert p.sync_device_to_group is True
assert p.sync_group_to_device is True
assert p.overwrite_config is True
assert p.device_group == 'foo'
def test_module_parameters_yes_no(self):
args = dict(
sync_device_to_group='yes',
sync_group_to_device='no',
overwrite_config='yes',
device_group="foo"
)
p = Parameters(args)
assert p.sync_device_to_group is True
assert p.sync_group_to_device is False
assert p.overwrite_config is True
assert p.device_group == 'foo'
@patch('ansible.module_utils.f5_utils.AnsibleF5Client._get_mgmt_root',
return_value=True)
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_update_agent_status_traps(self, *args):
set_module_args(dict(
sync_device_to_group='yes',
device_group="foo",
password='passsword',
server='localhost',
user='admin'
))
client = AnsibleF5Client(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode,
f5_product_name=self.spec.f5_product_name
)
mm = ModuleManager(client)
# Override methods to force specific logic in the module to happen
mm._device_group_exists = Mock(return_value=True)
mm._sync_to_group_required = Mock(return_value=False)
mm.execute_on_device = Mock(return_value=True)
mm.read_current_from_device = Mock(return_value=None)
mm._get_status_from_resource = Mock()
mm._get_status_from_resource.side_effect = [
'Changes Pending', 'Awaiting Initial Sync', 'In Sync'
]
results = mm.exec_module()
assert results['changed'] is True
|
DMLoy/ECommerceBasic
|
refs/heads/master
|
bin/bin/activate/lib/python2.7/site-packages/pip/vendor/html5lib/treebuilders/dom.py
|
249
|
from __future__ import absolute_import, division, unicode_literals
from xml.dom import minidom, Node, XML_NAMESPACE, XMLNS_NAMESPACE
import weakref
from . import _base
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
def getDomBuilder(DomImplementation):
Dom = DomImplementation
class AttrList(object):
def __init__(self, element):
self.element = element
def __iter__(self):
return list(self.element.attributes.items()).__iter__()
def __setitem__(self, name, value):
self.element.setAttribute(name, value)
def __len__(self):
return len(list(self.element.attributes.items()))
def items(self):
return [(item[0], item[1]) for item in
list(self.element.attributes.items())]
def keys(self):
return list(self.element.attributes.keys())
def __getitem__(self, name):
return self.element.getAttribute(name)
def __contains__(self, name):
if isinstance(name, tuple):
raise NotImplementedError
else:
return self.element.hasAttribute(name)
class NodeBuilder(_base.Node):
def __init__(self, element):
_base.Node.__init__(self, element.nodeName)
self.element = element
namespace = property(lambda self: hasattr(self.element, "namespaceURI")
and self.element.namespaceURI or None)
def appendChild(self, node):
node.parent = self
self.element.appendChild(node.element)
def insertText(self, data, insertBefore=None):
text = self.element.ownerDocument.createTextNode(data)
if insertBefore:
self.element.insertBefore(text, insertBefore.element)
else:
self.element.appendChild(text)
def insertBefore(self, node, refNode):
self.element.insertBefore(node.element, refNode.element)
node.parent = self
def removeChild(self, node):
if node.element.parentNode == self.element:
self.element.removeChild(node.element)
node.parent = None
def reparentChildren(self, newParent):
while self.element.hasChildNodes():
child = self.element.firstChild
self.element.removeChild(child)
newParent.element.appendChild(child)
self.childNodes = []
def getAttributes(self):
return AttrList(self.element)
def setAttributes(self, attributes):
if attributes:
for name, value in list(attributes.items()):
if isinstance(name, tuple):
if name[0] is not None:
qualifiedName = (name[0] + ":" + name[1])
else:
qualifiedName = name[1]
self.element.setAttributeNS(name[2], qualifiedName,
value)
else:
self.element.setAttribute(
name, value)
attributes = property(getAttributes, setAttributes)
def cloneNode(self):
return NodeBuilder(self.element.cloneNode(False))
def hasContent(self):
return self.element.hasChildNodes()
def getNameTuple(self):
if self.namespace is None:
return namespaces["html"], self.name
else:
return self.namespace, self.name
nameTuple = property(getNameTuple)
class TreeBuilder(_base.TreeBuilder):
def documentClass(self):
self.dom = Dom.getDOMImplementation().createDocument(None, None, None)
return weakref.proxy(self)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
domimpl = Dom.getDOMImplementation()
doctype = domimpl.createDocumentType(name, publicId, systemId)
self.document.appendChild(NodeBuilder(doctype))
if Dom == minidom:
doctype.ownerDocument = self.dom
def elementClass(self, name, namespace=None):
if namespace is None and self.defaultNamespace is None:
node = self.dom.createElement(name)
else:
node = self.dom.createElementNS(namespace, name)
return NodeBuilder(node)
def commentClass(self, data):
return NodeBuilder(self.dom.createComment(data))
def fragmentClass(self):
return NodeBuilder(self.dom.createDocumentFragment())
def appendChild(self, node):
self.dom.appendChild(node.element)
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
return self.dom
def getFragment(self):
return _base.TreeBuilder.getFragment(self).element
def insertText(self, data, parent=None):
data = data
if parent != self:
_base.TreeBuilder.insertText(self, data, parent)
else:
# HACK: allow text nodes as children of the document node
if hasattr(self.dom, '_child_node_types'):
if not Node.TEXT_NODE in self.dom._child_node_types:
self.dom._child_node_types = list(self.dom._child_node_types)
self.dom._child_node_types.append(Node.TEXT_NODE)
self.dom.appendChild(self.dom.createTextNode(data))
implementation = DomImplementation
name = None
def testSerializer(element):
element.normalize()
rv = []
def serializeElement(element, indent=0):
if element.nodeType == Node.DOCUMENT_TYPE_NODE:
if element.name:
if element.publicId or element.systemId:
publicId = element.publicId or ""
systemId = element.systemId or ""
rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
(' ' * indent, element.name, publicId, systemId))
else:
rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))
else:
rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
elif element.nodeType == Node.DOCUMENT_NODE:
rv.append("#document")
elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
rv.append("#document-fragment")
elif element.nodeType == Node.COMMENT_NODE:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))
elif element.nodeType == Node.TEXT_NODE:
rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))
else:
if (hasattr(element, "namespaceURI") and
element.namespaceURI is not None):
name = "%s %s" % (constants.prefixes[element.namespaceURI],
element.nodeName)
else:
name = element.nodeName
rv.append("|%s<%s>" % (' ' * indent, name))
if element.hasAttributes():
attributes = []
for i in range(len(element.attributes)):
attr = element.attributes.item(i)
name = attr.nodeName
value = attr.value
ns = attr.namespaceURI
if ns:
name = "%s %s" % (constants.prefixes[ns], attr.localName)
else:
name = attr.nodeName
attributes.append((name, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
indent += 2
for child in element.childNodes:
serializeElement(child, indent)
serializeElement(element, 0)
return "\n".join(rv)
def dom2sax(node, handler, nsmap={'xml': XML_NAMESPACE}):
if node.nodeType == Node.ELEMENT_NODE:
if not nsmap:
handler.startElement(node.nodeName, node.attributes)
for child in node.childNodes:
dom2sax(child, handler, nsmap)
handler.endElement(node.nodeName)
else:
attributes = dict(node.attributes.itemsNS())
# gather namespace declarations
prefixes = []
for attrname in list(node.attributes.keys()):
attr = node.getAttributeNode(attrname)
if (attr.namespaceURI == XMLNS_NAMESPACE or
(attr.namespaceURI is None and attr.nodeName.startswith('xmlns'))):
prefix = (attr.nodeName != 'xmlns' and attr.nodeName or None)
handler.startPrefixMapping(prefix, attr.nodeValue)
prefixes.append(prefix)
nsmap = nsmap.copy()
nsmap[prefix] = attr.nodeValue
del attributes[(attr.namespaceURI, attr.nodeName)]
# apply namespace declarations
for attrname in list(node.attributes.keys()):
attr = node.getAttributeNode(attrname)
if attr.namespaceURI is None and ':' in attr.nodeName:
prefix = attr.nodeName.split(':')[0]
if prefix in nsmap:
del attributes[(attr.namespaceURI, attr.nodeName)]
attributes[(nsmap[prefix], attr.nodeName)] = attr.nodeValue
# SAX events
ns = node.namespaceURI or nsmap.get(None, None)
handler.startElementNS((ns, node.nodeName), node.nodeName, attributes)
for child in node.childNodes:
dom2sax(child, handler, nsmap)
handler.endElementNS((ns, node.nodeName), node.nodeName)
for prefix in prefixes:
handler.endPrefixMapping(prefix)
elif node.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:
handler.characters(node.nodeValue)
elif node.nodeType == Node.DOCUMENT_NODE:
handler.startDocument()
for child in node.childNodes:
dom2sax(child, handler, nsmap)
handler.endDocument()
elif node.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
for child in node.childNodes:
dom2sax(child, handler, nsmap)
else:
# ATTRIBUTE_NODE
# ENTITY_NODE
# PROCESSING_INSTRUCTION_NODE
# COMMENT_NODE
# DOCUMENT_TYPE_NODE
# NOTATION_NODE
pass
return locals()
# The actual means to get a module!
getDomModule = moduleFactoryFactory(getDomBuilder)
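# A brief usage note (an assumption, not spelled out in this module):
# callers normally reach this factory through html5lib's treebuilder
# registry rather than calling getDomModule directly, e.g.
#
# import html5lib
# document = html5lib.parse("<p>Hello</p>", treebuilder="dom")
#
# which resolves to getDomModule(minidom).TreeBuilder internally.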
|
SnakeJenny/TensorFlow
|
refs/heads/master
|
tensorflow/contrib/linear_optimizer/python/sdca_estimator_test.py
|
16
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for linear_optimizer.sdca_estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.layers.python.layers import feature_column as feature_column_lib
from tensorflow.contrib.linear_optimizer.python import sdca_estimator
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.platform import test
class SDCALogisticClassifierTest(test.TestCase):
def testRealValuedFeatures(self):
"""Tests SDCALogisticClassifier works with real valued features."""
def input_fn():
return {
'example_id': constant_op.constant(['1', '2']),
'maintenance_cost': constant_op.constant([[500.0], [200.0]]),
'sq_footage': constant_op.constant([[800.0], [600.0]]),
'weights': constant_op.constant([[1.0], [1.0]])
}, constant_op.constant([[0], [1]])
maintenance_cost = feature_column_lib.real_valued_column('maintenance_cost')
sq_footage = feature_column_lib.real_valued_column('sq_footage')
classifier = sdca_estimator.SDCALogisticClassifier(
example_id_column='example_id',
feature_columns=[maintenance_cost, sq_footage],
weight_column_name='weights')
classifier.fit(input_fn=input_fn, steps=100)
loss = classifier.evaluate(input_fn=input_fn, steps=1)['loss']
self.assertLess(loss, 0.05)
def testRealValuedFeatureWithHigherDimension(self):
"""Tests SDCALogisticClassifier with high-dimension real valued features."""
# input_fn is identical to the one in testRealValuedFeatures where 2
# 1-dimensional dense features are replaced by a 2-dimensional feature.
def input_fn():
return {
'example_id':
constant_op.constant(['1', '2']),
'dense_feature':
constant_op.constant([[500.0, 800.0], [200.0, 600.0]])
}, constant_op.constant([[0], [1]])
dense_feature = feature_column_lib.real_valued_column(
'dense_feature', dimension=2)
classifier = sdca_estimator.SDCALogisticClassifier(
example_id_column='example_id', feature_columns=[dense_feature])
classifier.fit(input_fn=input_fn, steps=100)
loss = classifier.evaluate(input_fn=input_fn, steps=1)['loss']
self.assertLess(loss, 0.05)
def testBucketizedFeatures(self):
"""Tests SDCALogisticClassifier with bucketized features."""
def input_fn():
return {
'example_id': constant_op.constant(['1', '2', '3']),
'price': constant_op.constant([[600.0], [1000.0], [400.0]]),
'sq_footage': constant_op.constant([[1000.0], [600.0], [700.0]]),
'weights': constant_op.constant([[1.0], [1.0], [1.0]])
}, constant_op.constant([[1], [0], [1]])
price_bucket = feature_column_lib.bucketized_column(
feature_column_lib.real_valued_column('price'),
boundaries=[500.0, 700.0])
sq_footage_bucket = feature_column_lib.bucketized_column(
feature_column_lib.real_valued_column('sq_footage'), boundaries=[650.0])
classifier = sdca_estimator.SDCALogisticClassifier(
example_id_column='example_id',
feature_columns=[price_bucket, sq_footage_bucket],
weight_column_name='weights',
l2_regularization=1.0)
classifier.fit(input_fn=input_fn, steps=50)
metrics = classifier.evaluate(input_fn=input_fn, steps=1)
self.assertGreater(metrics['accuracy'], 0.9)
def testSparseFeatures(self):
"""Tests SDCALogisticClassifier with sparse features."""
def input_fn():
return {
'example_id':
constant_op.constant(['1', '2', '3']),
'price':
constant_op.constant([[0.4], [0.6], [0.3]]),
'country':
sparse_tensor.SparseTensor(
values=['IT', 'US', 'GB'],
indices=[[0, 0], [1, 3], [2, 1]],
dense_shape=[3, 5]),
'weights':
constant_op.constant([[1.0], [1.0], [1.0]])
}, constant_op.constant([[1], [0], [1]])
price = feature_column_lib.real_valued_column('price')
country = feature_column_lib.sparse_column_with_hash_bucket(
'country', hash_bucket_size=5)
classifier = sdca_estimator.SDCALogisticClassifier(
example_id_column='example_id',
feature_columns=[price, country],
weight_column_name='weights')
classifier.fit(input_fn=input_fn, steps=50)
metrics = classifier.evaluate(input_fn=input_fn, steps=1)
self.assertGreater(metrics['accuracy'], 0.9)
def testWeightedSparseFeatures(self):
"""Tests SDCALogisticClassifier with weighted sparse features."""
def input_fn():
return {
'example_id':
constant_op.constant(['1', '2', '3']),
'price':
sparse_tensor.SparseTensor(
values=[2., 3., 1.],
indices=[[0, 0], [1, 0], [2, 0]],
dense_shape=[3, 5]),
'country':
sparse_tensor.SparseTensor(
values=['IT', 'US', 'GB'],
indices=[[0, 0], [1, 0], [2, 0]],
dense_shape=[3, 5])
}, constant_op.constant([[1], [0], [1]])
country = feature_column_lib.sparse_column_with_hash_bucket(
'country', hash_bucket_size=5)
country_weighted_by_price = feature_column_lib.weighted_sparse_column(
country, 'price')
classifier = sdca_estimator.SDCALogisticClassifier(
example_id_column='example_id',
feature_columns=[country_weighted_by_price])
classifier.fit(input_fn=input_fn, steps=50)
metrics = classifier.evaluate(input_fn=input_fn, steps=1)
self.assertGreater(metrics['accuracy'], 0.9)
def testCrossedFeatures(self):
"""Tests SDCALogisticClassifier with crossed features."""
def input_fn():
return {
'example_id':
constant_op.constant(['1', '2', '3']),
'language':
sparse_tensor.SparseTensor(
values=['english', 'italian', 'spanish'],
indices=[[0, 0], [1, 0], [2, 0]],
dense_shape=[3, 1]),
'country':
sparse_tensor.SparseTensor(
values=['US', 'IT', 'MX'],
indices=[[0, 0], [1, 0], [2, 0]],
dense_shape=[3, 1])
}, constant_op.constant([[0], [0], [1]])
language = feature_column_lib.sparse_column_with_hash_bucket(
'language', hash_bucket_size=5)
country = feature_column_lib.sparse_column_with_hash_bucket(
'country', hash_bucket_size=5)
country_language = feature_column_lib.crossed_column(
[language, country], hash_bucket_size=10)
classifier = sdca_estimator.SDCALogisticClassifier(
example_id_column='example_id', feature_columns=[country_language])
classifier.fit(input_fn=input_fn, steps=10)
metrics = classifier.evaluate(input_fn=input_fn, steps=1)
self.assertGreater(metrics['accuracy'], 0.9)
def testMixedFeatures(self):
"""Tests SDCALogisticClassifier with a mix of features."""
def input_fn():
return {
'example_id':
constant_op.constant(['1', '2', '3']),
'price':
constant_op.constant([[0.6], [0.8], [0.3]]),
'sq_footage':
constant_op.constant([[900.0], [700.0], [600.0]]),
'country':
sparse_tensor.SparseTensor(
values=['IT', 'US', 'GB'],
indices=[[0, 0], [1, 3], [2, 1]],
dense_shape=[3, 5]),
'weights':
constant_op.constant([[3.0], [1.0], [1.0]])
}, constant_op.constant([[1], [0], [1]])
price = feature_column_lib.real_valued_column('price')
sq_footage_bucket = feature_column_lib.bucketized_column(
feature_column_lib.real_valued_column('sq_footage'),
boundaries=[650.0, 800.0])
country = feature_column_lib.sparse_column_with_hash_bucket(
'country', hash_bucket_size=5)
sq_footage_country = feature_column_lib.crossed_column(
[sq_footage_bucket, country], hash_bucket_size=10)
classifier = sdca_estimator.SDCALogisticClassifier(
example_id_column='example_id',
feature_columns=[price, sq_footage_bucket, country, sq_footage_country],
weight_column_name='weights')
classifier.fit(input_fn=input_fn, steps=50)
metrics = classifier.evaluate(input_fn=input_fn, steps=1)
self.assertGreater(metrics['accuracy'], 0.9)
class SDCALinearRegressorTest(test.TestCase):
def testRealValuedLinearFeatures(self):
"""Tests SDCALinearRegressor works with real valued features."""
x = [[1.2, 2.0, -1.5], [-2.0, 3.0, -0.5], [1.0, -0.5, 4.0]]
weights = [[3.0], [-1.2], [0.5]]
y = np.dot(x, weights)
def input_fn():
return {
'example_id': constant_op.constant(['1', '2', '3']),
'x': constant_op.constant(x),
'weights': constant_op.constant([[10.0], [10.0], [10.0]])
}, constant_op.constant(y)
x_column = feature_column_lib.real_valued_column('x', dimension=3)
regressor = sdca_estimator.SDCALinearRegressor(
example_id_column='example_id',
feature_columns=[x_column],
weight_column_name='weights')
regressor.fit(input_fn=input_fn, steps=20)
loss = regressor.evaluate(input_fn=input_fn, steps=1)['loss']
self.assertLess(loss, 0.01)
self.assertIn('linear/x/weight', regressor.get_variable_names())
regressor_weights = regressor.get_variable_value('linear/x/weight')
self.assertAllClose(
[w[0] for w in weights], regressor_weights.flatten(), rtol=0.1)
def testMixedFeaturesArbitraryWeights(self):
"""Tests SDCALinearRegressor works with a mix of features."""
def input_fn():
return {
'example_id':
constant_op.constant(['1', '2', '3']),
'price':
constant_op.constant([[0.6], [0.8], [0.3]]),
'sq_footage':
constant_op.constant([[900.0], [700.0], [600.0]]),
'country':
sparse_tensor.SparseTensor(
values=['IT', 'US', 'GB'],
indices=[[0, 0], [1, 3], [2, 1]],
dense_shape=[3, 5]),
'weights':
constant_op.constant([[3.0], [5.0], [7.0]])
}, constant_op.constant([[1.55], [-1.25], [-3.0]])
price = feature_column_lib.real_valued_column('price')
sq_footage_bucket = feature_column_lib.bucketized_column(
feature_column_lib.real_valued_column('sq_footage'),
boundaries=[650.0, 800.0])
country = feature_column_lib.sparse_column_with_hash_bucket(
'country', hash_bucket_size=5)
sq_footage_country = feature_column_lib.crossed_column(
[sq_footage_bucket, country], hash_bucket_size=10)
regressor = sdca_estimator.SDCALinearRegressor(
example_id_column='example_id',
feature_columns=[price, sq_footage_bucket, country, sq_footage_country],
l2_regularization=1.0,
weight_column_name='weights')
regressor.fit(input_fn=input_fn, steps=20)
loss = regressor.evaluate(input_fn=input_fn, steps=1)['loss']
self.assertLess(loss, 0.05)
def testSdcaOptimizerSparseFeaturesWithL1Reg(self):
"""SDCALinearRegressor works with sparse features and L1 regularization."""
def input_fn():
return {
'example_id':
constant_op.constant(['1', '2', '3']),
'price':
constant_op.constant([[0.4], [0.6], [0.3]]),
'country':
sparse_tensor.SparseTensor(
values=['IT', 'US', 'GB'],
indices=[[0, 0], [1, 3], [2, 1]],
dense_shape=[3, 5]),
'weights':
constant_op.constant([[10.0], [10.0], [10.0]])
}, constant_op.constant([[1.4], [-0.8], [2.6]])
price = feature_column_lib.real_valued_column('price')
country = feature_column_lib.sparse_column_with_hash_bucket(
'country', hash_bucket_size=5)
# Regressor with no L1 regularization.
regressor = sdca_estimator.SDCALinearRegressor(
example_id_column='example_id',
feature_columns=[price, country],
weight_column_name='weights')
regressor.fit(input_fn=input_fn, steps=20)
no_l1_reg_loss = regressor.evaluate(input_fn=input_fn, steps=1)['loss']
variable_names = regressor.get_variable_names()
self.assertIn('linear/price/weight', variable_names)
self.assertIn('linear/country/weights', variable_names)
no_l1_reg_weights = {
'linear/price/weight':
regressor.get_variable_value('linear/price/weight'),
'linear/country/weights':
regressor.get_variable_value('linear/country/weights'),
}
# Regressor with L1 regularization.
regressor = sdca_estimator.SDCALinearRegressor(
example_id_column='example_id',
feature_columns=[price, country],
l1_regularization=1.0,
weight_column_name='weights')
regressor.fit(input_fn=input_fn, steps=20)
l1_reg_loss = regressor.evaluate(input_fn=input_fn, steps=1)['loss']
l1_reg_weights = {
'linear/price/weight':
regressor.get_variable_value('linear/price/weight'),
'linear/country/weights':
regressor.get_variable_value('linear/country/weights'),
}
# Unregularized loss is lower when there is no L1 regularization.
self.assertLess(no_l1_reg_loss, l1_reg_loss)
self.assertLess(no_l1_reg_loss, 0.05)
# But weights returned by the regressor with L1 regularization have smaller
# L1 norm.
l1_reg_weights_norm, no_l1_reg_weights_norm = 0.0, 0.0
for var_name in sorted(l1_reg_weights):
l1_reg_weights_norm += sum(
np.absolute(l1_reg_weights[var_name].flatten()))
no_l1_reg_weights_norm += sum(
np.absolute(no_l1_reg_weights[var_name].flatten()))
print('Var name: %s, value: %s' % (var_name,
no_l1_reg_weights[var_name].flatten()))
self.assertLess(l1_reg_weights_norm, no_l1_reg_weights_norm)
def testBiasOnly(self):
"""Tests SDCALinearRegressor has a valid bias weight."""
def input_fn():
"""Testing the bias weight when it's the only feature present.
All of the instances in this input only have the bias feature, and a
1/4 of the labels are positive. This means that the expected weight for
      the bias should be close to the average prediction, i.e. 0.25.
Returns:
Training data for the test.
"""
num_examples = 40
return {
'example_id':
constant_op.constant([str(x + 1) for x in range(num_examples)]),
# place_holder is an empty column which is always 0 (absent), because
# LinearClassifier requires at least one column.
'place_holder':
constant_op.constant([[0.0]] * num_examples),
      }, constant_op.constant([[1 if i % 4 == 0 else 0]
for i in range(num_examples)])
place_holder = feature_column_lib.real_valued_column('place_holder')
regressor = sdca_estimator.SDCALinearRegressor(
example_id_column='example_id', feature_columns=[place_holder])
regressor.fit(input_fn=input_fn, steps=100)
self.assertNear(
regressor.get_variable_value('linear/bias_weight')[0], 0.25, err=0.1)
def testBiasAndOtherColumns(self):
"""SDCALinearRegressor has valid bias weight with other columns present."""
def input_fn():
"""Testing the bias weight when there are other features present.
1/2 of the instances in this input have feature 'a', the rest have
feature 'b', and we expect the bias to be added to each instance as well.
0.4 of all instances that have feature 'a' are positive, and 0.2 of all
instances that have feature 'b' are positive. The labels in the dataset
are ordered to appear shuffled since SDCA expects shuffled data, and
converges faster with this pseudo-random ordering.
If the bias was centered we would expect the weights to be:
bias: 0.3
a: 0.1
b: -0.1
Until b/29339026 is resolved, the bias gets regularized with the same
global value for the other columns, and so the expected weights get
shifted and are:
bias: 0.2
a: 0.2
b: 0.0
Returns:
The test dataset.
"""
num_examples = 200
half = int(num_examples / 2)
return {
'example_id':
constant_op.constant([str(x + 1) for x in range(num_examples)]),
'a':
constant_op.constant([[1]] * int(half) + [[0]] * int(half)),
'b':
constant_op.constant([[0]] * int(half) + [[1]] * int(half)),
}, constant_op.constant(
[[x]
for x in [1, 0, 0, 1, 1, 0, 0, 0, 1, 0] * int(half / 10) +
[0, 1, 0, 0, 0, 0, 0, 0, 1, 0] * int(half / 10)])
regressor = sdca_estimator.SDCALinearRegressor(
example_id_column='example_id',
feature_columns=[
feature_column_lib.real_valued_column('a'),
feature_column_lib.real_valued_column('b')
])
regressor.fit(input_fn=input_fn, steps=200)
variable_names = regressor.get_variable_names()
self.assertIn('linear/bias_weight', variable_names)
self.assertIn('linear/a/weight', variable_names)
self.assertIn('linear/b/weight', variable_names)
# TODO(b/29339026): Change the expected results to expect a centered bias.
self.assertNear(
regressor.get_variable_value('linear/bias_weight')[0], 0.2, err=0.05)
self.assertNear(
regressor.get_variable_value('linear/a/weight')[0], 0.2, err=0.05)
self.assertNear(
regressor.get_variable_value('linear/b/weight')[0], 0.0, err=0.05)
def testBiasAndOtherColumnsFabricatedCentered(self):
"""SDCALinearRegressor has valid bias weight when instances are centered."""
def input_fn():
"""Testing the bias weight when there are other features present.
1/2 of the instances in this input have feature 'a', the rest have
feature 'b', and we expect the bias to be added to each instance as well.
0.1 of all instances that have feature 'a' have a label of 1, and 0.1 of
all instances that have feature 'b' have a label of -1.
We can expect the weights to be:
bias: 0.0
a: 0.1
b: -0.1
Returns:
The test dataset.
"""
num_examples = 200
half = int(num_examples / 2)
return {
'example_id':
constant_op.constant([str(x + 1) for x in range(num_examples)]),
'a':
constant_op.constant([[1]] * int(half) + [[0]] * int(half)),
'b':
constant_op.constant([[0]] * int(half) + [[1]] * int(half)),
}, constant_op.constant([[1 if x % 10 == 0 else 0] for x in range(half)] +
[[-1 if x % 10 == 0 else 0] for x in range(half)])
regressor = sdca_estimator.SDCALinearRegressor(
example_id_column='example_id',
feature_columns=[
feature_column_lib.real_valued_column('a'),
feature_column_lib.real_valued_column('b')
])
regressor.fit(input_fn=input_fn, steps=100)
variable_names = regressor.get_variable_names()
self.assertIn('linear/bias_weight', variable_names)
self.assertIn('linear/a/weight', variable_names)
self.assertIn('linear/b/weight', variable_names)
self.assertNear(
regressor.get_variable_value('linear/bias_weight')[0], 0.0, err=0.05)
self.assertNear(
regressor.get_variable_value('linear/a/weight')[0], 0.1, err=0.05)
self.assertNear(
regressor.get_variable_value('linear/b/weight')[0], -0.1, err=0.05)
if __name__ == '__main__':
test.main()
|
schreifels/sublime-goto-related
|
refs/heads/master
|
test/test_goto_related.py
|
1
|
import unittest
from mock import Mock
import os, sys
root_path = os.path.abspath(os.path.join(__file__, '..', '..'))
sys.path.insert(0, root_path)
from test.stubs import sublime, sublime_plugin
from goto_related import GotoRelatedCommand
class TestGotoRelatedCommand(unittest.TestCase):
def assert_goto(self, current_file_path, patterns_to_strip, expected_overlay_text):
subject = GotoRelatedCommand()
subject.show_goto = Mock()
subject.current_file_path = Mock(return_value=current_file_path)
subject.patterns_to_strip = Mock(return_value=patterns_to_strip)
subject.run(Mock())
subject.show_goto.assert_called_with(expected_overlay_text)
def test_run(self):
self.assert_goto('/path/to/main.rb', [], 'main.rb')
self.assert_goto('/path/to/main_test.rb', [], 'main_test.rb')
self.assert_goto('/path/to/my_test_test.rb', ['_test'], 'my.rb')
self.assert_goto('/path/to/my_test_test.rb', ['_test$'], 'my_test_test.rb')
self.assert_goto('/path/to/my_test_test.rb', ['\..+$', '_test$'], 'my_test')
self.assert_goto('/path/to/home.html.erb', ['\..+$'], 'home')
self.assert_goto('/path/to/_my_partial.html.erb', ['^_', '\..+$'], 'my_partial')
if __name__ == '__main__':
unittest.main()
|
vbmendes/django-transmeta
|
refs/heads/master
|
transmeta/management/__init__.py
|
12133432
| |
FlySorterLLC/SantaFeControlSoftware
|
refs/heads/master
|
commonYeastTasks.py
|
1
|
##
## This copyrighted software is distributed under the GPL v2.0 license.
## See the LICENSE file for more details.
##
import random as rand
import time
import numpy as np
import cv2
## Highest order commands for yeast colony manipulation
# Loads applicator from applicator module
def applicatorEquip(robot, YeastApplicatorPlate, ID, applicatorZ=22.5, vacDur=500):
coordX = YeastApplicatorPlate.getApplicatorCoords(ID)[0]
coordY = YeastApplicatorPlate.getApplicatorCoords(ID)[1]
robot.moveToSpd(pt=[float(coordX), float(coordY), 0, 0, 0], spd=5000)
robot.dwell(t=1)
robot.smallPartManipVac(True)
robot.dwell(t=1)
trylower = robot.lowerCare(z=applicatorZ, descendZ=5, retreatZ=5) # lower onto applicator ID
if trylower['limit'] == 1:
robot.dwell(10)
robot.homeZ2()
for tries in range(0,1):
            # NB: in Python 2, (tries+1/2) reduces to tries because 1/2 is
            # integer division, so with tries == 0 this retry offset is 0;
            # the same applies to the three retry blocks below.
            robot.moveToSpd(pt=[float(coordX)-(tries+1/2), float(coordY), 0, 0, applicatorZ-19], spd=2000)
robot.dwell(10)
trylower = robot.lowerCare(z=applicatorZ, descendZ=10, retreatZ=10)
if trylower['limit'] == 0:
break
else:
robot.homeZ2()
if trylower['limit'] == 1:
for tries in range(0,1):
robot.moveToSpd(pt=[float(coordX)+(tries+1/2), float(coordY), 0, 0, applicatorZ-19], spd=2000)
robot.dwell(10)
trylower = robot.lowerCare(z=applicatorZ, descendZ=10, retreatZ=10)
if trylower['limit'] == 0:
break
else:
robot.homeZ2()
if trylower['limit'] == 1:
for tries in range(0,1):
robot.moveToSpd(pt=[float(coordX), float(coordY)-(tries+1/2), 0, 0, applicatorZ-19], spd=2000)
robot.dwell(10)
trylower = robot.lowerCare(z=applicatorZ, descendZ=10, retreatZ=10)
if trylower['limit'] == 0:
break
else:
robot.homeZ2()
if trylower['limit'] == 1:
for tries in range(0,1):
robot.moveToSpd(pt=[float(coordX), float(coordY)+(tries+1/2), 0, 0, applicatorZ-19], spd=2000)
robot.dwell(10)
trylower = robot.lowerCare(z=applicatorZ, descendZ=10, retreatZ=10)
if trylower['limit'] == 0:
break
else:
robot.homeZ2()
robot.dwell(t=vacDur)
robot.homeZ2() # retrieve applicator
robot.moveToSpd(pt=[415, 247, 0, 0, 0, 0], spd=3000) # move safely into neutral position
# Discards applicator into applicator discard module
def applicatorDiscard(robot, discardX=438, discardY=116, discardZ=50, airBurst=2, airDur=1000):
robot.moveToSpd(pt=[float(discardX), float(discardY), 0, 0, 0], spd=5000)
robot.dwell(t=1)
robot.lowerCare(z=discardZ, descendZ=10, retreatZ=10) # lower into discard receptacle
robot.smallPartManipVac(False)
for b in range(0,airBurst):
robot.flyManipAir(True)
robot.dwell(t=airDur)
robot.flyManipAir(False)
robot.dwell(t=airDur/10)
    robot.moveRel(pt=[0, 0, 0, 0, -discardZ])
# Tests whether applicator is loaded by engaging limit switch
def applicatorTest(robot, testX=415, testY=255, testZ=13):
robot.moveToSpd(pt=[float(testX), float(testY), 0, 0, 0], spd=5000)
robot.dwell(t=1)
test = robot.lowerCare(z=testZ, descendZ=10, retreatZ=10) # test whether applicator is equipped
robot.homeZ2()
if test['limit'] == 1:
print 'Applicator is equipped.'
else:
print 'Applicator is not equipped.'
return test['limit']
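# A minimal sketch (hypothetical, not in the original file) of a full
# applicator cycle built from the helpers above: equip an applicator,
# verify it with the limit switch, then discard it. The plate object and
# the ID are assumptions.
#
# applicatorEquip(robot, YeastApplicatorPlate, ID=0)
# if applicatorTest(robot) == 1:
#     pass  # ...probe colonies here...
# applicatorDiscard(robot)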
# Engages object manipulator to withdraw lid from arena ID
def lidWithdraw(robot, YeastArena, ID, adjZ=0):
coordX = YeastArena.getSPCoords(ID)[0]
coordY = YeastArena.getSPCoords(ID)[1]
coordZ = YeastArena.SPz+adjZ
robot.moveToSpd(pt=[float(coordX), float(coordY), 0, 0, 0], spd=5000)
robot.dwell(1)
robot.moveToSpd(pt=[float(coordX), float(coordY), coordZ, 0, 0], spd=3000)
robot.dwell(1)
robot.flyManipVac(True)
robot.dwell(300)
robot.moveRel(pt=[0, 0, -coordZ+10, 0, 0]) # so lid does not fall off
# Engages object manipulator to place lid on arena ID
def lidPlace(robot, YeastArena, ID):
coordX = YeastArena.getSPCoords(ID)[0]
coordY = YeastArena.getSPCoords(ID)[1]
coordZ = YeastArena.SPz
robot.moveToSpd(pt=[float(coordX), float(coordY), 10, 0, 0], spd=5000)
robot.dwell(1)
robot.moveToSpd(pt=[float(coordX), float(coordY), coordZ-1, 0, 0], spd=4000)
robot.dwell(1)
robot.flyManipVac(False)
robot.smallPartManipAir(True)
robot.dwell(100)
robot.moveRel(pt=[0, 0, -coordZ+1, 0, 0])
robot.dwell(1)
robot.smallPartManipAir(False)
# Engages loaded applicator to test for successful lid withdrawal/placement. Take care to use a sterile applicator.
def lidTest(robot, YeastArena, ID):
coordX = YeastArena.getArenaCoords(ID)[0]
coordY = YeastArena.getArenaCoords(ID)[1]
coordZ = YeastArena.SPz-20
robot.moveToSpd(pt=[float(coordX), float(coordY), 10, 0, 0], spd=5000)
robot.dwell(1)
test = robot.lowerCare(z=coordZ+1, descendZ=10, retreatZ=10) # test whether lid is present
robot.homeZ2()
if test['limit'] == 1:
print 'Lid detected.'
else:
print 'Lid not detected.'
return test['limit']
# Check object manipulator flow sensor (experimental)
def lidCheck(robot):
vacSens = robot.smoothie.sendCmdGetReply("M109\n").split(' ')
print vacSens
return vacSens
# Engages loaded applicator to descend and touch colony at coordinates
def colonyProbe(robot, YeastArena, ID, colonyX, colonyY, probeT=100, skipAnchor=False, skipLidProbe=True, agarZ=0):
arenaX = YeastArena.getArenaCoords(ID)[0]
arenaY = YeastArena.getArenaCoords(ID)[1]
if agarZ == 0:
agarZ = YeastArena.agarZ
spZ = YeastArena.SPz
if skipAnchor == False:
robot.moveToSpd(pt=[float(arenaX-10), float(arenaY-10), 10, 0, 0], spd=5000)
robot.dwell(1)
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[float(colonyX), CurCoord[1], 10, 0, CurCoord[4]], spd=3000)
time.sleep(0.1)
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[float(colonyX), float(colonyY), 10, 0, CurCoord[4]], spd=3000)
time.sleep(0.1)
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[CurCoord[0], CurCoord[1], 10, 0, agarZ-10], spd=3000)
if skipLidProbe==False:
trylower = robot.lowerCare(z=spZ-22, descendZ=10, retreatZ=10) # probe for closed lid
if trylower['limit'] == 1:
robot.homeZ2()
print 'Check yeast arena', ID, 'lid.'
return trylower
robot.dwell(100)
reset = robot.lowerCare(z=agarZ, descendZ=5, retreatZ=5) # move applicator towards agar surface
robot.dwell(probeT)
    if reset['limit'] == 1:
robot.homeZ2()
else:
robot.moveRel(pt=[0, 0, 0, 0, -10])
time.sleep(0.1)
# Coordinate space resembling 'MAPLE'
def getLogoCoord(ID):
# M
Logo = [ [310, 156], [310,152],[310, 148], [310,146],[310, 142], [310,138],
[309,140], [307,144],[305,148], [303,144],[301, 140],
[300,138], [300,142],[300,146],[300,148],[300,152],[300,156],
# A
[295,156], [295,152],[295, 148], [295,146],[295, 142], [295,138],
[293,138],[290,138],[287,138],
[285,138], [285, 142], [285, 146],[285, 148], [285,152],[285, 156],
[293, 148], [290, 148], [287, 148],
# P
[282,156], [282,152],[282, 148], [282,146],[282, 142], [282,138],
[280,138], [277,138],[274,138],
[280,148],[277,148],[274,148],
[274,148], [274,144],[274, 142], [274,138],
# L
[269,156], [269,152],[269, 148], [269,146],[269, 142], [269,138],
[267,156],[264,156],[261,156],
# E
[256,156], [256,152],[256, 148], [256,146],[256, 142], [256,138],
[253,156],[250,156],[247,156],
[253,147],[250,147],[247,147],
[253,138],[250,138],[247,138] ]
return Logo[ID]
# Coordinate space resembling 'MAPLE' in higher resolution
def getLogoCoordFull(ID):
# M
Logo = [ [310, 156],[310, 154], [310,152],[310, 150],[310, 148], [310,146], [310,144],[310, 142],[310, 140], [310,138],
[309,140], [308,142], [307,144],[306,146],[305,148], [304,146], [303,144],[302, 142],[301, 140],
[300,138], [300,140], [300,142],[300,144],[300,146],[300,148],[300,150],[300,152],[300,154],[300,156],
# A
[295,156], [295, 154], [295,152],[295, 150],[295, 148], [295,146], [295,144],[295, 142],[295, 140], [295,138],
[293,138], [291,138],[289,138],[287,138],
[285,138], [285,140], [285, 142], [285,144],[285, 146],[285, 148], [285,150], [285,152],[285, 154],[285, 156],
[293, 148], [291, 148], [289, 148], [287, 148],
# P
[282,156], [282, 154], [282,152],[282, 150],[282, 148], [282,146], [282,144],[282, 142],[282, 140], [282,138],
[280,138], [278,138],[276,138],[274,138],
[280,148], [278,148],[276,148],[274,148],
[274,148], [274,146], [274,144],[274, 142],[274, 140], [274,138],
# L
[269,156], [269, 154], [269,152],[269, 150],[269, 148], [269,146], [269,144],[269, 142],[269, 140], [269,138],
[267,156], [265,156],[263,156],[261,156],
# E
[256,156], [256, 154], [256,152],[256, 150],[256, 148], [256,146], [256,144],[256, 142],[256, 140], [256,138],
[254,156], [252,156],[250,156],[248,156],
[254,147], [252,147],[250,147],[248,147],
[254,138], [252,138],[250,138],[248,138] ]
return Logo[ID]
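# Illustrative sketch (not original MAPLE code): replay every point of the
# logo map with the loaded applicator. nPoints must match the number of
# entries in the chosen logo table; robot, YeastArena and the arena ID are
# assumed to be configured by the caller.
def drawLogo(robot, YeastArena, ID, nPoints, agarZ=0):
    for i in range(nPoints):
        x, y = getLogoCoord(i)
        colonyProbe(robot, YeastArena, ID, x, y, skipAnchor=(i > 0), agarZ=agarZ)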
# Engage camera to monitor arena ID
def imgArena(robot, YeastArena, ID, light=0):
coordX = YeastArena.getCamCoords(ID)[0]
coordY = YeastArena.getCamCoords(ID)[1]
coordZ = YeastArena.camsharpz
robot.moveToSpd(pt=[float(coordX), float(coordY), 10, coordZ, 0], spd=5000)
if light != 0:
time.sleep(0.1)
robot.light(True)
time.sleep(0.1)
img = robot.captureImage()
if light != 0:
robot.light(False)
return img
# Find (first detected) colony in arena ID using machine vision
def detectColony(robot, YeastArena, ID):
    img_width = 1280
    img_height = 960
coordX = YeastArena.getCamCoords(ID)[0]
coordY = YeastArena.getCamCoords(ID)[1]
coordZ = YeastArena.camsharpz
robot.moveToSpd(pt=[float(coordX), float(coordY), 10, coordZ, 0], spd=5000)
robot.dwell(1)
robot.light(True)
time.sleep(0.2)
img = robot.captureImage()
robot.light(False)
circles = robot.findOpening(img, slowmode=False, MAX_SIZE=30, MIN_SIZE=8, startp1=139, startp2=170, startp3=2.6, imgshow=0)
x = circles[0][0]
y = circles[0][1]
imgmid = [img_width/2, img_height/2]
xrel = imgmid[0] - x
yrel = imgmid[1] - y
return xrel, yrel
# Streak out single colony in predetermined motion at arena ID
def streakColony(robot, YeastArena, ID, agarZ=0):
arenaX = YeastArena.getArenaCoords(ID)[0]
arenaY = YeastArena.getArenaCoords(ID)[1]
if agarZ == 0:
agarZ = YeastArena.agarZ
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[float(arenaX), CurCoord[1], 10, 0, CurCoord[4]], spd=3000)
time.sleep(0.2)
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[float(arenaX), float(arenaY), 10, 0, CurCoord[4]], spd=3000)
time.sleep(0.2)
robot.moveRel([25, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, 21, 0, 0, 0])
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[CurCoord[0], CurCoord[1], 10, 0, agarZ-10], spd=1000)
robot.lowerCare(z=agarZ, descendZ=5, retreatZ=5)
time.sleep(0.1)
robot.moveRel([-50, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, 3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([47, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, 3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([-44, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, 3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([41, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, 3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([-38, 0, 0, 0, 0])
time.sleep(0.1)
robot.moveRel([0, 0, 0, 0, -10])
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[float(arenaX), CurCoord[1], 10, 0, CurCoord[4]], spd=3000)
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[float(arenaX), float(arenaY), 10, 0, CurCoord[4]], spd=3000)
time.sleep(0.2)
robot.moveRel([31, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, -7, 0, 0, 0])
CurCoord = robot.getCurrentPosition()
robot.moveToSpd(pt=[CurCoord[0], CurCoord[1], 10, 0, agarZ-10], spd=1000)
robot.lowerCare(z=agarZ, descendZ=5, retreatZ=5)
time.sleep(0.1)
robot.moveRel([-62, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([59, 0, 0, 0, 0])
time.sleep(0.1)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([-56, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([53, 0, 0, 0, 0])
time.sleep(0.1)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([-50, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([47, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([-44, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([41, 0, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([-38, 0, 0, 0, 0])
time.sleep(0.1)
robot.moveRel([0, -3, 0, 0, 0])
time.sleep(0.2)
robot.moveRel([35, 0, 0, 0, 0])
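# Illustrative end-to-end sketch (not original MAPLE code): image an arena,
# convert the detected colony's pixel offset into stage coordinates, probe it,
# then streak it out in a destination arena. PIX_TO_MM is a placeholder
# camera-calibration factor; detectColony() returns offsets in camera pixels
# relative to the image centre, so a real calibration is required. Lid
# handling for the destination arena is left to the caller.
def pickAndStreak(robot, YeastArena, srcID, dstID, PIX_TO_MM=0.05):
    lidWithdraw(robot, YeastArena, srcID)
    xrel, yrel = detectColony(robot, YeastArena, srcID)  # pixel offsets from image centre
    camX = YeastArena.getCamCoords(srcID)[0]
    camY = YeastArena.getCamCoords(srcID)[1]
    colonyX = camX + xrel * PIX_TO_MM  # assumed linear pixel-to-stage calibration
    colonyY = camY + yrel * PIX_TO_MM
    colonyProbe(robot, YeastArena, srcID, colonyX, colonyY)
    streakColony(robot, YeastArena, dstID)
    lidPlace(robot, YeastArena, srcID)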
|
Nebelhom/WordPuzzleCreator
|
refs/heads/master
|
lib/xlwt/Worksheet.py
|
16
|
# -*- coding: windows-1252 -*-
'''
BOF
UNCALCED
INDEX
Calculation Settings Block
PRINTHEADERS
PRINTGRIDLINES
GRIDSET
GUTS
DEFAULTROWHEIGHT
WSBOOL
Page Settings Block
Worksheet Protection Block
DEFCOLWIDTH
COLINFO
SORT
DIMENSIONS
Row Blocks
WINDOW2
SCL
PANE
SELECTION
STANDARDWIDTH
MERGEDCELLS
LABELRANGES
PHONETIC
Conditional Formatting Table
Hyperlink Table
Data Validity Table
SHEETLAYOUT (BIFF8X only)
SHEETPROTECTION (BIFF8X only)
RANGEPROTECTION (BIFF8X only)
EOF
'''
import BIFFRecords
import Bitmap
import Style
import tempfile
class Worksheet(object):
# a safe default value, 3 is always valid!
active_pane = 3
#################################################################
## Constructor
#################################################################
def __init__(self, sheetname, parent_book, cell_overwrite_ok=False):
import Row
self.Row = Row.Row
import Column
self.Column = Column.Column
self.__name = sheetname
self.__parent = parent_book
self._cell_overwrite_ok = cell_overwrite_ok
self.__rows = {}
self.__cols = {}
self.__merged_ranges = []
self.__bmp_rec = ''
self.__show_formulas = 0
self.__show_grid = 1
self.__show_headers = 1
self.__panes_frozen = 0
self.show_zero_values = 1
self.__auto_colour_grid = 1
self.__cols_right_to_left = 0
self.__show_outline = 1
self.__remove_splits = 0
# Multiple sheets can be selected, but only one can be active
# (hold down Ctrl and click multiple tabs in the file in OOo)
self.__selected = 0
# "sheet_visible" should really be called "sheet_active"
# and is 1 when this sheet is the sheet displayed when the file
# is open. More than likely only one sheet should ever be set as
# visible.
# The same sheet should be specified in Workbook.active_sheet
# (that way, both the WINDOW1 record in the book and the WINDOW2
# records in each sheet will be in agreement)
# The visibility of the sheet is found in the "visibility"
# attribute obtained from the BOUNDSHEET record.
self.__sheet_visible = 0
self.__page_preview = 0
self.__first_visible_row = 0
self.__first_visible_col = 0
self.__grid_colour = 0x40
self.__preview_magn = 0 # use default (60%)
self.__normal_magn = 0 # use default (100%)
self.__scl_magn = None
self.explicit_magn_setting = False
self.visibility = 0 # from/to BOUNDSHEET record.
self.__vert_split_pos = None
self.__horz_split_pos = None
self.__vert_split_first_visible = None
self.__horz_split_first_visible = None
# This is a caller-settable flag:
self.split_position_units_are_twips = False
# Default is False for backward compatibility with pyExcelerator
# and previous versions of xlwt.
# if panes_frozen:
# vert/horz_split_pos are taken as number of rows/cols
# else: # split
# if split_position_units_are_twips:
# vert/horz_split_pos are taken as number of twips
# else:
# vert/horz_split_pos are taken as
# number of rows(cols) * default row(col) height (width) (i.e. 12.75 (8.43) somethings)
# and converted to twips by approximate formulas
# Callers who are copying an existing file should use
# xlwt_worksheet.split_position_units_are_twips = True
# because that's what's actually in the file.
# There are 20 twips to a point. There are 72 points to an inch.
self.__row_gut_width = 0
self.__col_gut_height = 0
self.__show_auto_page_breaks = 1
self.__dialogue_sheet = 0
self.__auto_style_outline = 0
self.__outline_below = 0
self.__outline_right = 0
self.__fit_num_pages = 0
self.__show_row_outline = 1
self.__show_col_outline = 1
self.__alt_expr_eval = 0
self.__alt_formula_entries = 0
self.__row_default_height = 0x00FF
self.row_default_height_mismatch = 0
self.row_default_hidden = 0
self.row_default_space_above = 0
self.row_default_space_below = 0
self.__col_default_width = 0x0008
self.__calc_mode = 1
self.__calc_count = 0x0064
self.__RC_ref_mode = 1
self.__iterations_on = 0
self.__delta = 0.001
self.__save_recalc = 0
self.__print_headers = 0
self.__print_grid = 0
self.__grid_set = 1
self.__vert_page_breaks = []
self.__horz_page_breaks = []
self.__header_str = '&P'
self.__footer_str = '&F'
self.__print_centered_vert = 0
self.__print_centered_horz = 1
self.__left_margin = 0.3 #0.5
self.__right_margin = 0.3 #0.5
self.__top_margin = 0.61 #1.0
self.__bottom_margin = 0.37 #1.0
self.__paper_size_code = 9 # A4
self.__print_scaling = 100
self.__start_page_number = 1
self.__fit_width_to_pages = 1
self.__fit_height_to_pages = 1
self.__print_in_rows = 1
self.__portrait = 1
self.__print_not_colour = 0
self.__print_draft = 0
self.__print_notes = 0
self.__print_notes_at_end = 0
self.__print_omit_errors = 0
self.__print_hres = 0x012C # 300 dpi
self.__print_vres = 0x012C # 300 dpi
self.__header_margin = 0.1
self.__footer_margin = 0.1
self.__copies_num = 1
self.__wnd_protect = 0
self.__obj_protect = 0
self.__protect = 0
self.__scen_protect = 0
self.__password = ''
self.last_used_row = 0
self.first_used_row = 65535
self.last_used_col = 0
self.first_used_col = 255
self.row_tempfile = None
self.__flushed_rows = {}
self.__row_visible_levels = 0
#################################################################
## Properties, "getters", "setters"
#################################################################
def set_name(self, value):
self.__name = value
def get_name(self):
return self.__name
name = property(get_name, set_name)
#################################################################
def get_parent(self):
return self.__parent
parent = property(get_parent)
#################################################################
def get_rows(self):
return self.__rows
rows = property(get_rows)
#################################################################
def get_cols(self):
return self.__cols
cols = property(get_cols)
#################################################################
def get_merged_ranges(self):
return self.__merged_ranges
merged_ranges = property(get_merged_ranges)
#################################################################
def get_bmp_rec(self):
return self.__bmp_rec
bmp_rec = property(get_bmp_rec)
#################################################################
def set_show_formulas(self, value):
self.__show_formulas = int(value)
def get_show_formulas(self):
return bool(self.__show_formulas)
show_formulas = property(get_show_formulas, set_show_formulas)
#################################################################
def set_show_grid(self, value):
self.__show_grid = int(value)
def get_show_grid(self):
return bool(self.__show_grid)
show_grid = property(get_show_grid, set_show_grid)
#################################################################
def set_show_headers(self, value):
self.__show_headers = int(value)
def get_show_headers(self):
return bool(self.__show_headers)
show_headers = property(get_show_headers, set_show_headers)
#################################################################
def set_panes_frozen(self, value):
self.__panes_frozen = int(value)
def get_panes_frozen(self):
return bool(self.__panes_frozen)
panes_frozen = property(get_panes_frozen, set_panes_frozen)
#################################################################
### def set_show_empty_as_zero(self, value):
### self.__show_empty_as_zero = int(value)
### def get_show_empty_as_zero(self):
### return bool(self.__show_empty_as_zero)
### show_empty_as_zero = property(get_show_empty_as_zero, set_show_empty_as_zero)
#################################################################
def set_auto_colour_grid(self, value):
self.__auto_colour_grid = int(value)
def get_auto_colour_grid(self):
return bool(self.__auto_colour_grid)
auto_colour_grid = property(get_auto_colour_grid, set_auto_colour_grid)
#################################################################
def set_cols_right_to_left(self, value):
self.__cols_right_to_left = int(value)
def get_cols_right_to_left(self):
return bool(self.__cols_right_to_left)
cols_right_to_left = property(get_cols_right_to_left, set_cols_right_to_left)
#################################################################
def set_show_outline(self, value):
self.__show_outline = int(value)
def get_show_outline(self):
return bool(self.__show_outline)
show_outline = property(get_show_outline, set_show_outline)
#################################################################
def set_remove_splits(self, value):
self.__remove_splits = int(value)
def get_remove_splits(self):
return bool(self.__remove_splits)
remove_splits = property(get_remove_splits, set_remove_splits)
#################################################################
def set_selected(self, value):
self.__selected = int(value)
def get_selected(self):
return bool(self.__selected)
selected = property(get_selected, set_selected)
#################################################################
def set_sheet_visible(self, value):
self.__sheet_visible = int(value)
def get_sheet_visible(self):
return bool(self.__sheet_visible)
sheet_visible = property(get_sheet_visible, set_sheet_visible)
#################################################################
def set_page_preview(self, value):
self.__page_preview = int(value)
def get_page_preview(self):
return bool(self.__page_preview)
page_preview = property(get_page_preview, set_page_preview)
#################################################################
def set_first_visible_row(self, value):
self.__first_visible_row = value
def get_first_visible_row(self):
return self.__first_visible_row
first_visible_row = property(get_first_visible_row, set_first_visible_row)
#################################################################
def set_first_visible_col(self, value):
self.__first_visible_col = value
def get_first_visible_col(self):
return self.__first_visible_col
first_visible_col = property(get_first_visible_col, set_first_visible_col)
#################################################################
def set_grid_colour(self, value):
self.__grid_colour = value
def get_grid_colour(self):
return self.__grid_colour
grid_colour = property(get_grid_colour, set_grid_colour)
#################################################################
def set_preview_magn(self, value):
self.__preview_magn = value
def get_preview_magn(self):
return self.__preview_magn
preview_magn = property(get_preview_magn, set_preview_magn)
#################################################################
def set_normal_magn(self, value):
self.__normal_magn = value
def get_normal_magn(self):
return self.__normal_magn
normal_magn = property(get_normal_magn, set_normal_magn)
#################################################################
def set_scl_magn(self, value):
self.__scl_magn = value
def get_scl_magn(self):
return self.__scl_magn
scl_magn = property(get_scl_magn, set_scl_magn)
#################################################################
def set_vert_split_pos(self, value):
self.__vert_split_pos = abs(value)
def get_vert_split_pos(self):
return self.__vert_split_pos
vert_split_pos = property(get_vert_split_pos, set_vert_split_pos)
#################################################################
def set_horz_split_pos(self, value):
self.__horz_split_pos = abs(value)
def get_horz_split_pos(self):
return self.__horz_split_pos
horz_split_pos = property(get_horz_split_pos, set_horz_split_pos)
#################################################################
def set_vert_split_first_visible(self, value):
self.__vert_split_first_visible = abs(value)
def get_vert_split_first_visible(self):
return self.__vert_split_first_visible
vert_split_first_visible = property(get_vert_split_first_visible, set_vert_split_first_visible)
#################################################################
def set_horz_split_first_visible(self, value):
self.__horz_split_first_visible = abs(value)
def get_horz_split_first_visible(self):
return self.__horz_split_first_visible
horz_split_first_visible = property(get_horz_split_first_visible, set_horz_split_first_visible)
#################################################################
#def set_row_gut_width(self, value):
# self.__row_gut_width = value
#
#def get_row_gut_width(self):
# return self.__row_gut_width
#
#row_gut_width = property(get_row_gut_width, set_row_gut_width)
#
#################################################################
#
#def set_col_gut_height(self, value):
# self.__col_gut_height = value
#
#def get_col_gut_height(self):
# return self.__col_gut_height
#
#col_gut_height = property(get_col_gut_height, set_col_gut_height)
#
#################################################################
def set_show_auto_page_breaks(self, value):
self.__show_auto_page_breaks = int(value)
def get_show_auto_page_breaks(self):
return bool(self.__show_auto_page_breaks)
show_auto_page_breaks = property(get_show_auto_page_breaks, set_show_auto_page_breaks)
#################################################################
def set_dialogue_sheet(self, value):
self.__dialogue_sheet = int(value)
def get_dialogue_sheet(self):
return bool(self.__dialogue_sheet)
dialogue_sheet = property(get_dialogue_sheet, set_dialogue_sheet)
#################################################################
def set_auto_style_outline(self, value):
self.__auto_style_outline = int(value)
def get_auto_style_outline(self):
return bool(self.__auto_style_outline)
auto_style_outline = property(get_auto_style_outline, set_auto_style_outline)
#################################################################
def set_outline_below(self, value):
self.__outline_below = int(value)
def get_outline_below(self):
return bool(self.__outline_below)
outline_below = property(get_outline_below, set_outline_below)
#################################################################
def set_outline_right(self, value):
self.__outline_right = int(value)
def get_outline_right(self):
return bool(self.__outline_right)
outline_right = property(get_outline_right, set_outline_right)
#################################################################
def set_fit_num_pages(self, value):
self.__fit_num_pages = value
def get_fit_num_pages(self):
return self.__fit_num_pages
fit_num_pages = property(get_fit_num_pages, set_fit_num_pages)
#################################################################
def set_show_row_outline(self, value):
self.__show_row_outline = int(value)
def get_show_row_outline(self):
return bool(self.__show_row_outline)
show_row_outline = property(get_show_row_outline, set_show_row_outline)
#################################################################
def set_show_col_outline(self, value):
self.__show_col_outline = int(value)
def get_show_col_outline(self):
return bool(self.__show_col_outline)
show_col_outline = property(get_show_col_outline, set_show_col_outline)
#################################################################
def set_alt_expr_eval(self, value):
self.__alt_expr_eval = int(value)
def get_alt_expr_eval(self):
return bool(self.__alt_expr_eval)
alt_expr_eval = property(get_alt_expr_eval, set_alt_expr_eval)
#################################################################
def set_alt_formula_entries(self, value):
self.__alt_formula_entries = int(value)
def get_alt_formula_entries(self):
return bool(self.__alt_formula_entries)
alt_formula_entries = property(get_alt_formula_entries, set_alt_formula_entries)
#################################################################
def set_row_default_height(self, value):
self.__row_default_height = value
def get_row_default_height(self):
return self.__row_default_height
row_default_height = property(get_row_default_height, set_row_default_height)
#################################################################
def set_col_default_width(self, value):
self.__col_default_width = value
def get_col_default_width(self):
return self.__col_default_width
col_default_width = property(get_col_default_width, set_col_default_width)
#################################################################
def set_calc_mode(self, value):
self.__calc_mode = value & 0x03
def get_calc_mode(self):
return self.__calc_mode
calc_mode = property(get_calc_mode, set_calc_mode)
#################################################################
def set_calc_count(self, value):
self.__calc_count = value
def get_calc_count(self):
return self.__calc_count
calc_count = property(get_calc_count, set_calc_count)
#################################################################
def set_RC_ref_mode(self, value):
self.__RC_ref_mode = int(value)
def get_RC_ref_mode(self):
return bool(self.__RC_ref_mode)
RC_ref_mode = property(get_RC_ref_mode, set_RC_ref_mode)
#################################################################
def set_iterations_on(self, value):
self.__iterations_on = int(value)
def get_iterations_on(self):
return bool(self.__iterations_on)
iterations_on = property(get_iterations_on, set_iterations_on)
#################################################################
def set_delta(self, value):
self.__delta = value
def get_delta(self):
return self.__delta
delta = property(get_delta, set_delta)
#################################################################
def set_save_recalc(self, value):
self.__save_recalc = int(value)
def get_save_recalc(self):
return bool(self.__save_recalc)
save_recalc = property(get_save_recalc, set_save_recalc)
#################################################################
def set_print_headers(self, value):
self.__print_headers = int(value)
def get_print_headers(self):
return bool(self.__print_headers)
print_headers = property(get_print_headers, set_print_headers)
#################################################################
def set_print_grid(self, value):
self.__print_grid = int(value)
def get_print_grid(self):
return bool(self.__print_grid)
print_grid = property(get_print_grid, set_print_grid)
#################################################################
#
#def set_grid_set(self, value):
# self.__grid_set = int(value)
#
#def get_grid_set(self):
# return bool(self.__grid_set)
#
#grid_set = property(get_grid_set, set_grid_set)
#
#################################################################
def set_vert_page_breaks(self, value):
self.__vert_page_breaks = value
def get_vert_page_breaks(self):
return self.__vert_page_breaks
vert_page_breaks = property(get_vert_page_breaks, set_vert_page_breaks)
#################################################################
def set_horz_page_breaks(self, value):
self.__horz_page_breaks = value
def get_horz_page_breaks(self):
return self.__horz_page_breaks
horz_page_breaks = property(get_horz_page_breaks, set_horz_page_breaks)
#################################################################
def set_header_str(self, value):
if isinstance(value, str):
value = unicode(value, self.__parent.encoding)
self.__header_str = value
def get_header_str(self):
return self.__header_str
header_str = property(get_header_str, set_header_str)
#################################################################
def set_footer_str(self, value):
if isinstance(value, str):
value = unicode(value, self.__parent.encoding)
self.__footer_str = value
def get_footer_str(self):
return self.__footer_str
footer_str = property(get_footer_str, set_footer_str)
#################################################################
def set_print_centered_vert(self, value):
self.__print_centered_vert = int(value)
def get_print_centered_vert(self):
return bool(self.__print_centered_vert)
print_centered_vert = property(get_print_centered_vert, set_print_centered_vert)
#################################################################
def set_print_centered_horz(self, value):
self.__print_centered_horz = int(value)
def get_print_centered_horz(self):
return bool(self.__print_centered_horz)
print_centered_horz = property(get_print_centered_horz, set_print_centered_horz)
#################################################################
def set_left_margin(self, value):
self.__left_margin = value
def get_left_margin(self):
return self.__left_margin
left_margin = property(get_left_margin, set_left_margin)
#################################################################
def set_right_margin(self, value):
self.__right_margin = value
def get_right_margin(self):
return self.__right_margin
right_margin = property(get_right_margin, set_right_margin)
#################################################################
def set_top_margin(self, value):
self.__top_margin = value
def get_top_margin(self):
return self.__top_margin
top_margin = property(get_top_margin, set_top_margin)
#################################################################
def set_bottom_margin(self, value):
self.__bottom_margin = value
def get_bottom_margin(self):
return self.__bottom_margin
bottom_margin = property(get_bottom_margin, set_bottom_margin)
#################################################################
def set_paper_size_code(self, value):
self.__paper_size_code = value
def get_paper_size_code(self):
return self.__paper_size_code
paper_size_code = property(get_paper_size_code, set_paper_size_code)
#################################################################
def set_print_scaling(self, value):
self.__print_scaling = value
def get_print_scaling(self):
return self.__print_scaling
print_scaling = property(get_print_scaling, set_print_scaling)
#################################################################
def set_start_page_number(self, value):
self.__start_page_number = value
def get_start_page_number(self):
return self.__start_page_number
start_page_number = property(get_start_page_number, set_start_page_number)
#################################################################
def set_fit_width_to_pages(self, value):
self.__fit_width_to_pages = value
def get_fit_width_to_pages(self):
return self.__fit_width_to_pages
fit_width_to_pages = property(get_fit_width_to_pages, set_fit_width_to_pages)
#################################################################
def set_fit_height_to_pages(self, value):
self.__fit_height_to_pages = value
def get_fit_height_to_pages(self):
return self.__fit_height_to_pages
fit_height_to_pages = property(get_fit_height_to_pages, set_fit_height_to_pages)
#################################################################
def set_print_in_rows(self, value):
self.__print_in_rows = int(value)
def get_print_in_rows(self):
return bool(self.__print_in_rows)
print_in_rows = property(get_print_in_rows, set_print_in_rows)
#################################################################
def set_portrait(self, value):
self.__portrait = int(value)
def get_portrait(self):
return bool(self.__portrait)
portrait = property(get_portrait, set_portrait)
#################################################################
def set_print_colour(self, value):
self.__print_not_colour = int(not value)
def get_print_colour(self):
return not bool(self.__print_not_colour)
print_colour = property(get_print_colour, set_print_colour)
#################################################################
def set_print_draft(self, value):
self.__print_draft = int(value)
def get_print_draft(self):
return bool(self.__print_draft)
print_draft = property(get_print_draft, set_print_draft)
#################################################################
def set_print_notes(self, value):
self.__print_notes = int(value)
def get_print_notes(self):
return bool(self.__print_notes)
print_notes = property(get_print_notes, set_print_notes)
#################################################################
def set_print_notes_at_end(self, value):
self.__print_notes_at_end = int(value)
def get_print_notes_at_end(self):
return bool(self.__print_notes_at_end)
print_notes_at_end = property(get_print_notes_at_end, set_print_notes_at_end)
#################################################################
def set_print_omit_errors(self, value):
self.__print_omit_errors = int(value)
def get_print_omit_errors(self):
return bool(self.__print_omit_errors)
print_omit_errors = property(get_print_omit_errors, set_print_omit_errors)
#################################################################
def set_print_hres(self, value):
self.__print_hres = value
def get_print_hres(self):
return self.__print_hres
print_hres = property(get_print_hres, set_print_hres)
#################################################################
def set_print_vres(self, value):
self.__print_vres = value
def get_print_vres(self):
return self.__print_vres
print_vres = property(get_print_vres, set_print_vres)
#################################################################
def set_header_margin(self, value):
self.__header_margin = value
def get_header_margin(self):
return self.__header_margin
header_margin = property(get_header_margin, set_header_margin)
#################################################################
def set_footer_margin(self, value):
self.__footer_margin = value
def get_footer_margin(self):
return self.__footer_margin
footer_margin = property(get_footer_margin, set_footer_margin)
#################################################################
def set_copies_num(self, value):
self.__copies_num = value
def get_copies_num(self):
return self.__copies_num
copies_num = property(get_copies_num, set_copies_num)
##################################################################
def set_wnd_protect(self, value):
self.__wnd_protect = int(value)
def get_wnd_protect(self):
return bool(self.__wnd_protect)
wnd_protect = property(get_wnd_protect, set_wnd_protect)
#################################################################
def set_obj_protect(self, value):
self.__obj_protect = int(value)
def get_obj_protect(self):
return bool(self.__obj_protect)
obj_protect = property(get_obj_protect, set_obj_protect)
#################################################################
def set_protect(self, value):
self.__protect = int(value)
def get_protect(self):
return bool(self.__protect)
protect = property(get_protect, set_protect)
#################################################################
def set_scen_protect(self, value):
self.__scen_protect = int(value)
def get_scen_protect(self):
return bool(self.__scen_protect)
scen_protect = property(get_scen_protect, set_scen_protect)
#################################################################
def set_password(self, value):
self.__password = value
def get_password(self):
return self.__password
password = property(get_password, set_password)
##################################################################
## Methods
##################################################################
def write(self, r, c, label="", style=Style.default_style):
self.row(r).write(c, label, style)
def write_rich_text(self, r, c, rich_text_list, style=Style.default_style):
self.row(r).set_cell_rich_text(c, rich_text_list, style)
def merge(self, r1, r2, c1, c2, style=Style.default_style):
# Stand-alone merge of previously written cells.
# Problems: (1) style to be used should be existing style of
# the top-left cell, not an arg.
# (2) should ensure that any previous data value in
# non-top-left cells is nobbled.
# Note: if a cell is set by a data record then later
# is referenced by a [MUL]BLANK record, Excel will blank
# out the cell on the screen, but OOo & Gnu will not
# blank it out. Need to do something better than writing
# multiple records. In the meantime, avoid this method and use
# write_merge() instead.
if c2 > c1:
self.row(r1).write_blanks(c1 + 1, c2, style)
for r in range(r1+1, r2+1):
self.row(r).write_blanks(c1, c2, style)
self.__merged_ranges.append((r1, r2, c1, c2))
def write_merge(self, r1, r2, c1, c2, label="", style=Style.default_style):
assert 0 <= c1 <= c2 <= 255
assert 0 <= r1 <= r2 <= 65535
self.write(r1, c1, label, style)
if c2 > c1:
self.row(r1).write_blanks(c1 + 1, c2, style) # skip (r1, c1)
for r in range(r1+1, r2+1):
self.row(r).write_blanks(c1, c2, style)
self.__merged_ranges.append((r1, r2, c1, c2))
def insert_bitmap(self, filename, row, col, x = 0, y = 0, scale_x = 1, scale_y = 1):
bmp = Bitmap.ImDataBmpRecord(filename)
obj = Bitmap.ObjBmpRecord(row, col, self, bmp, x, y, scale_x, scale_y)
self.__bmp_rec += obj.get() + bmp.get()
def col(self, indx):
if indx not in self.__cols:
self.__cols[indx] = self.Column(indx, self)
return self.__cols[indx]
def row(self, indx):
if indx not in self.__rows:
if indx in self.__flushed_rows:
raise Exception("Attempt to reuse row index %d of sheet %r after flushing" % (indx, self.__name))
self.__rows[indx] = self.Row(indx, self)
if indx > self.last_used_row:
self.last_used_row = indx
if indx < self.first_used_row:
self.first_used_row = indx
return self.__rows[indx]
def row_height(self, row): # in pixels
if row in self.__rows:
return self.__rows[row].get_height_in_pixels()
else:
return 17
def col_width(self, col): # in pixels
if col in self.__cols:
return self.__cols[col].width_in_pixels()
else:
return 64
##################################################################
## BIFF records generation
##################################################################
def __bof_rec(self):
return BIFFRecords.Biff8BOFRecord(BIFFRecords.Biff8BOFRecord.WORKSHEET).get()
def __update_row_visible_levels(self):
if self.__rows:
temp = max([self.__rows[r].level for r in self.__rows]) + 1
self.__row_visible_levels = max(temp, self.__row_visible_levels)
def __guts_rec(self):
self.__update_row_visible_levels()
col_visible_levels = 0
if len(self.__cols) != 0:
col_visible_levels = max([self.__cols[c].level for c in self.__cols]) + 1
return BIFFRecords.GutsRecord(
self.__row_gut_width, self.__col_gut_height, self.__row_visible_levels, col_visible_levels).get()
def __defaultrowheight_rec(self):
options = 0x0000
options |= (self.row_default_height_mismatch & 1) << 0
options |= (self.row_default_hidden & 1) << 1
options |= (self.row_default_space_above & 1) << 2
options |= (self.row_default_space_below & 1) << 3
defht = self.__row_default_height
return BIFFRecords.DefaultRowHeightRecord(options, defht).get()
def __wsbool_rec(self):
options = 0x00
options |= (self.__show_auto_page_breaks & 0x01) << 0
options |= (self.__dialogue_sheet & 0x01) << 4
options |= (self.__auto_style_outline & 0x01) << 5
options |= (self.__outline_below & 0x01) << 6
options |= (self.__outline_right & 0x01) << 7
options |= (self.__fit_num_pages & 0x01) << 8
options |= (self.__show_row_outline & 0x01) << 10
options |= (self.__show_col_outline & 0x01) << 11
options |= (self.__alt_expr_eval & 0x01) << 14
options |= (self.__alt_formula_entries & 0x01) << 15
return BIFFRecords.WSBoolRecord(options).get()
def __eof_rec(self):
return BIFFRecords.EOFRecord().get()
def __colinfo_rec(self):
result = ''
for col in self.__cols:
result += self.__cols[col].get_biff_record()
return result
def __dimensions_rec(self):
return BIFFRecords.DimensionsRecord(
self.first_used_row, self.last_used_row,
self.first_used_col, self.last_used_col
).get()
def __window2_rec(self):
# Appends SCL record.
options = 0
options |= (self.__show_formulas & 0x01) << 0
options |= (self.__show_grid & 0x01) << 1
options |= (self.__show_headers & 0x01) << 2
options |= (self.__panes_frozen & 0x01) << 3
options |= (self.show_zero_values & 0x01) << 4
options |= (self.__auto_colour_grid & 0x01) << 5
options |= (self.__cols_right_to_left & 0x01) << 6
options |= (self.__show_outline & 0x01) << 7
options |= (self.__remove_splits & 0x01) << 8
options |= (self.__selected & 0x01) << 9
options |= (self.__sheet_visible & 0x01) << 10
options |= (self.__page_preview & 0x01) << 11
if self.explicit_magn_setting:
# Experimentation: caller can set the scl magn.
# None -> no SCL record written
# Otherwise 10 <= scl_magn <= 400 or scl_magn == 0
# Note: value 0 means use 100 for normal view, 60 for page break preview
# BREAKING NEWS: Excel interprets scl_magn = 0 very literally, your
# sheet appears like a tiny dot on the screen
scl_magn = self.__scl_magn
else:
if self.__page_preview:
scl_magn = self.__preview_magn
magn_default = 60
else:
scl_magn = self.__normal_magn
magn_default = 100
if scl_magn == magn_default or scl_magn == 0:
# Emulate what we think MS does
scl_magn = None # don't write an SCL record
return BIFFRecords.Window2Record(
options, self.__first_visible_row, self.__first_visible_col,
self.__grid_colour,
self.__preview_magn, self.__normal_magn, scl_magn).get()
def __panes_rec(self):
if self.__vert_split_pos is None and self.__horz_split_pos is None:
return ""
if self.__vert_split_pos is None:
self.__vert_split_pos = 0
if self.__horz_split_pos is None:
self.__horz_split_pos = 0
if self.__panes_frozen:
if self.__vert_split_first_visible is None:
self.__vert_split_first_visible = self.__vert_split_pos
if self.__horz_split_first_visible is None:
self.__horz_split_first_visible = self.__horz_split_pos
# when frozen, the active pane has to be specifically set:
if self.__vert_split_pos > 0 and self.__horz_split_pos > 0:
active_pane = 0
elif self.__vert_split_pos > 0 and self.__horz_split_pos == 0:
active_pane = 1
elif self.__vert_split_pos == 0 and self.__horz_split_pos > 0:
active_pane = 2
else:
active_pane = 3
else:
if self.__vert_split_first_visible is None:
self.__vert_split_first_visible = 0
if self.__horz_split_first_visible is None:
self.__horz_split_first_visible = 0
if not self.split_position_units_are_twips:
# inspired by pyXLWriter
if self.__horz_split_pos > 0:
self.__horz_split_pos = 20 * self.__horz_split_pos + 255
if self.__vert_split_pos > 0:
self.__vert_split_pos = 113.879 * self.__vert_split_pos + 390
            # when split, the active pane can be set as required:
active_pane = self.active_pane
result = BIFFRecords.PanesRecord(*map(int, (
self.__vert_split_pos,
self.__horz_split_pos,
self.__horz_split_first_visible,
self.__vert_split_first_visible,
active_pane
))).get()
return result
def __row_blocks_rec(self):
result = []
for row in self.__rows.itervalues():
result.append(row.get_row_biff_data())
result.append(row.get_cells_biff_data())
return ''.join(result)
def __merged_rec(self):
return BIFFRecords.MergedCellsRecord(self.__merged_ranges).get()
def __bitmaps_rec(self):
return self.__bmp_rec
def __calc_settings_rec(self):
result = ''
result += BIFFRecords.CalcModeRecord(self.__calc_mode & 0x01).get()
result += BIFFRecords.CalcCountRecord(self.__calc_count & 0xFFFF).get()
result += BIFFRecords.RefModeRecord(self.__RC_ref_mode & 0x01).get()
result += BIFFRecords.IterationRecord(self.__iterations_on & 0x01).get()
result += BIFFRecords.DeltaRecord(self.__delta).get()
result += BIFFRecords.SaveRecalcRecord(self.__save_recalc & 0x01).get()
return result
def __print_settings_rec(self):
result = ''
result += BIFFRecords.PrintHeadersRecord(self.__print_headers).get()
result += BIFFRecords.PrintGridLinesRecord(self.__print_grid).get()
result += BIFFRecords.GridSetRecord(self.__grid_set).get()
result += BIFFRecords.HorizontalPageBreaksRecord(self.__horz_page_breaks).get()
result += BIFFRecords.VerticalPageBreaksRecord(self.__vert_page_breaks).get()
result += BIFFRecords.HeaderRecord(self.__header_str).get()
result += BIFFRecords.FooterRecord(self.__footer_str).get()
result += BIFFRecords.HCenterRecord(self.__print_centered_horz).get()
result += BIFFRecords.VCenterRecord(self.__print_centered_vert).get()
result += BIFFRecords.LeftMarginRecord(self.__left_margin).get()
result += BIFFRecords.RightMarginRecord(self.__right_margin).get()
result += BIFFRecords.TopMarginRecord(self.__top_margin).get()
result += BIFFRecords.BottomMarginRecord(self.__bottom_margin).get()
setup_page_options = (self.__print_in_rows & 0x01) << 0
setup_page_options |= (self.__portrait & 0x01) << 1
setup_page_options |= (0x00 & 0x01) << 2
setup_page_options |= (self.__print_not_colour & 0x01) << 3
setup_page_options |= (self.__print_draft & 0x01) << 4
setup_page_options |= (self.__print_notes & 0x01) << 5
setup_page_options |= (0x00 & 0x01) << 6
setup_page_options |= (0x01 & 0x01) << 7
setup_page_options |= (self.__print_notes_at_end & 0x01) << 9
setup_page_options |= (self.__print_omit_errors & 0x03) << 10
result += BIFFRecords.SetupPageRecord(self.__paper_size_code,
self.__print_scaling,
self.__start_page_number,
self.__fit_width_to_pages,
self.__fit_height_to_pages,
setup_page_options,
self.__print_hres,
self.__print_vres,
self.__header_margin,
self.__footer_margin,
self.__copies_num).get()
return result
def __protection_rec(self):
result = ''
result += BIFFRecords.ProtectRecord(self.__protect).get()
result += BIFFRecords.ScenProtectRecord(self.__scen_protect).get()
result += BIFFRecords.WindowProtectRecord(self.__wnd_protect).get()
result += BIFFRecords.ObjectProtectRecord(self.__obj_protect).get()
result += BIFFRecords.PasswordRecord(self.__password).get()
return result
def get_biff_data(self):
result = [
self.__bof_rec(),
self.__calc_settings_rec(),
self.__guts_rec(),
self.__defaultrowheight_rec(),
self.__wsbool_rec(),
self.__colinfo_rec(),
self.__dimensions_rec(),
self.__print_settings_rec(),
self.__protection_rec(),
]
if self.row_tempfile:
self.row_tempfile.flush()
self.row_tempfile.seek(0)
result.append(self.row_tempfile.read())
self.row_tempfile.seek(0, 2) # to EOF
# Above seek() is necessary to avoid a spurious IOError
# with Errno 0 if the caller continues on writing rows
# and flushing row data after the save().
# See http://bugs.python.org/issue3207
result.extend([
self.__row_blocks_rec(),
self.__merged_rec(),
self.__bitmaps_rec(),
self.__window2_rec(),
self.__panes_rec(),
self.__eof_rec(),
])
return ''.join(result)
def flush_row_data(self):
if self.row_tempfile is None:
self.row_tempfile = tempfile.TemporaryFile()
self.row_tempfile.write(self.__row_blocks_rec())
for rowx in self.__rows:
self.__flushed_rows[rowx] = 1
self.__update_row_visible_levels()
self.__rows = {}
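if __name__ == '__main__':
    # Minimal usage sketch (illustrative; not part of the original module).
    # Worksheet objects are normally created through Workbook.add_sheet()
    # rather than instantiated directly; this assumes the sibling Workbook
    # module of the xlwt package is importable.
    from Workbook import Workbook
    book = Workbook()
    sheet = book.add_sheet('demo')
    sheet.write(0, 0, 'header')
    sheet.write_merge(1, 1, 0, 2, 'merged across three columns')
    sheet.panes_frozen = True   # freeze the top row...
    sheet.horz_split_pos = 1    # ...by splitting below row 1
    book.save('demo.xls')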
|
wakatime/sketch-wakatime
|
refs/heads/master
|
WakaTime.sketchplugin/Contents/Resources/wakatime/projects/projectmap.py
|
7
|
# -*- coding: utf-8 -*-
"""
wakatime.projects.projectmap
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Use the ~/.wakatime.cfg file to set custom project names by matching files
with regex patterns. Project maps go under the [projectmap] config section.
For example:
[projectmap]
/home/user/projects/foo = new project name
/home/user/projects/bar(\d+)/ = project{0}
Will result in file `/home/user/projects/foo/src/main.c` to have
project name `new project name` and file `/home/user/projects/bar42/main.c`
to have project name `project42`.
:copyright: (c) 2013 Alan Hamlett.
:license: BSD, see LICENSE for more details.
"""
import logging
import os
import re
from .base import BaseProject
from ..compat import u
log = logging.getLogger('WakaTime')
class ProjectMap(BaseProject):
def process(self):
if not self._configs:
return False
self.project = self._find_project(self.path)
return self.project is not None
def _find_project(self, path):
path = os.path.realpath(path)
for pattern, new_proj_name in self._configs.items():
try:
compiled = re.compile(pattern, re.IGNORECASE)
match = compiled.search(path)
if match:
try:
return new_proj_name.format(*match.groups())
except IndexError as ex:
log.warning(u('Regex error ({msg}) for projectmap pattern: {pattern}').format(
msg=u(ex),
pattern=u(new_proj_name),
))
except re.error as ex:
log.warning(u('Regex error ({msg}) for projectmap pattern: {pattern}').format(
msg=u(ex),
pattern=u(pattern),
))
return None
def branch(self):
return None
def name(self):
return u(self.project) if self.project else None
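if __name__ == '__main__':
    # Illustrative sketch (not part of the original plugin): exercise the same
    # regex-plus-format logic that _find_project() applies, without needing a
    # ~/.wakatime.cfg file. The sample pattern mirrors the docstring example.
    configs = {r'/home/user/projects/bar(\d+)/': 'project{0}'}
    path = '/home/user/projects/bar42/main.c'
    for pattern, new_name in configs.items():
        match = re.compile(pattern, re.IGNORECASE).search(path)
        if match:
            print new_name.format(*match.groups())  # -> project42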
|
acourtney2015/boto
|
refs/heads/develop
|
boto/rds/optiongroup.py
|
180
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an OptionGroup
"""
from boto.rds.dbsecuritygroup import DBSecurityGroup
from boto.resultset import ResultSet
class OptionGroup(object):
"""
Represents an RDS option group
Properties reference available from the AWS documentation at
http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_OptionGroup.html
:ivar connection: :py:class:`boto.rds.RDSConnection` associated with the
current object
:ivar name: Name of the option group
:ivar description: The description of the option group
:ivar engine_name: The name of the database engine to use
:ivar major_engine_version: The major version number of the engine to use
:ivar allow_both_vpc_and_nonvpc: Indicates whether this option group can be
applied to both VPC and non-VPC instances.
The value ``True`` indicates the option
group can be applied to both VPC and
non-VPC instances.
:ivar vpc_id: If AllowsVpcAndNonVpcInstanceMemberships is 'false', this
field is blank. If AllowsVpcAndNonVpcInstanceMemberships is
``True`` and this field is blank, then this option group can
be applied to both VPC and non-VPC instances. If this field
contains a value, then this option group can only be applied
to instances that are in the VPC indicated by this field.
:ivar options: The list of :py:class:`boto.rds.optiongroup.Option` objects
associated with the group
"""
def __init__(self, connection=None, name=None, engine_name=None,
major_engine_version=None, description=None,
allow_both_vpc_and_nonvpc=False, vpc_id=None):
        self.connection = connection
        self.name = name
self.engine_name = engine_name
self.major_engine_version = major_engine_version
self.description = description
self.allow_both_vpc_and_nonvpc = allow_both_vpc_and_nonvpc
self.vpc_id = vpc_id
self.options = []
def __repr__(self):
return 'OptionGroup:%s' % self.name
def startElement(self, name, attrs, connection):
if name == 'Options':
self.options = ResultSet([
('Options', Option)
])
else:
return None
def endElement(self, name, value, connection):
if name == 'OptionGroupName':
self.name = value
elif name == 'EngineName':
self.engine_name = value
elif name == 'MajorEngineVersion':
self.major_engine_version = value
elif name == 'OptionGroupDescription':
self.description = value
elif name == 'AllowsVpcAndNonVpcInstanceMemberships':
if value.lower() == 'true':
self.allow_both_vpc_and_nonvpc = True
else:
self.allow_both_vpc_and_nonvpc = False
elif name == 'VpcId':
self.vpc_id = value
else:
setattr(self, name, value)
def delete(self):
return self.connection.delete_option_group(self.name)
class Option(object):
"""
    Describes an Option for use in an OptionGroup
:ivar name: The name of the option
:ivar description: The description of the option.
:ivar permanent: Indicate if this option is permanent.
:ivar persistent: Indicate if this option is persistent.
:ivar port: If required, the port configured for this option to use.
:ivar settings: The option settings for this option.
:ivar db_security_groups: If the option requires access to a port, then
this DB Security Group allows access to the port.
:ivar vpc_security_groups: If the option requires access to a port, then
this VPC Security Group allows access to the
port.
"""
def __init__(self, name=None, description=None, permanent=False,
persistent=False, port=None, settings=None,
db_security_groups=None, vpc_security_groups=None):
self.name = name
self.description = description
self.permanent = permanent
self.persistent = persistent
self.port = port
self.settings = settings
self.db_security_groups = db_security_groups
self.vpc_security_groups = vpc_security_groups
if self.settings is None:
self.settings = []
if self.db_security_groups is None:
self.db_security_groups = []
if self.vpc_security_groups is None:
self.vpc_security_groups = []
def __repr__(self):
return 'Option:%s' % self.name
def startElement(self, name, attrs, connection):
if name == 'OptionSettings':
self.settings = ResultSet([
('OptionSettings', OptionSetting)
])
elif name == 'DBSecurityGroupMemberships':
self.db_security_groups = ResultSet([
('DBSecurityGroupMemberships', DBSecurityGroup)
])
elif name == 'VpcSecurityGroupMemberships':
self.vpc_security_groups = ResultSet([
('VpcSecurityGroupMemberships', VpcSecurityGroup)
])
else:
return None
def endElement(self, name, value, connection):
if name == 'OptionName':
self.name = value
elif name == 'OptionDescription':
self.description = value
        elif name == 'Permanent':
            if value.lower() == 'true':
                self.permanent = True
            else:
                self.permanent = False
elif name == 'Persistent':
if value.lower() == 'true':
self.persistent = True
else:
self.persistent = False
elif name == 'Port':
self.port = int(value)
else:
setattr(self, name, value)
class OptionSetting(object):
"""
    Describes an OptionSetting for use in an Option
:ivar name: The name of the option that has settings that you can set.
:ivar description: The description of the option setting.
:ivar value: The current value of the option setting.
:ivar default_value: The default value of the option setting.
:ivar allowed_values: The allowed values of the option setting.
:ivar data_type: The data type of the option setting.
:ivar apply_type: The DB engine specific parameter type.
:ivar is_modifiable: A Boolean value that, when true, indicates the option
setting can be modified from the default.
:ivar is_collection: Indicates if the option setting is part of a
collection.
"""
def __init__(self, name=None, description=None, value=None,
default_value=False, allowed_values=None, data_type=None,
apply_type=None, is_modifiable=False, is_collection=False):
self.name = name
self.description = description
self.value = value
self.default_value = default_value
self.allowed_values = allowed_values
self.data_type = data_type
self.apply_type = apply_type
self.is_modifiable = is_modifiable
self.is_collection = is_collection
def __repr__(self):
return 'OptionSetting:%s' % self.name
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Name':
self.name = value
elif name == 'Description':
self.description = value
elif name == 'Value':
self.value = value
elif name == 'DefaultValue':
self.default_value = value
elif name == 'AllowedValues':
self.allowed_values = value
elif name == 'DataType':
self.data_type = value
elif name == 'ApplyType':
self.apply_type = value
elif name == 'IsModifiable':
if value.lower() == 'true':
self.is_modifiable = True
else:
self.is_modifiable = False
elif name == 'IsCollection':
if value.lower() == 'true':
self.is_collection = True
else:
self.is_collection = False
else:
setattr(self, name, value)
class VpcSecurityGroup(object):
"""
    Describes a VPC security group for use in an OptionGroup
"""
def __init__(self, vpc_id=None, status=None):
self.vpc_id = vpc_id
self.status = status
def __repr__(self):
return 'VpcSecurityGroup:%s' % self.vpc_id
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'VpcSecurityGroupId':
self.vpc_id = value
elif name == 'Status':
self.status = value
else:
setattr(self, name, value)
class OptionGroupOption(object):
"""
    Describes an OptionGroupOption for use in an OptionGroup
:ivar name: The name of the option
:ivar description: The description of the option.
:ivar engine_name: Engine name that this option can be applied to.
:ivar major_engine_version: Indicates the major engine version that the
option is available for.
:ivar min_minor_engine_version: The minimum required engine version for the
option to be applied.
:ivar permanent: Indicate if this option is permanent.
:ivar persistent: Indicate if this option is persistent.
:ivar port_required: Specifies whether the option requires a port.
:ivar default_port: If the option requires a port, specifies the default
port for the option.
:ivar settings: The option settings for this option.
:ivar depends_on: List of all options that are prerequisites for this
option.
"""
def __init__(self, name=None, description=None, engine_name=None,
major_engine_version=None, min_minor_engine_version=None,
permanent=False, persistent=False, port_required=False,
default_port=None, settings=None, depends_on=None):
self.name = name
self.description = description
self.engine_name = engine_name
self.major_engine_version = major_engine_version
self.min_minor_engine_version = min_minor_engine_version
self.permanent = permanent
self.persistent = persistent
self.port_required = port_required
self.default_port = default_port
self.settings = settings
self.depends_on = depends_on
if self.settings is None:
self.settings = []
if self.depends_on is None:
self.depends_on = []
def __repr__(self):
return 'OptionGroupOption:%s' % self.name
def startElement(self, name, attrs, connection):
if name == 'OptionGroupOptionSettings':
self.settings = ResultSet([
('OptionGroupOptionSettings', OptionGroupOptionSetting)
])
elif name == 'OptionsDependedOn':
self.depends_on = []
else:
return None
def endElement(self, name, value, connection):
if name == 'Name':
self.name = value
elif name == 'Description':
self.description = value
elif name == 'EngineName':
self.engine_name = value
elif name == 'MajorEngineVersion':
self.major_engine_version = value
elif name == 'MinimumRequiredMinorEngineVersion':
self.min_minor_engine_version = value
        elif name == 'Permanent':
            if value.lower() == 'true':
                self.permanent = True
            else:
                self.permanent = False
elif name == 'Persistent':
if value.lower() == 'true':
self.persistent = True
else:
self.persistent = False
elif name == 'PortRequired':
if value.lower() == 'true':
self.port_required = True
else:
self.port_required = False
elif name == 'DefaultPort':
self.default_port = int(value)
else:
setattr(self, name, value)
class OptionGroupOptionSetting(object):
"""
    Describes an OptionGroupOptionSetting for use in an OptionGroupOption.
:ivar name: The name of the option that has settings that you can set.
:ivar description: The description of the option setting.
:ivar value: The current value of the option setting.
:ivar default_value: The default value of the option setting.
:ivar allowed_values: The allowed values of the option setting.
:ivar data_type: The data type of the option setting.
:ivar apply_type: The DB engine specific parameter type.
:ivar is_modifiable: A Boolean value that, when true, indicates the option
setting can be modified from the default.
:ivar is_collection: Indicates if the option setting is part of a
collection.
"""
def __init__(self, name=None, description=None, default_value=False,
allowed_values=None, apply_type=None, is_modifiable=False):
self.name = name
self.description = description
self.default_value = default_value
self.allowed_values = allowed_values
self.apply_type = apply_type
self.is_modifiable = is_modifiable
def __repr__(self):
return 'OptionGroupOptionSetting:%s' % self.name
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'SettingName':
self.name = value
elif name == 'SettingDescription':
self.description = value
elif name == 'DefaultValue':
self.default_value = value
elif name == 'AllowedValues':
self.allowed_values = value
elif name == 'ApplyType':
self.apply_type = value
elif name == 'IsModifiable':
if value.lower() == 'true':
self.is_modifiable = True
else:
self.is_modifiable = False
else:
setattr(self, name, value)
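# Similarly illustrative (not part of boto; all values hypothetical): a
# single modifiable setting as the parser above would populate it.
#
#   s = OptionGroupOptionSetting(name='SQLNET.ENCRYPTION_SERVER',
#                                default_value='REQUESTED',
#                                allowed_values='ACCEPTED,REJECTED,REQUESTED,REQUIRED',
#                                apply_type='STATIC', is_modifiable=True)
#   repr(s)  # -> 'OptionGroupOptionSetting:SQLNET.ENCRYPTION_SERVER'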
|
belokop/indico_bare
|
refs/heads/master
|
indico/MaKaC/posterDesignConf.py
|
1
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from conference import Conference
from poster import PosterTemplateItem
class ConferenceDates:
def getArgumentType(cls):
return Conference
getArgumentType = classmethod (getArgumentType)
def getValue(cls, conf):
if conf.getStartDate().date() == conf.getEndDate().date():
return conf.getAdjustedStartDate().strftime("%a %d/%m/%Y %H:%M")
else:
return str(conf.getAdjustedStartDate().date()) + ' - ' + str(conf.getAdjustedEndDate().date())
getValue = classmethod (getValue)
class ConferenceLocation:
def getArgumentType(cls):
return Conference
getArgumentType = classmethod (getArgumentType)
def getValue(cls, conf):
return conf.as_event.venue_name
getValue = classmethod (getValue)
class ConferenceAddress:
def getArgumentType(cls):
return Conference
getArgumentType = classmethod (getArgumentType)
def getValue(cls, conf):
return conf.as_event.address
getValue = classmethod (getValue)
class LectureCategory:
def getArgumentType(cls):
return Conference
getArgumentType = classmethod (getArgumentType)
def getValue(cls, conf):
return conf.getOwner().getTitle()
getValue = classmethod (getValue)
class Organisers:
def getArgumentType(cls):
return Conference
getArgumentType = classmethod (getArgumentType)
def getValue(cls, conf):
return conf.getOrgText()
getValue = classmethod (getValue)
class ConferenceRoom:
def getArgumentType(cls):
return Conference
getArgumentType = classmethod (getArgumentType)
def getValue(cls, conf):
return conf.as_event.room_name
getValue = classmethod (getValue)
class ConferenceChairperson:
@classmethod
def getArgumentType(cls):
return Conference
@classmethod
def getValue(cls, conf):
return conf.as_event.person_links
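# Illustrative sketch (not part of Indico): a custom item class has to follow
# the same getArgumentType()/getValue() contract as the classes above. The
# 'ConferenceTitleUpper' class below is hypothetical.
#
#   class ConferenceTitleUpper:
#       @classmethod
#       def getArgumentType(cls):
#           return Conference
#       @classmethod
#       def getValue(cls, conf):
#           return conf.as_event.title.upper().encode('utf-8')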
class PosterDesignConfiguration:
""" This class has 2 objects:
-items_actions maps the name of an item to the action that should be taken
at the time it is drawed.
-groups organizes the item names into groups. These groups are used for the
<select> box in the WConfModifPosterDesign.tpl file.
"""
""" Dictionary that maps the name of an item to the action that should be taken
        at the time it is drawn.
An action can be:
-A method: depending on the class owning the method, a Conference object,
a Registrant object, or a PosterTemplateItem object should be passed to the method.
The method must return a string.
For example: 'Full Name' : Registrant.getFullName means that, if a posterTemplate
        has a 'Full Name' item, each time a poster is drawn, the full name of the
        registrant will be drawn as returned by the method getFullName of the class Registrant.
-A class: when there is no method already available for what we need, we have
to write a custom class (see classes above).
These classes must have 2 methods:
*it must have a getArgumentType() method, which returns either Conference, Registrant or PosterTemplateItem.
Depending on what is returned, we will pass a different object to the getValue() method.
*it must have a getValue(object) method, to which a Conference instance, a Registrant instance or a
PosterTemplateItem instance must be passed, depending on the result of the getArgumentType() method.
"""
def __init__(self):
self.items_actions = {
"Lecture Category": (_("Lecture Category"), LectureCategory),
"Lecture Name": (_("Lecture Name"), lambda x: x.as_event.title.encode('utf-8')),
"Lecture Date(s)": (_("Lecture Date(s)"), ConferenceDates),
"Speaker(s)": (_("Speaker(s)"), ConferenceChairperson),
"Description": (_("Description"), lambda x: x.as_event.description.encode('utf-8')),
"Location (name)": (_("Location (name)"), ConferenceLocation),
"Location (address)": (_("Location (address)"), ConferenceAddress),
"Location (room)": (_("Location (room)"), ConferenceRoom),
"Organisers": (_("Organisers"), Organisers),
"Fixed Text": (_("Fixed Text"), PosterTemplateItem.getFixedText)
}
""" Dictionary that maps group names to the item names that fall into that group.
The groups are only used for the <select> box in the WConfModifPosterDesign.tpl file.
"""
self.groups = [( _("Lecture Data"), ["Lecture Category", "Lecture Name", "Lecture Date(s)","Speaker(s)",
"Description", "Location (name)", "Location (address)", "Location (room)","Organisers"]),
( _("Fixed Elements"), ["Fixed Text"])]
|
SophieBartmann/Faust-Bot
|
refs/heads/master
|
FaustBot/Model/__init__.py
|
3
|
__author__ = 'Pups'
|
sdgdsffdsfff/Mobile-Security-Framework-MobSF
|
refs/heads/master
|
DynamicAnalyzer/tools/pyWebProxy/proxy.py
|
32
|
#!/usr/bin/env python
'''
owtf is an OWASP+PTES-focused try to unite great tools & facilitate pentesting
Copyright (c) 2013, Abraham Aranguren <name.surname@gmail.com> http://7-a.org
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright owner nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Inbound Proxy Module developed by Bharadwaj Machiraju (blog.tunnelshade.in)
# as a part of Google Summer of Code 2013
'''
import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.web
import tornado.httpclient
import tornado.escape
import tornado.httputil
import tornado.options
import tornado.template
import tornado.websocket
import tornado.gen
import socket
import ssl
import os
import datetime
import uuid
import re,sys
from multiprocessing import Process, Value, Lock
from socket_wrapper import wrap_socket
LOG=''
# This function creates request/response log entries
def Logz(request,response,log):
TRAFFIC=''
rdat=''
dat=response.request.body if response.request.body else ''
TRAFFIC+= "\n\nREQUEST: " + str(response.request.method)+ " " + str(response.request.url) + '\n'
    for header, value in list(response.request.headers.items()):
TRAFFIC+= header + ": " + value +"\n"
TRAFFIC+= "\n\n" + str(dat) + "\n"
TRAFFIC+= "\n\nRESPONSE: " +str(response.code) + " " + str(response.reason) + "\n"
for header, value in list(response.headers.items()):
TRAFFIC+= header + ": " + value + "\n"
if "content-type" in header.lower():
if re.findall("json|xml|application\/javascript",value.lower()):
rdat=request.response_buffer
else:
rdat=''
TRAFFIC+= "\n\n" +str(rdat) + "\n"
#print TRAFFIC
with open(log,'a') as f:
f.write(TRAFFIC)
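# Illustrative excerpt of what Logz appends to the log file (all values
# hypothetical):
#
#   REQUEST: GET http://example.com/api/items
#   Host: example.com
#   ...request body...
#
#   RESPONSE: 200 OK
#   Content-Type: application/json
#   ...response body (only logged for json/xml/javascript content types)...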
class ProxyHandler(tornado.web.RequestHandler):
"""
This RequestHandler processes all the requests that the application received
"""
SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT', 'HEAD', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']
def __new__(cls, application, request, **kwargs):
# http://stackoverflow.com/questions/3209233/how-to-replace-an-instance-in-init-with-a-different-object
# Based on upgrade header, websocket request handler must be used
try:
if request.headers['Upgrade'].lower() == 'websocket':
return CustomWebSocketHandler(application, request, **kwargs)
except KeyError:
pass
return tornado.web.RequestHandler.__new__(cls, application, request, **kwargs)
def set_default_headers(self):
# This is automatically called by Tornado :P
# XD Using this to remove "Server" header set by tornado
del self._headers["Server"]
def set_status(self, status_code, reason=None):
"""
Sets the status code for our response.
Overriding is done so as to handle unknown
response codes gracefully.
"""
self._status_code = status_code
if reason is not None:
self._reason = tornado.escape.native_str(reason)
else:
try:
self._reason = tornado.httputil.responses[status_code]
except KeyError:
self._reason = tornado.escape.native_str("Server Not Found")
# This function writes a new response & caches it
def finish_response(self, response):
Logz(self.request,response,LOG)
self.set_status(response.code)
for header, value in list(response.headers.items()):
if header == "Set-Cookie":
self.add_header(header, value)
else:
if header not in restricted_response_headers:
self.set_header(header, value)
self.finish()
# This function is a callback when a small chunk is received
def handle_data_chunk(self, data):
if data:
self.write(data)
self.request.response_buffer += data
@tornado.web.asynchronous
@tornado.gen.coroutine
def get(self):
"""
* This function handles all requests except the connect request.
* Once ssl stream is formed between browser and proxy, the requests are
then processed by this function
"""
# The flow starts here
self.request.response_buffer = ''
# The requests that come through ssl streams are relative requests, so transparent
# proxying is required. The following snippet decides the url that should be passed
# to the async client
if self.request.uri.startswith(self.request.protocol,0): # Normal Proxy Request
self.request.url = self.request.uri
else: # Transparent Proxy Request
self.request.url = self.request.protocol + "://" + self.request.host
if self.request.uri != '/': # Add uri only if needed
self.request.url += self.request.uri
# Request header cleaning
for header in restricted_request_headers:
try:
del self.request.headers[header]
except:
continue
# httprequest object is created and then passed to async client with a callback
request = tornado.httpclient.HTTPRequest(
url=self.request.url,
method=self.request.method,
body=self.request.body if self.request.body else None,
headers=self.request.headers,
follow_redirects=False,
use_gzip=True,
streaming_callback=self.handle_data_chunk,
header_callback=None,
proxy_host=self.application.outbound_ip,
proxy_port=self.application.outbound_port,
proxy_username=self.application.outbound_username,
proxy_password=self.application.outbound_password,
allow_nonstandard_methods=True,
validate_cert=False)
response = yield tornado.gen.Task(self.application.async_client.fetch, request)
self.finish_response(response)
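    # Illustrative examples of the URL reconstruction above (hostnames are
    # hypothetical):
    #   normal proxy request:      uri = 'http://example.com/a'
    #                              -> url = 'http://example.com/a'
    #   transparent (ssl) request: protocol = 'https', host = 'example.com',
    #                              uri = '/a' -> url = 'https://example.com/a'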
    # The following 6 methods can be handled through the above implementation
@tornado.web.asynchronous
def post(self):
return self.get()
@tornado.web.asynchronous
def head(self):
return self.get()
@tornado.web.asynchronous
def put(self):
return self.get()
@tornado.web.asynchronous
def delete(self):
return self.get()
@tornado.web.asynchronous
def options(self):
return self.get()
@tornado.web.asynchronous
def trace(self):
return self.get()
@tornado.web.asynchronous
def connect(self):
"""
This function gets called when a connect request is received.
* The host and port are obtained from the request uri
* A socket is created, wrapped in ssl and then added to SSLIOStream
* This stream is used to connect to speak to the remote host on given port
* If the server speaks ssl on that port, callback start_tunnel is called
* An OK response is written back to client
* The client side socket is wrapped in ssl
* If the wrapping is successful, a new SSLIOStream is made using that socket
* The stream is added back to the server for monitoring
"""
host, port = self.request.uri.split(':')
def start_tunnel():
try:
base=os.path.dirname(os.path.realpath(__file__))
ca_crt=os.path.join(base,"ca.crt")
ca_key=os.path.join(base,"ca.key")
self.request.connection.stream.write(b"HTTP/1.1 200 Connection established\r\n\r\n")
wrap_socket(
self.request.connection.stream.socket,
host,
ca_crt,
ca_key,
"mobsec-yso",
"logs",
success=ssl_success
)
except tornado.iostream.StreamClosedError:
pass
def ssl_success(client_socket):
client = tornado.iostream.SSLIOStream(client_socket)
server.handle_stream(client, self.application.inbound_ip)
# Tiny Hack to satisfy proxychains CONNECT request to HTTP port.
        # The HTTPS failure check has to be improved
def ssl_fail():
self.request.connection.stream.write(b"HTTP/1.1 200 Connection established\r\n\r\n")
server.handle_stream(self.request.connection.stream, self.application.inbound_ip)
######
# Hacking to be done here, so as to check for ssl using proxy and auth
try:
s = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0))
upstream = tornado.iostream.SSLIOStream(s)
#start_tunnel()
upstream.set_close_callback(ssl_fail)
upstream.connect((host, int(port)), start_tunnel)
except Exception:
self.finish()
class CustomWebSocketHandler(tornado.websocket.WebSocketHandler):
"""
* See docs XD
* This class is used for handling websocket traffic.
* Object of this class replaces the main request handler for a request with
header => "Upgrade: websocket"
* wss:// - CONNECT request is handled by main handler
"""
def upstream_connect(self, io_loop=None, callback=None):
"""
Implemented as a custom alternative to tornado.websocket.websocket_connect
"""
# io_loop is needed, how else will it work with tornado :P
if io_loop is None:
io_loop = tornado.ioloop.IOLoop.current()
# During secure communication, we get relative URI, so make them absolute
if self.request.uri.startswith(self.request.protocol,0): # Normal Proxy Request
self.request.url = self.request.uri
else: # Transparent Proxy Request
self.request.url = self.request.protocol + "://" + self.request.host + self.request.uri
# WebSocketClientConnection expects ws:// & wss://
self.request.url = self.request.url.replace("http", "ws", 1)
# Have to add cookies and stuff
request_headers = tornado.httputil.HTTPHeaders()
for name, value in self.request.headers.iteritems():
if name not in restricted_request_headers:
request_headers.add(name, value)
# Build a custom request
request = tornado.httpclient.HTTPRequest(
url=self.request.url,
headers=request_headers,
proxy_host=self.application.outbound_ip,
proxy_port=self.application.outbound_port,
proxy_username=self.application.outbound_username,
proxy_password=self.application.outbound_password
)
self.upstream_connection = CustomWebSocketClientConnection(io_loop, request)
if callback is not None:
io_loop.add_future(self.upstream_connection.connect_future, callback)
return self.upstream_connection.connect_future # This returns a future
def _execute(self, transforms, *args, **kwargs):
"""
Overriding of a method of WebSocketHandler
"""
def start_tunnel(future):
"""
A callback which is called when connection to url is successful
"""
self.upstream = future.result() # We need upstream to write further messages
self.handshake_request = self.upstream_connection.request # HTTPRequest needed for caching :P
self.handshake_request.response_buffer = "" # Needed for websocket data & compliance with cache_handler stuff
self.handshake_request.version = "HTTP/1.1" # Tiny hack to protect caching (But according to websocket standards)
self.handshake_request.body = self.handshake_request.body or "" # I dont know why a None is coming :P
tornado.websocket.WebSocketHandler._execute(self, transforms, *args, **kwargs) # The regular procedures are to be done
# We try to connect to provided URL & then we proceed with connection on client side.
self.upstream = self.upstream_connect(callback=start_tunnel)
def store_upstream_data(self, message):
"""
Save websocket data sent from client to server, i.e add it to HTTPRequest.response_buffer with direction (>>)
"""
try: # Cannot write binary content as a string, so catch it
self.handshake_request.response_buffer += (">>> %s\r\n"%(message))
except TypeError:
self.handshake_request.response_buffer += (">>> May be binary\r\n")
def store_downstream_data(self, message):
"""
Save websocket data sent from client to server, i.e add it to HTTPRequest.response_buffer with direction (<<)
"""
try: # Cannot write binary content as a string, so catch it
self.handshake_request.response_buffer += ("<<< %s\r\n"%(message))
except TypeError:
self.handshake_request.response_buffer += ("<<< May be binary\r\n")
def on_message(self, message):
"""
        Every time a message is received from the client side, this instance method is called
"""
self.upstream.write_message(message) # The obtained message is written to upstream
self.store_upstream_data(message)
# The following check ensures that if a callback is added for reading message from upstream, another one is not added
if not self.upstream.read_future:
self.upstream.read_message(callback=self.on_response) # A callback is added to read the data when upstream responds
def on_response(self, message):
"""
        A callback when a message is received from upstream
*** Here message is a future
"""
# The following check ensures that if a callback is added for reading message from upstream, another one is not added
if not self.upstream.read_future:
self.upstream.read_message(callback=self.on_response)
if self.ws_connection: # Check if connection still exists
if message.result(): # Check if it is not NULL ( Indirect checking of upstream connection )
self.write_message(message.result()) # Write obtained message to client
self.store_downstream_data(message.result())
else:
self.close()
def on_close(self):
"""
Called when websocket is closed. So handshake request-response pair along with websocket data as response body is saved
"""
# Required for cache_handler
self.handshake_response = tornado.httpclient.HTTPResponse(
self.handshake_request,
self.upstream_connection.code,
headers=self.upstream_connection.headers,
request_time=0
)
        # Close the file descriptor
class CustomWebSocketClientConnection(tornado.websocket.WebSocketClientConnection):
# Had to extract response code, so it is necessary to override
def _handle_1xx(self, code):
self.code = code
super(CustomWebSocketClientConnection, self)._handle_1xx(code)
# The tornado application, which is used to pass variables to request handler
application = tornado.web.Application(handlers=[
(r'.*', ProxyHandler)
],
debug=False,
gzip=True,
)
application.async_client = tornado.httpclient.AsyncHTTPClient()
instances = "1"
# SSL MiTM
# SSL certs, keys and other settings (os.path.expanduser because they are stored in users home directory ~/.owtf/proxy )
application.outbound_ip = None
application.outbound_port = None
application.outbound_username = None
application.outbound_password = None
application.inbound_ip="0.0.0.0"
#try: # Ensure CA.crt and Key exist
#assert os.path.exists(application.ca_cert)
#assert os.path.exists(application.ca_key)
#except AssertionError:
#print ("Files required for SSL MiTM are missing. Please run the install script")
# Server has to be global, because it is used inside request handler to attach sockets for monitoring
global server
server = tornado.httpserver.HTTPServer(application)
# Header filters
# Restricted headers are picked from framework/config/framework_config.cfg
# These headers are removed from the response obtained from webserver, before sending it to browser
global restricted_response_headers
rresh=["Content-Length","Content-Encoding","Etag","Transfer-Encoding","Connection","Vary","Accept-Ranges","Pragma"]
restricted_response_headers = rresh
# These headers are removed from request obtained from browser, before sending it to webserver
global restricted_request_headers
rreqh=["Connection","Pragma","Cache-Control","If-Modified-Since"]
restricted_request_headers = rreqh
# "0" equals the number of cores present in a machine
if len(sys.argv)==4:
LOG=sys.argv[3]
try:
server.bind(sys.argv[2], address=sys.argv[1])
# Useful for using custom loggers because of relative paths in secure requests
# http://www.joet3ch.com/blog/2011/09/08/alternative-tornado-logging/
        #tornado.options.parse_command_line(args=["dummy_arg","--log_file_prefix="+application.Core.DB.Config.Get("PROXY_LOG"),"--logging=info"])
tornado.options.parse_command_line(args=["dummy_arg","--log_file_prefix=logs/proxy.log","--logging=info"])
# To run any number of instances
server.start(int(1))
tornado.ioloop.IOLoop.instance().start()
except Exception as e:
print "[WebProxy Error] "+str(e)
else:
print "proxy.py <IP> <PORT> <LOGFILE>"
|
rgeleta/odoo
|
refs/heads/8.0
|
openerp/cli/__init__.py
|
185
|
import logging
import sys
import os
import openerp
from openerp import tools
from openerp.modules import module
_logger = logging.getLogger(__name__)
commands = {}
class CommandType(type):
def __init__(cls, name, bases, attrs):
super(CommandType, cls).__init__(name, bases, attrs)
        name = getattr(cls, 'name', cls.__name__.lower())
cls.name = name
if name != 'command':
commands[name] = cls
class Command(object):
"""Subclass this class to define new openerp subcommands """
__metaclass__ = CommandType
def run(self, args):
pass
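# Illustrative sketch (not part of this module): a new subcommand only needs
# to subclass Command; CommandType registers it in `commands` under its
# lowercased class name. The command name 'greet' is hypothetical.
#
#   class Greet(Command):
#       """Print a greeting"""
#       def run(self, args):
#           print "hello", args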
class Help(Command):
"""Display the list of available commands"""
def run(self, args):
print "Available commands:\n"
padding = max([len(k) for k in commands.keys()]) + 2
for k, v in commands.items():
print " %s%s" % (k.ljust(padding, ' '), v.__doc__ or '')
print "\nUse '%s <command> --help' for individual command help." % sys.argv[0].split(os.path.sep)[-1]
import server
import deploy
import scaffold
import start
def main():
args = sys.argv[1:]
# The only shared option is '--addons-path=' needed to discover additional
# commands from modules
if len(args) > 1 and args[0].startswith('--addons-path=') and not args[1].startswith("-"):
# parse only the addons-path, do not setup the logger...
tools.config._parse_config([args[0]])
args = args[1:]
# Default legacy command
command = "server"
# Subcommand discovery
if len(args) and not args[0].startswith("-"):
logging.disable(logging.CRITICAL)
for m in module.get_modules():
m_path = module.get_module_path(m)
if os.path.isdir(os.path.join(m_path, 'cli')):
__import__('openerp.addons.' + m)
logging.disable(logging.NOTSET)
command = args[0]
args = args[1:]
if command in commands:
o = commands[command]()
o.run(args)
# vim:et:ts=4:sw=4:
|
jbedorf/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/tridiagonal_solve_op_test.py
|
3
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.linalg.linalg_impl.tridiagonal_solve."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops.linalg import linalg_impl
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
_sample_diags = np.array([[2, 1, 4, 0], [1, 3, 2, 2], [0, 1, -1, 1]])
_sample_rhs = np.array([1, 2, 3, 4])
_sample_result = np.array([-9, 5, -4, 4])
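# The compact format stores (superdiagonal, diagonal, subdiagonal), padded
# with an ignored trailing/leading element respectively, so _sample_diags
# encodes the 4x4 matrix
#   [[1,  2, 0, 0],
#    [1,  3, 1, 0],
#    [0, -1, 2, 4],
#    [0,  0, 1, 2]]
# which is the same matrix used in the "matrix" format tests below.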
def _tfconst(array):
return constant_op.constant(array, dtypes.float64)
def _tf_ones(shape):
return array_ops.ones(shape, dtype=dtypes.float64)
class TridiagonalSolveOpTest(test.TestCase):
def _test(self,
diags,
rhs,
expected,
diags_format="compact",
transpose_rhs=False,
conjugate_rhs=False):
with self.cached_session(use_gpu=False):
result = linalg_impl.tridiagonal_solve(diags, rhs, diags_format,
transpose_rhs, conjugate_rhs)
self.assertAllClose(self.evaluate(result), expected)
def _testWithLists(self,
diags,
rhs,
expected,
diags_format="compact",
transpose_rhs=False,
conjugate_rhs=False):
self._test(
_tfconst(diags), _tfconst(rhs), _tfconst(expected), diags_format,
transpose_rhs, conjugate_rhs)
def _assertRaises(self, diags, rhs, diags_format="compact"):
with self.assertRaises(ValueError):
linalg_impl.tridiagonal_solve(diags, rhs, diags_format)
# Tests with various dtypes
def testReal(self):
for dtype in dtypes.float32, dtypes.float64:
self._test(
diags=constant_op.constant(_sample_diags, dtype),
rhs=constant_op.constant(_sample_rhs, dtype),
expected=constant_op.constant(_sample_result, dtype))
def testComplex(self):
for dtype in dtypes.complex64, dtypes.complex128:
self._test(
diags=constant_op.constant(_sample_diags, dtype) * (1 + 1j),
rhs=constant_op.constant(_sample_rhs, dtype) * (1 - 1j),
expected=constant_op.constant(_sample_result, dtype) * (1 - 1j) /
(1 + 1j))
# Tests with small matrix sizes
def test3x3(self):
self._testWithLists(
diags=[[2, -1, 0], [1, 3, 1], [0, -1, -2]],
rhs=[1, 2, 3],
expected=[-3, 2, 7])
def test2x2(self):
self._testWithLists(
diags=[[2, 0], [1, 3], [0, 1]], rhs=[1, 4], expected=[-5, 3])
def test1x1(self):
self._testWithLists(diags=[[0], [3], [0]], rhs=[6], expected=[2])
def test0x0(self):
self._test(
diags=constant_op.constant(0, shape=(3, 0), dtype=dtypes.float32),
rhs=constant_op.constant(0, shape=(0, 1), dtype=dtypes.float32),
expected=constant_op.constant(0, shape=(0, 1), dtype=dtypes.float32))
# Other edge cases
def testCaseRequiringPivoting(self):
# Without partial pivoting (e.g. Thomas algorithm) this would fail.
self._testWithLists(
diags=[[2, -1, 1, 0], [1, 4, 1, -1], [0, 2, -2, 3]],
rhs=[1, 2, 3, 4],
expected=[8, -3.5, 0, -4])
def testCaseRequiringPivotingLastRows(self):
self._testWithLists(
diags=[[2, 1, -1, 0], [1, -1, 2, 1], [0, 1, -6, 1]],
rhs=[1, 2, -1, -2],
expected=[5, -2, -5, 3])
def testNotInvertible(self):
with self.assertRaises(errors_impl.InvalidArgumentError):
self._testWithLists(
diags=[[2, -1, 1, 0], [1, 4, 1, -1], [0, 2, 0, 3]],
rhs=[1, 2, 3, 4],
expected=[8, -3.5, 0, -4])
def testDiagonal(self):
self._testWithLists(
diags=[[0, 0, 0, 0], [1, 2, -1, -2], [0, 0, 0, 0]],
rhs=[1, 2, 3, 4],
expected=[1, 1, -3, -2])
def testUpperTriangular(self):
self._testWithLists(
diags=[[2, 4, -1, 0], [1, 3, 1, 2], [0, 0, 0, 0]],
rhs=[1, 6, 4, 4],
expected=[13, -6, 6, 2])
def testLowerTriangular(self):
self._testWithLists(
diags=[[0, 0, 0, 0], [2, -1, 3, 1], [0, 1, 4, 2]],
rhs=[4, 5, 6, 1],
expected=[2, -3, 6, -11])
# Multiple right-hand sides and batching
def testWithTwoRightHandSides(self):
self._testWithLists(
diags=_sample_diags,
rhs=np.transpose([_sample_rhs, 2 * _sample_rhs]),
expected=np.transpose([_sample_result, 2 * _sample_result]))
def testBatching(self):
self._testWithLists(
diags=np.array([_sample_diags, -_sample_diags]),
rhs=np.array([_sample_rhs, 2 * _sample_rhs]),
expected=np.array([_sample_result, -2 * _sample_result]))
def testBatchingAndTwoRightHandSides(self):
rhs = np.transpose([_sample_rhs, 2 * _sample_rhs])
expected_result = np.transpose([_sample_result, 2 * _sample_result])
self._testWithLists(
diags=np.array([_sample_diags, -_sample_diags]),
rhs=np.array([rhs, 2 * rhs]),
expected=np.array([expected_result, -2 * expected_result]))
# Various input formats
def testSequenceFormat(self):
self._test(
diags=(_tfconst([2, 1, 4]), _tfconst([1, 3, 2, 2]), _tfconst([1, -1,
1])),
rhs=_tfconst([1, 2, 3, 4]),
expected=_tfconst([-9, 5, -4, 4]),
diags_format="sequence")
def testSequenceFormatWithDummyElements(self):
dummy = 20
self._test(
diags=(_tfconst([2, 1, 4, dummy]), _tfconst([1, 3, 2, 2]),
_tfconst([dummy, 1, -1, 1])),
rhs=_tfconst([1, 2, 3, 4]),
expected=_tfconst([-9, 5, -4, 4]),
diags_format="sequence")
def testSequenceFormatWithBatching(self):
self._test(
diags=(_tfconst([[2, 1, 4], [-2, -1, -4]]),
_tfconst([[1, 3, 2, 2], [-1, -3, -2, -2]]),
_tfconst([[1, -1, 1], [-1, 1, -1]])),
rhs=_tfconst([[1, 2, 3, 4], [1, 2, 3, 4]]),
expected=_tfconst([[-9, 5, -4, 4], [9, -5, 4, -4]]),
diags_format="sequence")
def testMatrixFormat(self):
self._testWithLists(
diags=[[1, 2, 0, 0], [1, 3, 1, 0], [0, -1, 2, 4], [0, 0, 1, 2]],
rhs=[1, 2, 3, 4],
expected=[-9, 5, -4, 4],
diags_format="matrix")
def testMatrixFormatWithMultipleRightHandSides(self):
self._testWithLists(
diags=[[1, 2, 0, 0], [1, 3, 1, 0], [0, -1, 2, 4], [0, 0, 1, 2]],
rhs=[[1, -1], [2, -2], [3, -3], [4, -4]],
expected=[[-9, 9], [5, -5], [-4, 4], [4, -4]],
diags_format="matrix")
def testMatrixFormatWithBatching(self):
self._testWithLists(
diags=[[[1, 2, 0, 0], [1, 3, 1, 0], [0, -1, 2, 4], [0, 0, 1, 2]],
[[-1, -2, 0, 0], [-1, -3, -1, 0], [0, 1, -2, -4], [0, 0, -1,
-2]]],
rhs=[[1, 2, 3, 4], [1, 2, 3, 4]],
expected=[[-9, 5, -4, 4], [9, -5, 4, -4]],
diags_format="matrix")
def testRightHandSideAsColumn(self):
self._testWithLists(
diags=_sample_diags,
rhs=np.transpose([_sample_rhs]),
expected=np.transpose([_sample_result]),
diags_format="compact")
# Tests with transpose and adjoint
def testTransposeRhs(self):
self._testWithLists(
diags=_sample_diags,
rhs=np.array([_sample_rhs, 2 * _sample_rhs]),
expected=np.array([_sample_result, 2 * _sample_result]),
transpose_rhs=True)
def testConjugateRhs(self):
self._testWithLists(
diags=_sample_diags,
rhs=np.transpose([_sample_rhs * (1 + 1j), _sample_rhs * (1 - 2j)]),
expected=np.transpose(
[_sample_result * (1 - 1j), _sample_result * (1 + 2j)]),
conjugate_rhs=True)
def testAdjointRhs(self):
self._testWithLists(
diags=_sample_diags,
rhs=np.array([_sample_rhs * (1 + 1j), _sample_rhs * (1 - 2j)]),
expected=np.array(
[_sample_result * (1 - 1j), _sample_result * (1 + 2j)]),
transpose_rhs=True,
conjugate_rhs=True)
def testTransposeRhsWithBatching(self):
self._testWithLists(
diags=np.array([_sample_diags, -_sample_diags]),
rhs=np.array([[_sample_rhs, 2 * _sample_rhs],
[3 * _sample_rhs, 4 * _sample_rhs]]),
expected=np.array([[_sample_result, 2 * _sample_result],
[-3 * _sample_result, -4 * _sample_result]]),
transpose_rhs=True)
def testTransposeRhsWithRhsAsVector(self):
self._testWithLists(
diags=_sample_diags,
rhs=_sample_rhs,
expected=_sample_result,
transpose_rhs=True)
def testConjugateRhsWithRhsAsVector(self):
self._testWithLists(
diags=_sample_diags,
rhs=_sample_rhs * (1 + 1j),
expected=_sample_result * (1 - 1j),
conjugate_rhs=True)
def testTransposeRhsWithRhsAsVectorAndBatching(self):
self._testWithLists(
diags=np.array([_sample_diags, -_sample_diags]),
rhs=np.array([_sample_rhs, 2 * _sample_rhs]),
expected=np.array([_sample_result, -2 * _sample_result]),
transpose_rhs=True)
# Invalid input shapes
def testInvalidShapesCompactFormat(self):
def test_raises(diags_shape, rhs_shape):
self._assertRaises(_tf_ones(diags_shape), _tf_ones(rhs_shape), "compact")
test_raises((5, 4, 4), (5, 4))
test_raises((5, 3, 4), (4, 5))
    test_raises((5, 3, 4), (5,))
    test_raises((5,), (5, 4))
def testInvalidShapesSequenceFormat(self):
def test_raises(diags_tuple_shapes, rhs_shape):
diagonals = tuple(_tf_ones(shape) for shape in diags_tuple_shapes)
self._assertRaises(diagonals, _tf_ones(rhs_shape), "sequence")
test_raises(((5, 4), (5, 4)), (5, 4))
test_raises(((5, 4), (5, 4), (5, 6)), (5, 4))
test_raises(((5, 3), (5, 4), (5, 6)), (5, 4))
test_raises(((5, 6), (5, 4), (5, 3)), (5, 4))
test_raises(((5, 4), (7, 4), (5, 4)), (5, 4))
test_raises(((5, 4), (7, 4), (5, 4)), (3, 4))
def testInvalidShapesMatrixFormat(self):
def test_raises(diags_shape, rhs_shape):
self._assertRaises(_tf_ones(diags_shape), _tf_ones(rhs_shape), "matrix")
test_raises((5, 4, 7), (5, 4))
test_raises((5, 4, 4), (3, 4))
test_raises((5, 4, 4), (5, 3))
# Tests with placeholders
def _testWithPlaceholders(self,
diags_shape,
rhs_shape,
diags_feed,
rhs_feed,
expected,
diags_format="compact"):
if context.executing_eagerly():
return
diags = array_ops.placeholder(dtypes.float64, shape=diags_shape)
rhs = array_ops.placeholder(dtypes.float64, shape=rhs_shape)
x = linalg_impl.tridiagonal_solve(diags, rhs, diags_format)
with self.cached_session(use_gpu=False) as sess:
result = sess.run(x, feed_dict={diags: diags_feed, rhs: rhs_feed})
self.assertAllClose(result, expected)
def testCompactFormatAllDimsUnknown(self):
self._testWithPlaceholders(
diags_shape=[None, None],
rhs_shape=[None],
diags_feed=_sample_diags,
rhs_feed=_sample_rhs,
expected=_sample_result)
def testCompactFormatUnknownMatrixSize(self):
self._testWithPlaceholders(
diags_shape=[3, None],
rhs_shape=[4],
diags_feed=_sample_diags,
rhs_feed=_sample_rhs,
expected=_sample_result)
def testCompactFormatUnknownRhsCount(self):
self._testWithPlaceholders(
diags_shape=[3, 4],
rhs_shape=[4, None],
diags_feed=_sample_diags,
rhs_feed=np.transpose([_sample_rhs, 2 * _sample_rhs]),
expected=np.transpose([_sample_result, 2 * _sample_result]))
def testCompactFormatUnknownBatchSize(self):
self._testWithPlaceholders(
diags_shape=[None, 3, 4],
rhs_shape=[None, 4],
diags_feed=np.array([_sample_diags, -_sample_diags]),
rhs_feed=np.array([_sample_rhs, 2 * _sample_rhs]),
expected=np.array([_sample_result, -2 * _sample_result]))
def testMatrixFormatWithUnknownDims(self):
if context.executing_eagerly():
return
def test_with_matrix_shapes(matrix_shape):
matrix = np.array([[1, 2, 0, 0], [1, 3, 1, 0], [0, -1, 2, 4],
[0, 0, 1, 2]])
rhs = np.array([1, 2, 3, 4])
x = np.array([-9, 5, -4, 4])
self._testWithPlaceholders(
diags_shape=matrix_shape,
rhs_shape=[None, None],
diags_feed=matrix,
rhs_feed=np.transpose([rhs, 2 * rhs]),
expected=np.transpose([x, 2 * x]),
diags_format="matrix")
test_with_matrix_shapes(matrix_shape=[4, 4])
test_with_matrix_shapes(matrix_shape=[None, 4])
test_with_matrix_shapes(matrix_shape=[4, None])
with self.assertRaises(ValueError):
test_with_matrix_shapes(matrix_shape=[None, None])
def testSequenceFormatWithUnknownDims(self):
if context.executing_eagerly():
return
superdiag = array_ops.placeholder(dtypes.float64, shape=[None])
diag = array_ops.placeholder(dtypes.float64, shape=[None])
subdiag = array_ops.placeholder(dtypes.float64, shape=[None])
rhs = array_ops.placeholder(dtypes.float64, shape=[None])
x = linalg_impl.tridiagonal_solve((superdiag, diag, subdiag),
rhs,
diagonals_format="sequence")
with self.cached_session(use_gpu=False) as sess:
result = sess.run(
x,
feed_dict={
subdiag: [20, 1, -1, 1],
diag: [1, 3, 2, 2],
superdiag: [2, 1, 4, 20],
rhs: [1, 2, 3, 4]
})
self.assertAllClose(result, [-9, 5, -4, 4])
# Benchmark
class TridiagonalSolveBenchmark(test.Benchmark):
sizes = [(100000, 1, 1), (1000000, 1, 1), (10000000, 1, 1), (100000, 10, 1),
(100000, 100, 1), (10000, 1, 100), (10000, 1, 1000),
(10000, 1, 10000)]
def _generateData(self, matrix_size, batch_size, num_rhs, seed=42):
data = random_ops.random_normal(
shape=(batch_size, 3 + num_rhs, matrix_size),
dtype=dtypes.float64,
seed=seed)
diags = array_ops.stack([data[:, 0], data[:, 1], data[:, 2]], axis=-2)
rhs = data[:, 3:, :]
return diags, rhs
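  # Layout of `data` along axis 1: rows 0..2 hold the three diagonals in
  # compact format; the remaining `num_rhs` rows are right-hand sides laid
  # out as (num_rhs, matrix_size), hence transpose_rhs=True below.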
def benchmarkTridiagonalSolveOp(self):
for matrix_size, batch_size, num_rhs in self.sizes:
with ops.Graph().as_default(), \
session.Session(config=benchmark.benchmark_config()) as sess, \
ops.device("/cpu:0"):
diags, rhs = self._generateData(matrix_size, batch_size, num_rhs)
x = linalg_impl.tridiagonal_solve(diags, rhs, transpose_rhs=True)
variables.global_variables_initializer().run()
self.run_op_benchmark(
sess,
control_flow_ops.group(x),
min_iters=10,
store_memory_usage=False,
name=("tridiagonal_solve_matrix_size_{}_batch_size_{}_"
"num_rhs_{}").format(matrix_size, batch_size, num_rhs))
if __name__ == "__main__":
test.main()
|
vismartltd/edx-platform
|
refs/heads/master
|
lms/djangoapps/course_structure_api/v0/views.py
|
2
|
""" API implementation for course-oriented interactions. """
import logging
from django.conf import settings
from django.http import Http404
from rest_framework.authentication import OAuth2Authentication, SessionAuthentication
from rest_framework.exceptions import PermissionDenied, AuthenticationFailed
from rest_framework.generics import RetrieveAPIView, ListAPIView
from rest_framework.response import Response
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.keys import CourseKey
from course_structure_api.v0 import api, serializers
from course_structure_api.v0.errors import CourseNotFoundError, CourseStructureNotAvailableError
from courseware import courses
from courseware.access import has_access
from openedx.core.lib.api.permissions import IsAuthenticatedOrDebug
from openedx.core.lib.api.serializers import PaginationSerializer
from student.roles import CourseInstructorRole, CourseStaffRole
log = logging.getLogger(__name__)
class CourseViewMixin(object):
"""
Mixin for views dealing with course content. Also handles authorization and authentication.
"""
lookup_field = 'course_id'
authentication_classes = (OAuth2Authentication, SessionAuthentication,)
permission_classes = (IsAuthenticatedOrDebug,)
def get_course_or_404(self):
"""
Retrieves the specified course, or raises an Http404 error if it does not exist.
Also checks to ensure the user has permissions to view the course
"""
try:
course_id = self.kwargs.get('course_id')
course_key = CourseKey.from_string(course_id)
course = courses.get_course(course_key)
self.check_course_permissions(self.request.user, course_key)
return course
except ValueError:
raise Http404
@staticmethod
def course_check(func):
"""Decorator responsible for catching errors finding and returning a 404 if the user does not have access
to the API function.
:param func: function to be wrapped
:returns: the wrapped function
"""
def func_wrapper(self, *args, **kwargs):
"""Wrapper function for this decorator.
:param *args: the arguments passed into the function
:param **kwargs: the keyword arguments passed into the function
:returns: the result of the wrapped function
"""
try:
course_id = self.kwargs.get('course_id')
self.course_key = CourseKey.from_string(course_id)
self.check_course_permissions(self.request.user, self.course_key)
return func(self, *args, **kwargs)
except CourseNotFoundError:
raise Http404
return func_wrapper
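    # Usage sketch (illustrative): any handler method can be guarded with the
    # decorator, e.g.
    #
    #   @CourseViewMixin.course_check
    #   def get(self, request, **kwargs):
    #       return Response(...)
    #
    # as CourseStructure.get and CourseGradingPolicy.get do below.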
def user_can_access_course(self, user, course):
"""
Determines if the user is staff or an instructor for the course.
Always returns True if DEBUG mode is enabled.
"""
return (settings.DEBUG
or has_access(user, CourseStaffRole.ROLE, course)
or has_access(user, CourseInstructorRole.ROLE, course))
def check_course_permissions(self, user, course):
"""
Checks if the request user can access the course.
Raises PermissionDenied if the user does not have course access.
"""
if not self.user_can_access_course(user, course):
raise PermissionDenied
def perform_authentication(self, request):
"""
Ensures that the user is authenticated (e.g. not an AnonymousUser), unless DEBUG mode is enabled.
"""
super(CourseViewMixin, self).perform_authentication(request)
if request.user.is_anonymous() and not settings.DEBUG:
raise AuthenticationFailed
class CourseList(CourseViewMixin, ListAPIView):
"""
**Use Case**
Get a paginated list of courses in the edX Platform.
The list can be filtered by course_id.
Each page in the list can contain up to 10 courses.
**Example Requests**
GET /api/course_structure/v0/courses/
GET /api/course_structure/v0/courses/?course_id={course_id1},{course_id2}
**Response Values**
* count: The number of courses in the edX platform.
* next: The URI to the next page of courses.
* previous: The URI to the previous page of courses.
* num_pages: The number of pages listing courses.
* results: A list of courses returned. Each collection in the list
contains these fields.
* id: The unique identifier for the course.
* name: The name of the course.
* category: The type of content. In this case, the value is always
"course".
* org: The organization specified for the course.
* run: The run of the course.
* course: The course number.
* uri: The URI to use to get details of the course.
* image_url: The URI for the course's main image.
* start: The course start date.
* end: The course end date. If course end date is not specified, the
value is null.
"""
paginate_by = 10
paginate_by_param = 'page_size'
pagination_serializer_class = PaginationSerializer
serializer_class = serializers.CourseSerializer
def get_queryset(self):
course_ids = self.request.QUERY_PARAMS.get('course_id', None)
results = []
if course_ids:
course_ids = course_ids.split(',')
for course_id in course_ids:
course_key = CourseKey.from_string(course_id)
course_descriptor = courses.get_course(course_key)
results.append(course_descriptor)
else:
results = modulestore().get_courses()
# Ensure only course descriptors are returned.
results = (course for course in results if course.scope_ids.block_type == 'course')
# Ensure only courses accessible by the user are returned.
results = (course for course in results if self.user_can_access_course(self.request.user, course))
# Sort the results in a predictable manner.
return sorted(results, key=lambda course: unicode(course.id))
class CourseDetail(CourseViewMixin, RetrieveAPIView):
"""
**Use Case**
Get details for a specific course.
**Example Request**:
GET /api/course_structure/v0/courses/{course_id}/
**Response Values**
* id: The unique identifier for the course.
* name: The name of the course.
* category: The type of content.
* org: The organization that is offering the course.
* run: The run of the course.
* course: The course number.
* uri: The URI to use to get details about the course.
* image_url: The URI for the course's main image.
* start: The course start date.
* end: The course end date. If course end date is not specified, the
value is null.
"""
serializer_class = serializers.CourseSerializer
def get_object(self, queryset=None):
return self.get_course_or_404()
class CourseStructure(CourseViewMixin, RetrieveAPIView):
"""
**Use Case**
Get the course structure. This endpoint returns all blocks in the
course.
**Example requests**:
GET /api/course_structure/v0/course_structures/{course_id}/
**Response Values**
* root: The ID of the root node of the course structure.
* blocks: A dictionary that maps block IDs to a collection of
information about each block. Each block contains the following
fields.
* id: The ID of the block.
* type: The type of block. Possible values include sequential,
vertical, html, problem, video, and discussion. The type can also be
the name of a custom type of block used for the course.
* display_name: The display name configured for the block.
* graded: Whether or not the sequential or problem is graded. The
value is true or false.
* format: The assignment type.
* children: If the block has child blocks, a list of IDs of the child
blocks.
"""
@CourseViewMixin.course_check
def get(self, request, **kwargs):
try:
return Response(api.course_structure(self.course_key))
except CourseStructureNotAvailableError:
# If we don't have data stored, we will try to regenerate it, so
            # return a 503 and ask them to retry in 2 minutes.
return Response(status=503, headers={'Retry-After': '120'})
class CourseGradingPolicy(CourseViewMixin, ListAPIView):
"""
**Use Case**
Get the course grading policy.
**Example requests**:
GET /api/course_structure/v0/grading_policies/{course_id}/
**Response Values**
* assignment_type: The type of the assignment, as configured by course
staff. For example, course staff might make the assignment types Homework,
Quiz, and Exam.
* count: The number of assignments of the type.
* dropped: Number of assignments of the type that are dropped.
* weight: The weight, or effect, of the assignment type on the learner's
final grade.
"""
allow_empty = False
@CourseViewMixin.course_check
def get(self, request, **kwargs):
return Response(api.course_grading_policy(self.course_key))
|
theshadowx/enigma2
|
refs/heads/master
|
lib/python/Plugins/SystemPlugins/IniVFD/__init__.py
|
12133432
| |
benjaminrigaud/django
|
refs/heads/master
|
django/contrib/gis/tests/geo3d/models.py
|
111
|
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class NamedModel(models.Model):
name = models.CharField(max_length=30)
objects = models.GeoManager()
class Meta:
abstract = True
app_label = 'geo3d'
def __str__(self):
return self.name
class City3D(NamedModel):
point = models.PointField(dim=3)
class Interstate2D(NamedModel):
line = models.LineStringField(srid=4269)
class Interstate3D(NamedModel):
line = models.LineStringField(dim=3, srid=4269)
class InterstateProj2D(NamedModel):
line = models.LineStringField(srid=32140)
class InterstateProj3D(NamedModel):
line = models.LineStringField(dim=3, srid=32140)
class Polygon2D(NamedModel):
poly = models.PolygonField(srid=32140)
class Polygon3D(NamedModel):
poly = models.PolygonField(dim=3, srid=32140)
class SimpleModel(models.Model):
objects = models.GeoManager()
class Meta:
abstract = True
app_label = 'geo3d'
class Point2D(SimpleModel):
point = models.PointField()
class Point3D(SimpleModel):
point = models.PointField(dim=3)
class MultiPoint3D(SimpleModel):
mpoint = models.MultiPointField(dim=3)
|
detrout/debian-statsmodels
|
refs/heads/debian
|
examples/python/quantile_regression.py
|
30
|
## Quantile regression
#
# This example page shows how to use ``statsmodels``' ``QuantReg`` class to replicate parts of the analysis published in
#
# * Koenker, Roger and Kevin F. Hallock. "Quantile Regression". Journal of Economic Perspectives, Volume 15, Number 4, Fall 2001, Pages 143–156
#
# We are interested in the relationship between income and expenditures on food for a sample of working class Belgian households in 1857 (the Engel data).
#
# ## Setup
#
# We first need to load some modules and to retrieve the data. Conveniently, the Engel dataset is shipped with ``statsmodels``.
from __future__ import print_function
import patsy
import numpy as np
import pandas as pd
import statsmodels.api as sm
import statsmodels.formula.api as smf
import matplotlib.pyplot as plt
from statsmodels.regression.quantile_regression import QuantReg
data = sm.datasets.engel.load_pandas().data
data.head()
# ## Least Absolute Deviation
#
# The LAD model is a special case of quantile regression where q=0.5
mod = smf.quantreg('foodexp ~ income', data)
res = mod.fit(q=.5)
print(res.summary())
# ## Visualizing the results
#
# We estimate the quantile regression model for many quantiles between .05 and .95, and compare the best-fit line from each of these models to the ordinary least squares results.
# ### Prepare data for plotting
#
# For convenience, we place the quantile regression results in a Pandas DataFrame, and the OLS results in a dictionary.
quantiles = np.arange(.05, .96, .1)
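# np.arange(.05, .96, .1) yields the ten quantiles 0.05, 0.15, ..., 0.95,
# which is why the first plot below contains ten dotted grey lines.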
def fit_model(q):
res = mod.fit(q=q)
return [q, res.params['Intercept'], res.params['income']] + res.conf_int().ix['income'].tolist()
models = [fit_model(x) for x in quantiles]
models = pd.DataFrame(models, columns=['q', 'a', 'b','lb','ub'])
ols = smf.ols('foodexp ~ income', data).fit()
ols_ci = ols.conf_int().ix['income'].tolist()
ols = dict(a = ols.params['Intercept'],
b = ols.params['income'],
lb = ols_ci[0],
ub = ols_ci[1])
print(models)
print(ols)
# ### First plot
#
# This plot compares best fit lines for 10 quantile regression models to the least squares fit. As Koenker and Hallock (2001) point out, we see that:
#
# 1. Food expenditure increases with income
# 2. The *dispersion* of food expenditure increases with income
# 3. The least squares estimates fit low income observations quite poorly (i.e. the OLS line passes over most low income households)
x = np.arange(data.income.min(), data.income.max(), 50)
get_y = lambda a, b: a + b * x
for i in range(models.shape[0]):
y = get_y(models.a[i], models.b[i])
plt.plot(x, y, linestyle='dotted', color='grey')
y = get_y(ols['a'], ols['b'])
plt.plot(x, y, color='red', label='OLS')
plt.scatter(data.income, data.foodexp, alpha=.2)
plt.xlim((240, 3000))
plt.ylim((240, 2000))
plt.legend()
plt.xlabel('Income')
plt.ylabel('Food expenditure')
plt.show()
# ### Second plot
#
# The dotted black lines form the 95% point-wise confidence band around the 10 quantile regression estimates (solid black line). The red lines represent OLS regression results along with their 95% confidence interval.
#
# In most cases, the quantile regression point estimates lie outside the OLS confidence interval, which suggests that the effect of income on food expenditure may not be constant across the distribution.
from matplotlib import rc
rc('text', usetex=True)
n = models.shape[0]
p1 = plt.plot(models.q, models.b, color='black', label='Quantile Reg.')
p2 = plt.plot(models.q, models.ub, linestyle='dotted', color='black')
p3 = plt.plot(models.q, models.lb, linestyle='dotted', color='black')
p4 = plt.plot(models.q, [ols['b']] * n, color='red', label='OLS')
p5 = plt.plot(models.q, [ols['lb']] * n, linestyle='dotted', color='red')
p6 = plt.plot(models.q, [ols['ub']] * n, linestyle='dotted', color='red')
plt.ylabel(r'$\beta_{\mathrm{income}}$')
plt.xlabel('Quantiles of the conditional food expenditure distribution')
plt.legend()
plt.show()
|
francisar/rds_manager
|
refs/heads/master
|
aliyun/api/rest/Ecs20140526ModifyEipAddressAttributeRequest.py
|
1
|
'''
Created by auto_sdk on 2015.06.23
'''
from aliyun.api.base import RestApi
class Ecs20140526ModifyEipAddressAttributeRequest(RestApi):
def __init__(self,domain='ecs.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.AllocationId = None
self.Bandwidth = None
def getapiname(self):
return 'ecs.aliyuncs.com.ModifyEipAddressAttribute.2014-05-26'
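# Usage sketch (illustrative; the AllocationId below is hypothetical and how
# the request is dispatched depends on the RestApi base class):
#
#   req = Ecs20140526ModifyEipAddressAttributeRequest()
#   req.AllocationId = 'eip-25example'
#   req.Bandwidth = 5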
|
trunca/enigma2
|
refs/heads/6.5
|
lib/python/OPENDROID/MountManager.py
|
2
|
from boxbranding import getMachineBrand, getMachineName, getBoxType, getMachineBuild
from os import system, rename, path, mkdir, remove, listdir, remove as os_remove
from time import sleep
import re
from re import search
from enigma import eTimer, getDesktop
from Screens.ChoiceBox import ChoiceBox
from Screens.InputBox import InputBox
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.Standby import TryQuitMainloop
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.ConfigList import ConfigListScreen
from Components.config import getConfigListEntry, config, ConfigSelection, NoSave, configfile
from Components.Pixmap import Pixmap
from Components.Console import Console
from Screens.Console import Console as ConsoleScreen
from Components.Sources.List import List
from Components.Sources.StaticText import StaticText
from Components.Harddisk import Harddisk
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import fileExists
class DeviceManager(Screen):
screenwidth = getDesktop(0).size().width()
if screenwidth and screenwidth == 1920:
skin = '\n\t\t\t<screen name="DeviceManager" position="center,center" size="1040,680">\n\t\t\t\t<widget source="list" render="Listbox" position="10,0" size="1020,510" scrollbarMode="showOnDemand" >\n\t\t\t\t\t<convert type="TemplatedMultiContent">\n\t\t\t\t\t{"template": [\n\t\t\t\t\tMultiContentEntryText(pos = (90, 0), size = (990, 30), font=0, text = 0),\n\t\t\t\t\tMultiContentEntryText(pos = (110, 30), size = (970, 50), font=1, flags = RT_VALIGN_TOP, text = 1),\n\t\t\t\t\tMultiContentEntryPixmapAlphaTest(pos = (0, 0), size = (80, 80), png = 2),\n\t\t\t\t\t],\n\t\t\t\t\t"fonts": [gFont("Regular", 32),gFont("Regular", 26)],\n\t\t\t\t\t"itemHeight": 100\n\t\t\t\t\t}\n\t\t\t\t\t</convert>\n\t\t\t\t</widget>\n\t\t\t\t<widget name="lab1" zPosition="2" position="50,40" size="700,40" font="Regular;32" halign="center" transparent="1"/>\n\t\t\t\t<ePixmap position="40,604" size="100,40" zPosition="0" pixmap="buttons/red.png" transparent="1" alphatest="blend"/>\n\t\t\t\t<ePixmap position="200,604" size="100,40" zPosition="0" pixmap="buttons/green.png" transparent="1" alphatest="blend"/>\n\t\t\t\t<ePixmap position="450,604" size="100,40" zPosition="0" pixmap="buttons/yellow.png" transparent="1" alphatest="blend"/>\n\t\t\t\t<ePixmap position="730,604" size="100,40" zPosition="0" pixmap="buttons/blue.png" transparent="1" alphatest="blend"/>\n\t\t\t\t<widget name="key_red" position="80,604" zPosition="1" size="270,35" font="Regular;32" valign="top" halign="left" backgroundColor="red" transparent="1" />\n\t\t\t\t<widget name="key_green" position="240,604" zPosition="1" size="270,35" font="Regular;32" valign="top" halign="left" backgroundColor="green" transparent="1" />\n\t\t\t\t<widget name="key_yellow" position="490,604" zPosition="1" size="270,35" font="Regular;32" valign="top" halign="left" backgroundColor="yellow" transparent="1" />\n\t\t\t\t<widget name="key_blue" position="770,604" zPosition="1" size="270,35" font="Regular;32" valign="top" halign="left" backgroundColor="blue" transparent="1" />\n\t\t\t</screen>'
else:
skin = '\n\t\t\t<screen name="DeviceManager" position="center,center" size="800,560" title="Devices Manager">\n\t\t\t\t<widget source="list" render="Listbox" position="10,0" size="780,510" scrollbarMode="showOnDemand" >\n\t\t\t\t\t<convert type="TemplatedMultiContent">\n\t\t\t\t\t{"template": [\n\t\t\t\t\tMultiContentEntryText(pos = (90, 0), size = (690, 30), font=0, text = 0),\n\t\t\t\t\tMultiContentEntryText(pos = (110, 30), size = (670, 50), font=1, flags = RT_VALIGN_TOP, text = 1),\n\t\t\t\t\tMultiContentEntryPixmapAlphaTest(pos = (0, 0), size = (80, 80), png = 2),\n\t\t\t\t\t],\n\t\t\t\t\t"fonts": [gFont("Regular", 24),gFont("Regular", 20)],\n\t\t\t\t\t"itemHeight": 85\n\t\t\t\t\t}\n\t\t\t\t\t</convert>\n\t\t\t\t</widget>\n\t\t\t\t<widget name="lab1" zPosition="2" position="50,40" size="700,40" font="Regular;24" halign="center" transparent="1"/>\n\t\t\t\t<widget name="key_red" position="70,524" zPosition="1" size="200,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />\n\t\t\t\t<widget name="key_green" position="240,524" zPosition="1" size="200,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />\n\t\t\t\t<widget name="key_yellow" position="390,524" zPosition="1" size="140,40" font="Regular;20" halign="left" valign="top" backgroundColor="#a08500" transparent="1" />\n\t\t\t\t<widget name="key_blue" position="540,524" zPosition="1" size="140,40" font="Regular;20" halign="left" valign="top" backgroundColor="#a08500" transparent="1" />\n\t\t\t</screen>'
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _('Devices Manager'))
        self['key_red'] = Label(_('Use as HDD'))
self['key_green'] = Label(_('Setup Mounts'))
self['key_yellow'] = Label(_('Unmount'))
self['key_blue'] = Label(_('Mount'))
self['lab1'] = Label()
self.onChangedEntry = []
self.list = []
self['list'] = List(self.list)
self['list'].onSelectionChanged.append(self.selectionChanged)
self['actions'] = ActionMap(['WizardActions', 'ColorActions', 'MenuActions'], {'back': self.close,
'green': self.SetupMounts,
'red': self.saveMypoints,
'yellow': self.Unmount,
'blue': self.Mount,
'menu': self.close})
self.activityTimer = eTimer()
self.activityTimer.timeout.get().append(self.updateList2)
self.updateList()
def selectionChanged(self):
if len(self.list) == 0:
return
self.sel = self['list'].getCurrent()
mountp = self.sel[3]
if mountp.find('/media/hdd') < 0:
self['key_red'].setText(_('Use as HDD'))
else:
self['key_red'].setText(' ')
if self.sel:
try:
name = str(self.sel[0])
desc = str(self.sel[1].replace('\t', ' '))
except:
name = ''
desc = ''
else:
name = ''
desc = ''
for cb in self.onChangedEntry:
cb(name, desc)
def updateList(self, result = None, retval = None, extra_args = None):
scanning = _('Wait please while scanning for devices...')
self['lab1'].setText(scanning)
self.activityTimer.start(10)
def updateList2(self):
self.activityTimer.stop()
self.list = []
list2 = []
f = open('/proc/partitions', 'r')
for line in f.readlines():
parts = line.strip().split()
if not parts:
continue
device = parts[3]
if not search('sd[a-z][1-9]', device) and not search('mmcblk[0-9]p[1-9]', device):
continue
if getMachineBuild() in ('sf5008','et13000','et1x000','vuuno4k', 'vuultimo4k', 'vusolo4k', 'hd51', 'hd52', 'dm820', 'dm7080', 'sf4008', 'dm900', 'dm920', 'gb7252', 'dags7252', 'vs1500','h7','8100s') and search('mmcblk0p[1-9]',device):
continue
            if getMachineBuild() in ('xc7439',) and search('mmcblk1p[1-9]', device):
continue
if device in list2:
continue
self.buildMy_rec(device)
list2.append(device)
f.close()
self['list'].list = self.list
self['lab1'].hide()
def buildMy_rec(self, device):
        # Strip the trailing partition number (and the 'p' separator on mmcblk
        # devices) to get the parent block device name.
        match = search('p?[0-9]+$', device)
        device2 = device[:match.start()] if match else device
devicetype = path.realpath('/sys/block/' + device2 + '/device')
d2 = device
name = 'USB: '
mypixmap = '/usr/lib/enigma2/python/OPENDROID/icons/dev_usb.png'
if device2.startswith('mmcblk'):
if getBoxType() in ('wetekplay', 'wetekplayplus', 'zgemmah5') or getMachineBuild() in ('h3', 'h5'):
model = file('/sys/block/mmcblk0/device/name').read()
else:
try:
model = file('/sys/block/' + device2 + '/device/name').read()
except:
model = 'Unknown'
mypixmap = '/usr/lib/enigma2/python/OPENDROID/icons/dev_mmc.png'
name = 'MMC: '
else:
model = file('/sys/block/' + device2 + '/device/model').read()
model = str(model).replace('\n', '')
des = ''
if devicetype.find('/devices/pci') != -1 or devicetype.find('ahci') != -1:
name = _('HARD DISK: ')
mypixmap = '/usr/lib/enigma2/python/OPENDROID/icons/dev_hdd.png'
name = name + model
self.Console = Console()
self.Console.ePopen("sfdisk -l | grep swap | awk '{print $(NF-9)}' >/tmp/devices.tmp")
sleep(0.5)
try:
f = open('/tmp/devices.tmp', 'r')
swapdevices = f.read()
f.close()
except:
swapdevices = ' '
if path.exists('/tmp/devices.tmp'):
remove('/tmp/devices.tmp')
swapdevices = swapdevices.replace('\n', '')
swapdevices = swapdevices.split('/')
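        # Splitting on '/' turns e.g. '/dev/sda2' into ['', 'dev', 'sda2'], so
        # the `device in swapdevices` checks below compare bare device names.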
f = open('/proc/mounts', 'r')
for line in f.readlines():
if line.find(device) != -1:
parts = line.strip().split()
d1 = parts[1]
dtype = parts[2]
rw = parts[3]
                break
            elif device in swapdevices:
                d1 = _('None')
                dtype = 'swap'
                rw = _('None')
                break
else:
d1 = _('None')
dtype = _('unavailable')
rw = _('None')
f.close()
f = open('/proc/partitions', 'r')
for line in f.readlines():
if line.find(device) != -1:
parts = line.strip().split()
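                # /proc/partitions reports sizes in 1 KiB blocks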
size = int(parts[2])
if float(size) / 1024 / 1024 / 1024 > 1:
des = _('Size: ') + str(round(float(size) / 1024 / 1024 / 1024, 2)) + _('TB')
elif size / 1024 / 1024 > 1:
des = _('Size: ') + str(size / 1024 / 1024) + _('GB')
else:
des = _('Size: ') + str(size / 1024) + _('MB')
else:
try:
size = file('/sys/block/' + device2 + '/' + device + '/size').read()
size = str(size).replace('\n', '')
size = int(size)
except:
size = 0
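                # /sys/block/.../size is in 512-byte sectors, hence the
                # division by 2 to get KiB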
if float(size) / 2 / 1024 / 1024 / 1024 > 1:
des = _('Size: ') + str(round(float(size) / 2 / 1024 / 1024 / 1024, 2)) + _('TB')
elif size / 2 / 1024 / 1024 > 1:
des = _('Size: ') + str(size / 2 / 1024 / 1024) + _('GB')
else:
des = _('Size: ') + str(size / 2 / 1024) + _('MB')
f.close()
if des != '':
if rw.startswith('rw'):
rw = ' R/W'
elif rw.startswith('ro'):
rw = ' R/O'
else:
rw = ''
des += '\t' + _('Mount: ') + d1 + '\n' + _('Device: ') + '/dev/' + device + '\t' + _('Type: ') + dtype + rw
png = LoadPixmap(mypixmap)
mountP = d1
deviceP = '/dev/' + device
res = (name,
des,
png,
mountP,
deviceP)
self.list.append(res)
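        # The (name, des, png, mountP, deviceP) tuple lines up with the skin
        # template above: text 0 = name, text 1 = description, png 2 = icon.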
def SetupMounts(self):
self.session.openWithCallback(self.updateList, DeviceManager_Setup)
def Mount(self):
sel = self['list'].getCurrent()
if sel:
mountp = sel[3]
device = sel[4]
system('mount ' + device)
mountok = False
            f = open('/proc/mounts', 'r')
            for line in f.readlines():
                if line.find(device) != -1:
                    mountok = True
            f.close()
if not mountok:
self.session.open(MessageBox, _('Mount failed'), MessageBox.TYPE_INFO, timeout=5)
self.updateList()
def Unmount(self):
sel = self['list'].getCurrent()
if sel:
mountp = sel[3]
device = sel[4]
system('umount ' + mountp)
try:
mounts = open('/proc/mounts')
except IOError:
return -1
mountcheck = mounts.readlines()
mounts.close()
for line in mountcheck:
parts = line.strip().split(' ')
if path.realpath(parts[0]).startswith(device):
self.session.open(MessageBox, _("Can't unmount partiton, make sure it is not being used for swap or record/timeshift paths"), MessageBox.TYPE_INFO)
self.updateList()
def saveMypoints(self):
self.Console = Console()
sel = self['list'].getCurrent()
if sel:
self.mountp = sel[3]
self.device = sel[4]
if self.mountp.find('/media/hdd') < 0:
self.Console.ePopen('umount ' + self.device)
if not path.exists('/media/hdd'):
mkdir('/media/hdd', 493)
else:
self.Console.ePopen('umount /media/hdd')
self.Console.ePopen('mount ' + self.device + ' /media/hdd')
self.Console.ePopen('/sbin/blkid | grep ' + self.device, self.add_fstab, [self.device, self.mountp])
else:
self.session.open(MessageBox, _('This Device is already mounted as HDD.'), MessageBox.TYPE_INFO, timeout=10, close_on_any_key=True)
    def add_fstab(self, result = None, retval = None, extra_args = None):
        # Guard against blkid producing no output, which would otherwise
        # raise an IndexError on the split below.
        if not result or 'UUID=' not in result:
            return
        self.device = extra_args[0]
        self.mountp = extra_args[1]
        self.device_uuid = 'UUID=' + result.split('UUID=')[1].split(' ')[0].replace('"', '')
if not path.exists(self.mountp):
mkdir(self.mountp, 493)
file('/etc/fstab.tmp', 'w').writelines([ l for l in file('/etc/fstab').readlines() if '/media/hdd' not in l ])
rename('/etc/fstab.tmp', '/etc/fstab')
file('/etc/fstab.tmp', 'w').writelines([ l for l in file('/etc/fstab').readlines() if self.device not in l ])
rename('/etc/fstab.tmp', '/etc/fstab')
file('/etc/fstab.tmp', 'w').writelines([ l for l in file('/etc/fstab').readlines() if self.device_uuid not in l ])
rename('/etc/fstab.tmp', '/etc/fstab')
out = open('/etc/fstab', 'a')
line = self.device_uuid + '\t/media/hdd\tauto\tdefaults\t0 0\n'
out.write(line)
out.close()
self.Console.ePopen('mount -a', self.updateList)
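        # Illustrative result (UUID made up): for a partition blkid reports as
        #   /dev/sda1: UUID="1234-ABCD" TYPE="ext4"
        # the rewrites above leave a single /etc/fstab entry such as
        #   UUID=1234-ABCD	/media/hdd	auto	defaults	0 0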
def restBo(self, answer):
if answer is True:
self.session.open(TryQuitMainloop, 2)
else:
self.updateList()
self.selectionChanged()
class DeviceManager_Setup(Screen, ConfigListScreen):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _('Devices Manager - Setup'))
self.skinName = ['Setup']
self.list = []
self.device_type = 'auto'
self.device_uuid = ''
ConfigListScreen.__init__(self, self.list)
Screen.setTitle(self, _('Choose where to mount your devices to:'))
self['key_green'] = Label(_('Save'))
self['key_red'] = Label(_('Cancel'))
self['Linconn'] = Label(_('Wait please while scanning your %s %s devices...') % (getMachineBrand(), getMachineName()))
self['actions'] = ActionMap(['WizardActions', 'ColorActions'], {'green': self.saveMypoints,
'red': self.close,
'back': self.close})
self.updateList()
def updateList(self):
self.list = []
list2 = []
self.Console = Console()
self.Console.ePopen("sfdisk -l | grep swap | awk '{print $(NF-9)}' >/tmp/devices.tmp")
sleep(0.5)
f = open('/tmp/devices.tmp', 'r')
swapdevices = f.read()
f.close()
if path.exists('/tmp/devices.tmp'):
remove('/tmp/devices.tmp')
swapdevices = swapdevices.replace('\n', '')
swapdevices = swapdevices.split('/')
f = open('/proc/partitions', 'r')
for line in f.readlines():
parts = line.strip().split()
if not parts:
continue
device = parts[3]
if not search('sd[a-z][1-9]', device) and not search('mmcblk[0-9]p[1-9]', device):
continue
if getMachineBuild() in ('sf5008','et13000','et1x000','vuuno4k', 'vuultimo4k', 'vusolo4k', 'hd51', 'hd52', 'dm820', 'dm7080', 'sf4008', 'dm900', 'dm920', 'gb7252', 'dags7252', 'vs1500','h7','8100s') and search('mmcblk0p[1-9]',device):
continue
            if getMachineBuild() in ('xc7439',) and search('mmcblk1p[1-9]', device):
continue
if device in list2:
continue
if device in swapdevices:
continue
self.buildMy_rec(device)
list2.append(device)
f.close()
self['config'].list = self.list
self['config'].l.setList(self.list)
self['Linconn'].hide()
    def buildMy_rec(self, device):
        # Strip the trailing partition number (and the 'p' separator on mmcblk
        # devices) to get the parent block device name, e.g. 'sda1' -> 'sda'
        # and 'mmcblk0p2' -> 'mmcblk0'.
        match = search('p?[0-9]+$', device)
        device2 = device[:match.start()] if match else device
devicetype = path.realpath('/sys/block/' + device2 + '/device')
d2 = device
name = 'USB: '
mypixmap = '/usr/lib/enigma2/python/OPENDROID/icons/dev_usb.png'
if device2.startswith('mmcblk'):
if getBoxType() in ('wetekplay', 'wetekplayplus') or getMachineBuild() in ('h3', 'h5'):
model = file('/sys/block/mmcblk0/device/name').read()
else:
try:
model = file('/sys/block/' + device2 + '/device/name').read()
except:
model = 'Unknown'
mypixmap = '/usr/lib/enigma2/python/OPENDROID/icons/dev_mmc.png'
name = 'MMC: '
else:
model = file('/sys/block/' + device2 + '/device/model').read()
model = str(model).replace('\n', '')
des = ''
if devicetype.find('/devices/pci') != -1:
name = _('HARD DISK: ')
mypixmap = '/usr/lib/enigma2/python/OPENDROID/icons/dev_hdd.png'
name = name + model
f = open('/proc/mounts', 'r')
for line in f.readlines():
if line.find(device) != -1:
parts = line.strip().split()
d1 = parts[1]
dtype = parts[2]
                break
else:
d1 = _('None')
dtype = _('unavailable')
f.close()
f = open('/proc/partitions', 'r')
for line in f.readlines():
if line.find(device) != -1:
parts = line.strip().split()
size = int(parts[2])
if float(size) / 1024 / 1024 / 1024 > 1:
des = _('Size: ') + str(round(float(size) / 1024 / 1024 / 1024, 2)) + _('TB')
elif size / 1024 / 1024 > 1:
des = _('Size: ') + str(size / 1024 / 1024) + _('GB')
else:
des = _('Size: ') + str(size / 1024) + _('MB')
else:
try:
size = file('/sys/block/' + device2 + '/' + device + '/size').read()
size = str(size).replace('\n', '')
size = int(size)
except:
size = 0
if float(size) / 2 / 1024 / 1024 / 1024 > 1:
des = _('Size: ') + str(round(float(size) / 2 / 1024 / 1024 / 1024, 2)) + _('TB')
elif size / 2 / 1024 / 1024 > 1:
des = _('Size: ') + str(size / 2 / 1024 / 1024) + _('GB')
else:
des = _('Size: ') + str(size / 2 / 1024) + _('MB')
f.close()
item = NoSave(ConfigSelection(default='/media/' + device, choices=[('/media/' + device, '/media/' + device),
('/media/hdd', '/media/hdd'),
('/media/hdd2', '/media/hdd2'),
('/media/hdd3', '/media/hdd3'),
('/media/usb', '/media/usb'),
('/media/usb2', '/media/usb2'),
('/media/usb3', '/media/usb3'),
('/media/mmc', '/media/mmc'),
('/media/mmc2', '/media/mmc2'),
('/media/mmc3', '/media/mmc3'),
('/usr', '/usr')]))
if dtype == 'Linux':
dtype = 'ext3'
else:
dtype = 'auto'
item.value = d1.strip()
text = name + ' ' + des + ' /dev/' + device
res = getConfigListEntry(text, item, device, dtype)
        if des != '':
            self.list.append(res)
def saveMypoints(self):
mycheck = False
for x in self['config'].list:
self.device = x[2]
self.mountp = x[1].value
self.type = x[3]
self.Console.ePopen('umount ' + self.device)
self.Console.ePopen('/sbin/blkid | grep ' + self.device, self.add_fstab, [self.device, self.mountp])
message = _('Updating mount locations.')
ybox = self.session.openWithCallback(self.delay, MessageBox, message, type=MessageBox.TYPE_INFO, timeout=5, enable_input=False)
ybox.setTitle(_('Please wait.'))
def delay(self, val):
message = _('Changes need a system restart to take effect.\nRestart your %s %s now?') % (getMachineBrand(), getMachineName())
ybox = self.session.openWithCallback(self.restartBox, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_('Restart %s %s.') % (getMachineBrand(), getMachineName()))
def add_fstab(self, result = None, retval = None, extra_args = None):
print '[DevicesManager] RESULT:', result
if result:
self.device = extra_args[0]
self.mountp = extra_args[1]
self.device_uuid = 'UUID=' + result.split('UUID=')[1].split(' ')[0].replace('"', '')
self.device_type = result.split('TYPE=')[1].split(' ')[0].replace('"', '')
if self.device_type.startswith('ext'):
self.device_type = 'auto'
elif self.device_type.startswith('ntfs') and result.find('ntfs-3g') != -1:
self.device_type = 'ntfs-3g'
elif self.device_type.startswith('ntfs') and result.find('ntfs-3g') == -1:
self.device_type = 'ntfs'
if not path.exists(self.mountp):
mkdir(self.mountp, 493)
file('/etc/fstab.tmp', 'w').writelines([ l for l in file('/etc/fstab').readlines() if self.device not in l ])
rename('/etc/fstab.tmp', '/etc/fstab')
file('/etc/fstab.tmp', 'w').writelines([ l for l in file('/etc/fstab').readlines() if self.device_uuid not in l ])
rename('/etc/fstab.tmp', '/etc/fstab')
out = open('/etc/fstab', 'a')
line = self.device_uuid + '\t' + self.mountp + '\t' + self.device_type + '\tdefaults\t0 0\n'
out.write(line)
out.close()
def restartBox(self, answer):
if answer is True:
self.session.open(TryQuitMainloop, 2)
else:
self.close()
class UsbFormat(Screen):
skin = '\n\t<screen position="center,center" size="580,350" title="Usb Format Wizard">\n\t\t<widget name="lab1" position="10,10" size="560,280" font="Regular;20" valign="top" transparent="1"/>\n\t\t<ePixmap pixmap="skin_default/buttons/red.png" position="100,300" size="140,40" alphatest="on" />\n\t\t<ePixmap pixmap="skin_default/buttons/green.png" position="340,300" size="140,40" alphatest="on" />\n\t\t<widget name="key_red" position="100,300" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />\n\t\t<widget name="key_green" position="340,300" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />\n\t</screen>'
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _('USB Disk Format Wizard'))
msg = _('This wizard will help you to format Usb mass storage devices for Linux.\n\n')
msg += _('Please be sure that your usb drive is NOT CONNECTED to your %s %s box before you continue.\n') % (getMachineBrand(), getMachineName())
msg += _('If your usb drive is connected and mounted you have to poweroff your box, remove the usb device and reboot.\n\n')
msg += _('Press Red button to continue, when you are ready and your usb is disconnected.\n')
self['key_red'] = Label(_('Continue ->'))
self['key_green'] = Label(_('Cancel'))
self['lab1'] = Label(msg)
self['actions'] = ActionMap(['WizardActions', 'ColorActions'], {'back': self.checkClose,
'red': self.step_Bump,
'green': self.checkClose})
self.step = 1
self.devices = []
self.device = None
self.totalpartitions = 1
self.totalsize = self.p1size = self.p2size = self.p3size = self.p4size = '0'
self.canclose = True
return
def stepOne(self):
msg = _('Connect your usb storage to your %s %s box\n\n') % (getMachineBrand(), getMachineName())
msg += _('Press Red button to continue when ready.\n\n')
msg += _('Warning: If your usb is already connected\n')
msg += _('to the box you have to unplug it, press\n')
msg += _('the Green button and restart the wizard.\n')
rc = system('/etc/init.d/autofs stop')
self.devices = self.get_Devicelist()
self['lab1'].setText(msg)
self.step = 2
def stepTwo(self):
msg = _('The wizard will now try to identify your connected usb device.')
msg += _('Press Red button to continue.')
self['lab1'].setText(msg)
self.step = 3
def stepThree(self):
newdevices = self.get_Devicelist()
for d in newdevices:
if d not in self.devices:
self.device = d
if self.device is None:
self.wizClose(_('Sorry, no new usb storage device detected.\nBe sure to follow the wizard instructions.'))
else:
msg = self.get_Deviceinfo(self.device)
self['lab1'].setText(msg)
self.step = 4
return
def stepFour(self):
myoptions = [['1', '1'],
['2', '2'],
['3', '3'],
['4', '4']]
self.session.openWithCallback(self.partSize1, ChoiceBox, title=_('Select number of partitions:'), list=myoptions)
def partSize1(self, total):
self.totalpartitions = int(total[1])
if self.totalpartitions > 1:
self.session.openWithCallback(self.partSize2, InputBox, title=_('Enter the size in Megabyte of the first partition:'), windowTitle=_('Partition size'), text='1', useableChars='1234567890')
else:
self.writePartFile()
def partSize2(self, psize):
if psize is None:
psize = '100'
self.p1size = psize
if self.totalpartitions > 2:
self.session.openWithCallback(self.partSize3, InputBox, title=_('Enter the size in Megabyte of the second partition:'), windowTitle=_('Partition size'), text='1', useableChars='1234567890')
else:
self.writePartFile()
return
def partSize3(self, psize):
if psize is None:
psize = '100'
self.p2size = psize
if self.totalpartitions > 3:
self.session.openWithCallback(self.partSize4, InputBox, title=_('Enter the size in Megabyte of the third partition:'), windowTitle=_('Partition size'), text='1', useableChars='1234567890')
else:
self.writePartFile()
return
def partSize4(self, psize):
if psize is None:
psize = '100'
self.p3size = psize
self.writePartFile()
return
def writePartFile(self):
p1 = p2 = p3 = p4 = '0'
device = '/dev/' + self.device
out0 = '#!/bin/sh\n\nsfdisk %s << EOF\n' % device
msg = _('Total Megabyte Available: \t') + str(self.totalsize)
msg += _('\nPartition scheme:\n')
p1 = self.p1size
out1 = ',%sM\n' % self.p1size
if self.totalpartitions == 1:
p1 = str(self.totalsize)
out1 = ';\n'
msg += '%s1 \t size:%s M\n' % (device, p1)
if self.totalpartitions > 1:
p2 = self.p2size
out2 = ',%sM\n' % self.p2size
if self.totalpartitions == 2:
p2 = self.totalsize - int(self.p1size)
out2 = ';\n'
msg += '%s2 \t size:%s M\n' % (device, p2)
if self.totalpartitions > 2:
p3 = self.p3size
out3 = ',%sM\n' % self.p3size
if self.totalpartitions == 3:
p3 = self.totalsize - (int(self.p1size) + int(self.p2size))
out3 = ';\n'
msg += '%s3 \t size:%s M\n' % (device, p3)
if self.totalpartitions > 3:
p4 = self.totalsize - (int(self.p1size) + int(self.p2size) + int(self.p3size))
out4 = ';\n'
msg += '%s4 \t size:%s M\n' % (device, p4)
msg += _('\nWarning: all the data will be lost.\nAre you sure you want to format this device?\n')
out = open('/tmp/sfdisk.tmp', 'w')
out.write(out0)
out.write(out1)
if self.totalpartitions > 1:
out.write(out2)
if self.totalpartitions > 2:
out.write(out3)
if self.totalpartitions > 3:
out.write(out4)
out.write('EOF\n')
out.close()
system('chmod 0755 /tmp/sfdisk.tmp')
self['lab1'].setText(msg)
if int(self.p1size) + int(self.p2size) + int(self.p3size) + int(self.p4size) > self.totalsize:
self.wizClose(_('Sorry, your partition(s) sizes are bigger than total device size.'))
else:
self.step = 5
def do_Part(self):
self.do_umount()
self.canclose = False
self['key_green'].hide()
device = '/dev/%s' % self.device
cmd = "echo -e 'Partitioning: %s \n\n'" % device
cmd2 = '/tmp/sfdisk.tmp'
self.session.open(ConsoleScreen, title=_('Partitioning...'), cmdlist=[cmd, cmd2], finishedCallback=self.partDone, closeOnSuccess=True)
def partDone(self):
msg = _('The device has been partitioned.\nPartitions will be now formatted.')
self['lab1'].setText(msg)
self.step = 6
def choiceBoxFstype(self):
menu = []
menu.append((_('ext2 - recommended for USB flash memory'), 'ext2'))
menu.append((_('ext3 - recommended for HARD Disks'), 'ext3'))
menu.append((_('ext4 - recommended for Boot'), 'ext4'))
menu.append((_('vfat - use only for media-files'), 'vfat'))
self.session.openWithCallback(self.choiceBoxFstypeCB, ChoiceBox, title=_('Choice filesystem.'), list=menu)
def choiceBoxFstypeCB(self, choice):
if choice is None:
return
else:
newfstype = choice[1]
if newfstype == 'ext4':
self.formatcmd = '/sbin/mkfs.ext4 -F -O extent,flex_bg,large_file,uninit_bg -m1'
elif newfstype == 'ext3':
self.formatcmd = '/sbin/mkfs.ext3 -F -m0'
elif newfstype == 'ext2':
self.formatcmd = '/sbin/mkfs.ext2 -F -m0'
elif newfstype == 'vfat':
self.formatcmd = '/sbin/mkfs.vfat'
self.do_Format()
return
def do_Format(self):
self.do_umount()
os_remove('/tmp/sfdisk.tmp')
cmds = ['sleep 1']
device = '/dev/%s1' % self.device
cmd = '%s %s' % (self.formatcmd, device)
cmds.append(cmd)
if self.totalpartitions > 1:
device = '/dev/%s2' % self.device
cmd = '%s %s' % (self.formatcmd, device)
cmds.append(cmd)
if self.totalpartitions > 2:
device = '/dev/%s3' % self.device
cmd = '%s %s' % (self.formatcmd, device)
cmds.append(cmd)
if self.totalpartitions > 3:
device = '/dev/%s4' % self.device
cmd = '%s %s' % (self.formatcmd, device)
cmds.append(cmd)
self.session.open(ConsoleScreen, title=_('Formatting...'), cmdlist=cmds, finishedCallback=self.succesS)
def step_Bump(self):
if self.step == 1:
self.stepOne()
elif self.step == 2:
self.stepTwo()
elif self.step == 3:
self.stepThree()
elif self.step == 4:
self.stepFour()
elif self.step == 5:
self.do_Part()
elif self.step == 6:
self.choiceBoxFstype()
def get_Devicelist(self):
devices = []
folder = listdir('/sys/block')
for f in folder:
if f.find('sd') != -1:
devices.append(f)
return devices
def get_Deviceinfo(self, device):
info = vendor = model = size = ''
filename = '/sys/block/%s/device/vendor' % device
if fileExists(filename):
vendor = file(filename).read().strip()
filename = '/sys/block/%s/device/model' % device
model = file(filename).read().strip()
filename = '/sys/block/%s/size' % device
size = int(file(filename).read().strip())
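        # size is in 512-byte sectors; the line below converts it to an
        # approximate capacity in MB (note the mixed decimal/binary divisors)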
cap = size / 1000 * 512 / 1024
size = '%d.%03d GB' % (cap / 1000, cap % 1000)
self.totalsize = cap
info = _('Model: ') + vendor + ' ' + model + '\n' + _('Size: ') + size + '\n' + _('Device: ') + '/dev/' + device
return info
def do_umount(self):
f = open('/proc/mounts', 'r')
for line in f.readlines():
if line.find('/dev/sd') != -1:
parts = line.split()
cmd = 'umount -l ' + parts[0]
system(cmd)
f.close()
def checkClose(self):
        if self.canclose:
self.close()
def wizClose(self, msg):
self.session.openWithCallback(self.close, MessageBox, msg, MessageBox.TYPE_INFO)
def succesS(self):
text = _("The %s %s will be now restarted to generate a new device UID.\nDon't forget to remap your device after the reboot.\nPress OK to continue") % (getMachineBrand(), getMachineName())
mybox = self.session.openWithCallback(self.hreBoot, MessageBox, text, MessageBox.TYPE_INFO)
def hreBoot(self, answer):
self.session.open(TryQuitMainloop, 2)
|
MarcosCommunity/odoo
|
refs/heads/marcos-8.0
|
addons/website_crm_partner_assign/__openerp__.py
|
322
|
{
'name': 'Resellers',
'category': 'Website',
'website': 'https://www.odoo.com/page/website-builder',
'summary': 'Publish Your Channel of Resellers',
'version': '1.0',
'description': """
Publish and Assign Partner
==========================
""",
'author': 'OpenERP SA',
'depends': ['crm_partner_assign','website_partner', 'website_google_map'],
'data': [
'views/partner_grade.xml',
'views/website_crm_partner_assign.xml',
],
'demo': [
'data/res_partner_grade_demo.xml',
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
DARKPOP/external_chromium_org
|
refs/heads/dark-5.1
|
chrome/common/extensions/docs/server2/permissions_data_source.py
|
41
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from itertools import ifilter
from operator import itemgetter
from data_source import DataSource
from extensions_paths import PRIVATE_TEMPLATES
from future import Future
from platform_util import GetPlatforms
def _ListifyPermissions(permissions):
  '''Filter out any permission that lacks a description or whose name ends
  with 'Private', then sort the remaining permission features by name into a
  list.
  '''
def filter_permissions(perm):
return 'description' in perm and not perm['name'].endswith('Private')
return sorted(
ifilter(filter_permissions, permissions.itervalues()),
key=itemgetter('name'))
def _AddDependencyDescriptions(permissions, api_features):
'''Use |api_features| to determine the dependencies APIs have on permissions.
Add descriptions to |permissions| based on those dependencies.
'''
for name, permission in permissions.iteritems():
# Don't overwrite the description created by expanding a partial template.
if 'partial' in permission:
continue
has_deps = False
if name in api_features:
for dependency in api_features[name].get('dependencies', ()):
if dependency.startswith('permission:'):
has_deps = True
if has_deps:
permission['partial'] = 'permissions/generic_description.html'
class PermissionsDataSource(DataSource):
'''Load and format permissions features to be used by templates.
'''
def __init__(self, server_instance, request):
self._platform_bundle = server_instance.platform_bundle
self._object_store = server_instance.object_store_creator.Create(
PermissionsDataSource)
self._template_cache = server_instance.compiled_fs_factory.ForTemplates(
server_instance.host_file_system_provider.GetMaster())
def _CreatePermissionsDataForPlatform(self, platform):
features_bundle = self._platform_bundle.GetFeaturesBundle(platform)
api_features_future = features_bundle.GetAPIFeatures()
permission_features_future = features_bundle.GetPermissionFeatures()
def resolve():
api_features = api_features_future.Get()
permission_features = permission_features_future.Get()
_AddDependencyDescriptions(permission_features, api_features)
# Turn partial templates into descriptions, ensure anchors are set.
for permission in permission_features.values():
        if 'anchor' not in permission:
permission['anchor'] = permission['name']
if 'partial' in permission:
permission['description'] = self._template_cache.GetFromFile(
PRIVATE_TEMPLATES + permission['partial']).Get()
del permission['partial']
return _ListifyPermissions(permission_features)
return Future(callback=resolve)
def _CreatePermissionsData(self):
permissions_data_futures = dict(
(platform, self._CreatePermissionsDataForPlatform(platform))
for platform in GetPlatforms())
def resolve():
return dict(('declare_' + platform, future.Get())
for platform, future in permissions_data_futures.iteritems())
return Future(callback=resolve)
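  # The resolved dict maps keys of the form 'declare_<platform>' (platform
  # names come from GetPlatforms()) to the sorted permission lists that the
  # templates render.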
def _GetCachedPermissionsData(self):
data = self._object_store.Get('permissions_data').Get()
if data is None:
data = self._CreatePermissionsData().Get()
self._object_store.Set('permissions_data', data)
return data
def get(self, key):
return self._GetCachedPermissionsData().get(key)
def Refresh(self, path):
return self._CreatePermissionsData()
|
tempbottle/rethinkdb
|
refs/heads/next
|
test/changefeeds/squash_base.py
|
23
|
#!/usr/bin/env python
# Copyright 2014-2015 RethinkDB, all rights reserved.
import itertools, os, sys, time
try:
xrange
except NameError:
xrange = range
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common'))
import rdb_unittest, utils
# ---
class SquashBase(rdb_unittest.RdbTestCase):
'''Squash tests'''
# Local variables
squash = True
field = "id"
generator = itertools.count()
records = 0
limit = 0
multi = False
def setUp(self):
super(SquashBase, self).setUp()
        # The generator emits values in increasing order, so we store the first
        # twenty values for later use: to do inserts and updates, and to test
        # that no change is emitted for them.
self._generator_initial_len = 20
self._generator_initial = []
for x in xrange(self._generator_initial_len):
self._generator_initial.append((x, next(self.generator)))
        # The primary key is used to break ties in a multi index, so
        # `self._document` can generate an increasing key instead of relying on
        # auto-generated ones.
self._key_generator = itertools.count(self._generator_initial_len)
if self.multi:
            # The generator for multi indices returns an array, and the length
            # of that array is a factor in the number of results from a changefeed.
self._multi_len = len(self._generator_initial[0][1])
self._primary_key = self.r.db(self.dbName) \
.table(self.tableName) \
.info()["primary_key"] \
.run(self.conn)
# Generate the records ..
for x in xrange(self.records):
self.r.db(self.dbName) \
.table(self.tableName) \
.insert(self._document(next(self.generator))) \
.run(self.conn)
# .. and add the requested index if necessary
if self.field != self._primary_key:
self.r.db(self.dbName) \
.table(self.tableName) \
.index_create(self.field, multi=self.multi) \
.run(self.conn)
self.r.db(self.dbName) \
.table(self.tableName) \
.index_wait(self.field) \
.run(self.conn)
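            # index_wait blocks until the secondary index has finished
            # building, so the queries below can rely on it immediately.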
# The changefeeds are requested through a separate connection
self._feed_conn = self.r.connect(
self.cluster[0].host, self.cluster[0].driver_port)
def _document(self, value, key=None, key_generate=None):
# An increasing primary key is automatically added to multi indices as they
# influence sorting.
if key_generate is None:
key_generate = self.multi
document = {
self.field: value
}
if key is None and key_generate:
key = "g-%i" % next(self._key_generator)
if key is not None:
self.assertTrue(self.field != self._primary_key)
document[self._primary_key] = key
return document
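    # Illustrative shapes (values made up): a plain index yields e.g.
    # {'id': 42}, while a multi index with a generated key yields something
    # like {'multi': [44, -1], 'id': 'g-20'}.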
def test_insert(self):
query = self.r.db(self.dbName) \
.table(self.tableName) \
.order_by(index=self.r.desc(self.field)) \
.limit(self.limit) \
.changes(squash=self.squash)
with utils.NextWithTimeout(query.run(self._feed_conn), stopOnEmpty=False) as feed:
changes = min(self.records, self.limit)
if self.multi:
changes = min(
self.records * self._multi_len, self.limit)
initial = []
for x in xrange(changes):
initial.append(next(feed))
            # If the number of records is greater than the limit then insert a low
            # value and verify it does not show up as a change, due to the `order_by`.
if self.records >= self.limit:
_, value = self._generator_initial.pop()
self.r.db(self.dbName) \
.table(self.tableName) \
.insert(self._document(value)) \
.run(self.conn)
self.assertRaises(Exception, feed.next)
# Insert a value and verify it does show up.
value = next(self.generator)
document = self._document(value)
key = self.r.db(self.dbName) \
.table(self.tableName) \
.insert(document, return_changes=True) \
.run(self.conn) \
.get("generated_keys", [document.get("id", value)])[0]
            # With multi indices a single document may show up multiple times in
            # the changefeed; `changes` is the number of times we expect it to
            # appear, and we verify it does indeed show up that often.
changes = 1
if self.multi:
changes = min(self._multi_len, self.limit)
for x in xrange(changes):
feed_next = next(feed)
self.assertTrue("old_val" in feed_next)
self.assertTrue("new_val" in feed_next)
# It depends on whether the initial limit was fulfilled whether the
# change has an "old_val" set to `None` or a document.
if len(initial) + x >= self.limit:
# Note that the initial values are ordered descending, hence
# the comparison with initial[-(x + 1)]
self.assertEqual(
feed_next["old_val"][self.field],
initial[-(x + 1)]["new_val"][self.field])
else:
self.assertEqual(feed_next["old_val"], None)
self.assertEqual(feed_next["new_val"]["id"], key)
self.assertEqual(feed_next["new_val"][self.field], value)
def test_insert_batch(self):
# FIXME: Python 2.7 has new facilities allowing tests to be skipped, use those
# when we no longer need to support 2.6
if self.squash == True:
            # With squash True it might not squash aggressively enough for this to
            # be predictable, skip it
return
query = self.r.db(self.dbName) \
.table(self.tableName) \
.order_by(index=self.r.desc(self.field)) \
.limit(self.limit) \
.changes(squash=self.squash)
with utils.NextWithTimeout(query.run(self._feed_conn), stopOnEmpty=False) as feed:
changes = min(self.records, self.limit)
if self.multi:
changes = min(
self.records * self._multi_len, self.limit)
initial = []
for x in xrange(changes):
initial.append(next(feed))
            # Insert two more documents than the limit as a single batch; due to
            # the squashing we should never get a change for the first two.
documents = []
for x in xrange(self.limit + 2):
value = next(self.generator)
if self.field == self._primary_key:
documents.append(self._document(value))
else:
documents.append(self._document(value, key=x))
            # A document with a duplicate primary key should be ignored as well.
error = documents[-1].copy()
error.update({"error": True})
documents.append(error)
self.r.db(self.dbName) \
.table(self.tableName) \
.insert(documents) \
.run(self.conn)
for x in xrange(self.limit):
feed_next = next(feed)
self.assertTrue("old_val" in feed_next)
self.assertTrue("new_val" in feed_next)
if len(initial) + x >= self.limit:
self.assertTrue(
feed_next["old_val"] in map(lambda x: x["new_val"], initial))
else:
self.assertEqual(feed_next["old_val"], None)
self.assertTrue(feed_next["new_val"] in documents[2:])
self.assertTrue(not "error" in feed_next["new_val"])
def test_delete(self):
query = self.r.db(self.dbName) \
.table(self.tableName) \
.order_by(index=self.r.desc(self.field)) \
.limit(self.limit) \
.changes(squash=self.squash)
with utils.NextWithTimeout(query.run(self._feed_conn), stopOnEmpty=False) as feed:
changes = min(self.records, self.limit)
if self.multi:
changes = min(
self.records * self._multi_len, self.limit)
initial = []
for x in xrange(changes):
initial.append(next(feed))
# If the number of records is greater than the limit then insert and
# subsequently delete a low value, and verify it does not show up as a
# change because of the `order_by`.
if self.records >= self.limit:
_, value = self._generator_initial.pop()
key = self.r.db(self.dbName) \
.table(self.tableName) \
.insert(self._document(value), return_changes=True) \
.run(self.conn).get("generated_keys", [value])[0]
self.r.db(self.dbName) \
.table(self.tableName) \
.get(key) \
.delete() \
.run(self.conn)
            # When inserting this document we have to do a bit of a dance to get
            # its primary key: it might be the field itself, generated by us
            # because of a multi index, or auto-generated by the server.
value = next(self.generator)
document = self._document(value)
key = self.r.db(self.dbName) \
.table(self.tableName) \
.insert(document, return_changes=True) \
.run(self.conn) \
.get("generated_keys", [document.get("id", value)])[0]
changes = 1
if self.multi:
changes = min(self._multi_len, self.limit)
for x in xrange(changes):
next(feed)
# With the primary key delete the record again.
self.r.db(self.dbName) \
.table(self.tableName) \
.get(key) \
.delete() \
.run(self.conn)
for x in xrange(changes):
feed_next = next(feed)
self.assertTrue("old_val" in feed_next)
self.assertTrue("new_val" in feed_next)
self.assertTrue(feed_next["old_val"][self.field] < value or (
feed_next["old_val"][self.field] == value and
feed_next["old_val"]["id"] <= key))
if len(initial) + x < self.limit:
self.assertEqual(feed_next["new_val"], None)
def test_replace_key(self):
# FIXME: Python 2.7 has new facilities allowing tests to be skipped, use those
# when we no longer need to support 2.6
if self.field == self._primary_key:
            # The primary key cannot be updated, skip it
return
query = self.r.db(self.dbName) \
.table(self.tableName) \
.order_by(index=self.r.desc(self.field)) \
.limit(self.limit) \
.changes(squash=self.squash)
with utils.NextWithTimeout(query.run(self._feed_conn), stopOnEmpty=False) as feed:
changes = min(self.records, self.limit)
if self.multi:
changes = min(
self.records * self._multi_len, self.limit)
initial = []
for x in xrange(changes):
initial.append(next(feed))
            # Insert a low value; this may or may not cause changes, depending on
            # whether the initial changes already reached the limit.
index, value = self._generator_initial.pop()
document = self._document(value, "g-%i" % index)
key = self.r.db(self.dbName) \
.table(self.tableName) \
.insert(document, return_changes=True) \
.run(self.conn) \
.get("generated_keys", [document.get("id", value)])[0]
changes = 0
if len(initial) < self.limit:
changes = 1
if self.multi:
changes = min(self._multi_len, self.limit - len(initial))
for x in xrange(changes):
feed_next = next(feed)
self.assertTrue("old_val" in feed_next)
self.assertTrue("new_val" in feed_next)
if len(initial) + x < self.limit:
self.assertEqual(feed_next["old_val"], None)
self.assertEqual(feed_next["new_val"]["id"], key)
self.assertEqual(feed_next["new_val"][self.field], value)
# Update the key to a higher value, this should produce a change (or changes
# in the case of a multi index).
update = next(self.generator)
self.r.db(self.dbName) \
.table(self.tableName) \
.get(key) \
.update({
self.field: update
}) \
.run(self.conn)
changes = 1
if self.multi:
changes = min(self._multi_len, self.limit)
for x in xrange(changes):
feed_next = next(feed)
self.assertTrue("old_val" in feed_next)
self.assertTrue("new_val" in feed_next)
self.assertTrue(
feed_next["old_val"][self.field] <= feed_next["new_val"][self.field])
self.assertEqual(feed_next["new_val"]["id"], key)
self.assertEqual(feed_next["new_val"][self.field], update)
# Update the key back to the lower value.
self.r.db(self.dbName) \
.table(self.tableName) \
.get(key) \
.update({
self.field: value
}) \
.run(self.conn)
changes = 1
if self.multi:
changes = min(self._multi_len, self.limit)
for x in xrange(changes):
feed_next = next(feed)
self.assertTrue("old_val" in feed_next)
self.assertTrue("new_val" in feed_next)
self.assertTrue(feed_next["old_val"][self.field] <= update)
self.assertTrue(feed_next["new_val"][self.field] >= value)
def bare_test_squash_to_nothing_insert_delete(self):
# FIXME: Python 2.7 has new facilities allowing tests to be skipped, use those
# when we no longer need to support 2.6
if self.squash == True:
# This is too unpredictable
return
query = self.r.db(self.dbName) \
.table(self.tableName) \
.order_by(index=self.r.desc(self.field)) \
.limit(self.limit) \
.changes(squash=self.squash)
with utils.NextWithTimeout(query.run(self._feed_conn), stopOnEmpty=False) as feed:
changes = min(self.records, self.limit)
if self.multi:
changes = min(
self.records * self._multi_len, self.limit)
initial = []
for x in xrange(changes):
initial.append(next(feed))
# An insert followed by a delete within a two-second squashing period should
# not lead to a change being emitted.
value = next(self.generator)
key = self.r.db(self.dbName) \
.table(self.tableName) \
.insert(self._document(value, key_generate=False), return_changes=True) \
.run(self.conn).get("generated_keys", [value])[0]
self.r.db(self.dbName) \
.table(self.tableName) \
.get(key) \
.delete() \
.run(self.conn)
def test_squash_to_nothing_delete_insert(self):
# This test is similar to the one above but must be done in a separate function
# due to timing issues
# FIXME: Python 2.7 has new facilities allowing tests to be skipped, use those
# when we no longer need to support 2.6
if self.squash == True:
# This is too unpredictable
return
query = self.r.db(self.dbName) \
.table(self.tableName) \
.order_by(index=self.r.desc(self.field)) \
.limit(self.limit) \
.changes(squash=self.squash)
with utils.NextWithTimeout(query.run(self._feed_conn), stopOnEmpty=False) as feed:
changes = min(self.records, self.limit)
if self.multi:
changes = min(
self.records * self._multi_len, self.limit)
initial = []
for x in xrange(changes):
initial.append(next(feed))
# As above, deleting and re-inserting a value should not lead to a change
# being emitted.
if len(initial):
self.r.db(self.dbName) \
.table(self.tableName) \
.get(initial[0]["new_val"]["id"]) \
.delete() \
.run(self.conn)
self.r.db(self.dbName) \
.table(self.tableName) \
.insert(initial[0]["new_val"]) \
.run(self.conn)
class MultiGenerator(object):
def __init__(self):
self._count = itertools.count(3)
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
# This is crafted to be predictable, imagine you have an initial set
# [
# {u'new_val': {u'insert': True, u'multi': [44, -1], u'id': u'g-20'}},
# {u'new_val': {u'insert': True, u'multi': [45, -1], u'id': u'g-21'}},
# {u'new_val': {u'insert': True, u'multi': [46, -1], u'id': u'g-22'}},
# {u'new_val': {u'insert': True, u'multi': [47, -1], u'id': u'g-23'}},
# {u'new_val': {u'insert': True, u'multi': [48, -1], u'id': u'g-24'}},
# ->
# {u'new_val': {u'insert': True, u'multi': [44, -1], u'id': u'g-20'}},
# {u'new_val': {u'insert': True, u'multi': [45, -1], u'id': u'g-21'}},
# {u'new_val': {u'insert': True, u'multi': [46, -1], u'id': u'g-22'}},
# {u'new_val': {u'insert': True, u'multi': [47, -1], u'id': u'g-23'}},
# {u'new_val': {u'insert': True, u'multi': [48, -1], u'id': u'g-24'}}
# ]
# and want to insert the document
# {'insert': True, 'multi': [43, -1], u'id': 'g-19'}.
#
# This will get inserted once in the position marked by the arrow, which is
# hard to calculate or predict.
        return [next(self._count)] * 3
|
macph/easement-curve
|
refs/heads/master
|
tests/tests_curve.py
|
1
|
# MIT License, copyright Ewan Macpherson, 2016; see LICENCE in root directory
# Test script for the TrackSection class
import math
import os
import sys
import unittest
sys.path.insert(0, os.path.abspath('..'))
import ec.common
import ec.coord
import ec.section
import ec.curve
from tests.tests_common import CustomAssertions
class BaseTCTests(unittest.TestCase, CustomAssertions):
def setUp(self):
super(BaseTCTests, self).setUp()
minimum_high, speed_high = 500, 120
minimum_low, speed_low = 200, 80
self.start_straight = ec.coord.TrackCoord(
pos_x=217.027, pos_z=34.523, rotation=48.882, quad=ec.coord.Q.NE, curvature=0)
self.start_curved = ec.coord.TrackCoord(
pos_x=354.667, pos_z=137.112, rotation=59.824, quad=ec.coord.Q.NE, curvature=-1/600)
self.start_curved_add = ec.coord.TrackCoord(
pos_x=287.741, pos_z=92.965, rotation=53.356, quad=ec.coord.Q.NE, curvature=0)
self.end_left = ec.coord.TrackCoord(
pos_x=467.962, pos_z=465.900, rotation=12.762, quad=ec.coord.Q.NE, curvature=0)
self.end_right = ec.coord.TrackCoord(
pos_x=582.769, pos_z=223.772, rotation=75.449, quad=ec.coord.Q.NE, curvature=0)
# For curves with diff > 270
self.end_far_left = ec.coord.TrackCoord(
pos_x=-123.550, pos_z=199.813, rotation=5.913, quad=ec.coord.Q.SW, curvature=0)
self.end_far_right = ec.coord.TrackCoord(
pos_x=296.508, pos_z=681.428-1024, rotation=72.687, quad=ec.coord.Q.NW, curvature=0)
# For curves with diff = 180
self.end_reverse_left = ec.coord.TrackCoord(
pos_x=6.616, pos_z=872.368, rotation=48.882, quad=ec.coord.Q.SW, curvature=0)
self.end_reverse_right = ec.coord.TrackCoord(
pos_x=569.182, pos_z=553.873-1024, rotation=48.882, quad=ec.coord.Q.SW, curvature=0)
        # To test RoC with a low angle diff; should raise an exception
self.end_low_angle = ec.coord.TrackCoord(
pos_x=400.495, pos_z=178.755, rotation=53.612, quad=ec.coord.Q.NE, curvature=0)
self.straight_high = ec.curve.TrackCurve(self.start_straight, minimum_high, speed_high)
self.straight_low = ec.curve.TrackCurve(self.start_straight, minimum_low, speed_low)
self.right = ec.curve.TrackCurve(self.start_curved, minimum_high, speed_high)
def tearDown(self):
super(BaseTCTests, self).tearDown()
del self.start_straight, self.start_curved, self.start_curved_add
del self.end_left, self.end_right, self.end_far_left, self.end_far_right
del self.end_reverse_left, self.end_reverse_right, self.end_low_angle
del self.straight_high, self.straight_low, self.right
def test_create_easement(self):
ts = ec.section.TrackSection(self.start_straight, 500, 120)
self.assertEqual(ts.easement_curve(0.0001).__dict__,
self.straight_high.easement_curve(0.0001).__dict__)
def test_create_static(self):
ts = ec.section.TrackSection(self.start_straight, 500, 120)
ts.start.curvature = 0.0001
self.straight_high.start.curvature = 0.0001
self.assertEqual(ts.static_curve(math.pi/4).__dict__,
self.straight_high.static_curve(math.pi/4).__dict__)
class DiffAngleTests(BaseTCTests):
def test_exception_parallel(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'must not be parallel'):
self.straight_high.find_diff_angle(self.start_straight)
def test_diff_left(self):
diff_b = self.straight_high.find_diff_angle(self.end_left)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (36.12, False))
def test_diff_right(self):
diff_b = self.straight_high.find_diff_angle(self.end_right)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (26.567, True))
def test_diff_reverse_left(self):
diff_b = self.straight_high.find_diff_angle(self.end_reverse_left)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (180, False))
def test_diff_reverse_right(self):
diff_b = self.straight_high.find_diff_angle(self.end_reverse_right)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (180, True))
def test_diff_far_left(self):
self.straight_high.clockwise = False
diff_b = self.straight_high.find_diff_angle(self.end_far_left, True)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (222.969, False))
def test_diff_not_far_left(self):
diff_b = self.straight_high.find_diff_angle(self.end_far_left)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (137.031, True))
def test_diff_far_right(self):
self.straight_high.clockwise = True
diff_b = self.straight_high.find_diff_angle(self.end_far_right, True)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (238.431, True))
def test_diff_not_far_right(self):
diff_b = self.straight_high.find_diff_angle(self.end_far_right)
self.assertDataAlmostEqual((diff_b.deg, self.straight_high.clockwise), (121.569, False))
class AlignmentTests(BaseTCTests):
def test_exception_parallel(self):
self.end_reverse_left.bearing = self.end_reverse_left.bearing.flip()
with self.assertRaisesRegex(ec.curve.CurveError, 'must not be parallel'):
self.straight_high.check_start_alignment(self.end_reverse_left)
def test_alignment_left(self):
self.assertTrue(self.straight_high.check_start_alignment(self.end_left))
def test_alignment_right(self):
self.assertTrue(self.straight_high.check_start_alignment(self.end_right))
def test_alignment_far_left(self):
self.assertFalse(self.straight_high.check_start_alignment(self.end_far_left))
def test_alignment_far_right(self):
self.assertFalse(self.straight_high.check_start_alignment(self.end_far_right))
def test_alignment_reverse_left(self):
self.assertFalse(self.straight_high.check_start_alignment(self.end_reverse_left))
def test_alignment_reverse_right(self):
self.assertFalse(self.straight_high.check_start_alignment(self.end_reverse_right))
class CurveFitRadiusTests(BaseTCTests):
def test_exception_curve_radius_minimum_radius(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'Radius 350 must be greater'):
self.straight_high.curve_fit_radius(self.end_left, 350)
def test_exception_curve_radius_wrong_object(self):
with self.assertRaisesRegex(AttributeError, 'need to be TrackCoord'):
self.straight_high.curve_fit_radius(None, 600)
def test_exception_curve_radius_cannot_fit(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'The easement curves are too long'):
self.straight_high.curve_fit_radius(self.end_low_angle, 500)
def test_exception_curve_radius_curved(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'Both tracks must be straight'):
self.right.curve_fit_radius(self.end_left, 500)
def test_exception_curve_radius_reverse(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'This method does not work'):
self.straight_high.curve_fit_radius(self.end_reverse_left, 500)
def test_curve_assert_radius(self):
curve = self.straight_high.curve_fit_radius(self.end_left, 600)
self.assertAlmostEqual(curve[2].radius, 600)
def test_curve_radius_left(self):
curve = self.straight_high.curve_fit_radius(self.end_left, 600)
self.assertTrackAlign(curve[-1], self.end_left)
def test_curve_radius_right(self):
curve = self.straight_high.curve_fit_radius(self.end_right, 600)
self.assertTrackAlign(curve[-1], self.end_right)
def test_curve_radius_can_fit(self):
curve = self.straight_high.curve_fit_radius(self.end_low_angle, 1200)
self.assertTrackAlign(curve[-1], self.end_low_angle)
def test_curve_radius_far_left(self):
curve = self.straight_low.curve_fit_radius(self.end_far_left, 225, False)
self.assertTrackAlign(curve[-1], self.end_far_left)
def test_curve_radius_far_right(self):
curve = self.straight_low.curve_fit_radius(self.end_far_right, 225, True)
self.assertTrackAlign(curve[-1], self.end_far_right)
class CurveFitLengthTests(BaseTCTests):
def test_exception_curve_radius_minimum_radius(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'The required radius of curvature'):
self.straight_high.curve_fit_length(self.end_far_left, 100, False)
def test_exception_curve_radius_wrong_object(self):
with self.assertRaisesRegex(AttributeError, 'need to be TrackCoord'):
self.straight_high.curve_fit_length(None, 600)
def test_exception_curve_radius_curved(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'Both tracks must be straight'):
self.right.curve_fit_length(self.end_left, 200)
def test_exception_curve_radius_reverse(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'This method does not work'):
self.straight_high.curve_fit_length(self.end_reverse_left, 500)
def test_curve_assert_length(self):
curve = self.straight_high.curve_fit_length(self.end_left, 300)
self.assertAlmostEqual(curve[2].org_length, 300, 4)
def test_curve_radius_left(self):
curve = self.straight_high.curve_fit_length(self.end_left, 300)
self.assertTrackAlign(curve[-1], self.end_left)
def test_curve_radius_right(self):
curve = self.straight_high.curve_fit_length(self.end_right, 300)
self.assertTrackAlign(curve[-1], self.end_right)
def test_curve_radius_far_left(self):
curve = self.straight_low.curve_fit_length(self.end_far_left, 1000, False)
self.assertTrackAlign(curve[-1], self.end_far_left)
def test_curve_radius_far_right(self):
curve = self.straight_low.curve_fit_length(self.end_far_right, 1000, True)
self.assertTrackAlign(curve[-1], self.end_far_right)
class CurveFitPointTests(BaseTCTests):
def test_exception_curve_point_end_curved(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'end track must be straight'):
self.straight_high.curve_fit_point(self.start_curved)
def test_exception_curve_point_parallel(self):
self.end_reverse_left.bearing = self.end_reverse_left.bearing.flip()
with self.assertRaisesRegex(ec.curve.CurveError, 'must not be parallel'):
self.straight_high.curve_fit_point(self.end_reverse_left)
def test_exception_curve_point_wrong_object(self):
with self.assertRaisesRegex(AttributeError, 'TrackCoord'):
self.straight_high.curve_fit_point(None)
def test_exception_curve_point_too_close(self):
with self.assertRaisesRegex(ec.curve.CurveError, 'is too close'):
self.straight_high.curve_fit_point(self.end_reverse_left)
def test_exception_curve_point_curved_right_opposite(self):
self.right.get_static_radius(self.start_curved_add)
with self.assertRaisesRegex(ec.curve.CurveError, 'not aligned'):
self.right.curve_fit_point(self.end_left)
def test_curve_point_left(self):
curve = self.straight_high.curve_fit_point(self.end_left)
self.assertTrackAlign(curve[-1], self.end_left)
def test_curve_point_right(self):
curve = self.straight_high.curve_fit_point(self.end_right)
self.assertTrackAlign(curve[-1], self.end_right)
def test_curve_point_far_left(self):
curve = self.straight_low.curve_fit_point(self.end_far_left)
self.assertTrackAlign(curve[-1], self.end_far_left)
def test_curve_point_far_right(self):
curve = self.straight_low.curve_fit_point(self.end_far_right)
self.assertTrackAlign(curve[-1], self.end_far_right)
def test_curve_point_reverse_left(self):
curve = self.straight_low.curve_fit_point(self.end_reverse_left)
self.assertTrackAlign(curve[-1], self.end_reverse_left)
def test_curve_point_reverse_right(self):
curve = self.straight_low.curve_fit_point(self.end_reverse_right)
self.assertTrackAlign(curve[-1], self.end_reverse_right)
def test_curve_point_curved_right(self):
curve = self.right.curve_fit_point(self.end_right, self.start_curved_add)
self.assertTrackAlign(curve[-1], self.end_right)
|
beregond/pyhistory
|
refs/heads/master
|
pyhistory/file_config.py
|
1
|
from pathlib import Path
from six.moves.configparser import ConfigParser, NoSectionError, NoOptionError
from .utilities import find_file_across_parents
FILE_TO_CHECK = 'setup.cfg'
CONFIG_SECTION = 'pyhistory'
def get_defaults_from_config_file_if_exists(file_to_check=FILE_TO_CHECK):
try:
config_file = find_file_across_parents(Path.cwd(), file_to_check)
except RuntimeError:
return {}
return _get_config_from_file(config_file)
def _get_config_from_file(config_file):
parser = ConfigParser()
parser.read(str(config_file))
return _ConfigGetter(parser, CONFIG_SECTION)
class _ConfigGetter(object):
def __init__(self, parser, section):
self.parser = parser
self.section = section
def get(self, key, default=None):
try:
return self.parser.get(self.section, key)
except (NoSectionError, NoOptionError):
return default
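# Illustrative usage (option name made up), given a setup.cfg somewhere up the
# directory tree containing:
#
#   [pyhistory]
#   history_dir = history
#
# get_defaults_from_config_file_if_exists().get('history_dir')  # -> 'history'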
|
niasand/learn_how_to_flask
|
refs/heads/master
|
blog/manage.py
|
1
|
# -*- coding: utf-8 -*-
from flask_script import Manager, Server
from app import app
from app.models import BillRecord
manager = Manager(app)
manager.add_command("runserver",Server(host='0.0.0.0',port=9000,use_debugger=True))
@manager.command
def save_todo():
todo = BillRecord(money="19.89",shop="amazon",content="Kindle")
todo.save()
if __name__ == "__main__":
manager.run()
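# Illustrative invocation:
#   python manage.py runserver   # serves on 0.0.0.0:9000 with the debugger on
#   python manage.py save_todo   # inserts the sample BillRecord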
|
ujjwalwahi/odoo
|
refs/heads/8.0
|
addons/website_mail/__init__.py
|
1577
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
import models
|
hbhzwj/imalse
|
refs/heads/master
|
tools/ns-allinone-3.14.1/pybindgen-0.15.0.809/pybindgen/cppattribute.py
|
5
|
"""
Wraps C++ class instance/static attributes.
"""
from typehandlers.base import ForwardWrapperBase, ReverseWrapperBase
from typehandlers import codesink
import settings
import utils
class PyGetter(ForwardWrapperBase):
"""generates a getter, for use in a PyGetSetDef table"""
def generate(self, code_sink):
"""Generate the code of the getter to the given code sink"""
raise NotImplementedError
def generate_call(self):
"""(not actually called)"""
raise AssertionError
class PySetter(ReverseWrapperBase):
"""generates a setter, for use in a PyGetSetDef table"""
NO_GIL_LOCKING = True
def generate(self, code_sink):
"""Generate the code of the setter to the given code sink"""
raise NotImplementedError
def generate_python_call(self):
"""(not actually called)"""
raise AssertionError
class CppInstanceAttributeGetter(PyGetter):
'''
A getter for a C++ instance attribute.
'''
def __init__(self, value_type, class_, attribute_name, getter=None):
"""
:param value_type: a ReturnValue object handling the value type;
:param class_: the class (CppClass object)
:param attribute_name: name of attribute
:param getter: None, or name of a method of the class used to get the value
"""
super(CppInstanceAttributeGetter, self).__init__(
value_type, [], "return NULL;", "return NULL;", no_c_retval=True)
self.class_ = class_
self.attribute_name = attribute_name
self.getter = getter
self.c_function_name = "_wrap_%s__get_%s" % (self.class_.pystruct,
self.attribute_name)
if self.getter is None:
value_type.value = "self->obj->%s" % self.attribute_name
else:
value_type.value = "self->obj->%s()" % self.getter
def generate_call(self):
"virtual method implementation; do not call"
pass
def generate(self, code_sink):
"""
:param code_sink: a CodeSink instance that will receive the generated code
"""
tmp_sink = codesink.MemoryCodeSink()
self.generate_body(tmp_sink)
code_sink.writeln("static PyObject* %s(%s *self, void * PYBINDGEN_UNUSED(closure))"
% (self.c_function_name, self.class_.pystruct))
code_sink.writeln('{')
code_sink.indent()
tmp_sink.flush_to(code_sink)
code_sink.unindent()
code_sink.writeln('}')
class CppStaticAttributeGetter(PyGetter):
'''
A getter for a C++ class static attribute.
'''
def __init__(self, value_type, class_, attribute_name):
"""
:param value_type: a ReturnValue object handling the value type;
        :param class_: the class (CppClass object)
        :param attribute_name: name of attribute
"""
super(CppStaticAttributeGetter, self).__init__(
value_type, [], "return NULL;", "return NULL;", no_c_retval=True)
self.class_ = class_
self.attribute_name = attribute_name
self.c_function_name = "_wrap_%s__get_%s" % (self.class_.pystruct,
self.attribute_name)
value_type.value = "%s::%s" % (self.class_.full_name, self.attribute_name)
def generate_call(self):
"virtual method implementation; do not call"
pass
def generate(self, code_sink):
"""
:param code_sink: a CodeSink instance that will receive the generated code
"""
tmp_sink = codesink.MemoryCodeSink()
self.generate_body(tmp_sink)
code_sink.writeln("static PyObject* %s(PyObject * PYBINDGEN_UNUSED(obj),"
" void * PYBINDGEN_UNUSED(closure))"
% self.c_function_name)
code_sink.writeln('{')
code_sink.indent()
tmp_sink.flush_to(code_sink)
code_sink.unindent()
code_sink.writeln('}')
class CppInstanceAttributeSetter(PySetter):
'''
A setter for a C++ instance attribute.
'''
def __init__(self, value_type, class_, attribute_name, setter=None):
"""
:param value_type: a ReturnValue object handling the value type;
:param class_: the class (CppClass object)
:param attribute_name: name of attribute
:param setter: None, or name of a method of the class used to set the value
"""
super(CppInstanceAttributeSetter, self).__init__(
value_type, [], "return -1;")
self.class_ = class_
self.attribute_name = attribute_name
self.setter = setter
self.c_function_name = "_wrap_%s__set_%s" % (self.class_.pystruct,
self.attribute_name)
def generate(self, code_sink):
"""
:param code_sink: a CodeSink instance that will receive the generated code
"""
self.declarations.declare_variable('PyObject*', 'py_retval')
self.before_call.write_code(
'py_retval = Py_BuildValue((char *) "(O)", value);')
self.before_call.add_cleanup_code('Py_DECREF(py_retval);')
if self.setter is not None:
## if we have a setter method, redirect the value to a temporary variable
if not self.return_value.REQUIRES_ASSIGNMENT_CONSTRUCTOR:
value_var = self.declarations.declare_variable(self.return_value.ctype, 'tmp_value')
else:
value_var = self.declarations.reserve_variable('tmp_value')
self.return_value.value = value_var
else:
## else the value is written directly to a C++ instance attribute
self.return_value.value = "self->obj->%s" % self.attribute_name
self.return_value.REQUIRES_ASSIGNMENT_CONSTRUCTOR = False
self.return_value.convert_python_to_c(self)
parse_tuple_params = ['py_retval']
params = self.parse_params.get_parameters()
assert params[0][0] == '"'
params[0] = '(char *) ' + params[0]
parse_tuple_params.extend(params)
self.before_call.write_error_check('!PyArg_ParseTuple(%s)' %
(', '.join(parse_tuple_params),))
if self.setter is not None:
## if we have a setter method, now is the time to call it
self.after_call.write_code("self->obj->%s(%s);" % (self.setter, value_var))
## cleanup and return
self.after_call.write_cleanup()
self.after_call.write_code('return 0;')
## now generate the function itself
code_sink.writeln("static int %s(%s *self, PyObject *value, void * PYBINDGEN_UNUSED(closure))"
% (self.c_function_name, self.class_.pystruct))
code_sink.writeln('{')
code_sink.indent()
self.declarations.get_code_sink().flush_to(code_sink)
code_sink.writeln()
self.before_call.sink.flush_to(code_sink)
self.after_call.sink.flush_to(code_sink)
code_sink.unindent()
code_sink.writeln('}')
class CppStaticAttributeSetter(PySetter):
'''
A setter for a C++ class static attribute.
'''
def __init__(self, value_type, class_, attribute_name):
"""
:param value_type: a ReturnValue object handling the value type;
:param class_: the class (CppClass object)
:param attribute_name: name of attribute
"""
super(CppStaticAttributeSetter, self).__init__(
value_type, [], "return -1;")
self.class_ = class_
self.attribute_name = attribute_name
self.c_function_name = "_wrap_%s__set_%s" % (self.class_.pystruct,
self.attribute_name)
value_type.value = "%s::%s" % (self.class_.full_name, self.attribute_name)
value_type.REQUIRES_ASSIGNMENT_CONSTRUCTOR = False
def generate(self, code_sink):
"""
:param code_sink: a CodeSink instance that will receive the generated code
"""
self.declarations.declare_variable('PyObject*', 'py_retval')
self.before_call.write_code(
'py_retval = Py_BuildValue((char *) "(O)", value);')
self.before_call.add_cleanup_code('Py_DECREF(py_retval);')
self.return_value.convert_python_to_c(self)
parse_tuple_params = ['py_retval']
params = self.parse_params.get_parameters()
assert params[0][0] == '"'
params[0] = '(char *) ' + params[0]
parse_tuple_params.extend(params)
self.before_call.write_error_check('!PyArg_ParseTuple(%s)' %
(', '.join(parse_tuple_params),))
## cleanup and return
self.after_call.write_cleanup()
self.after_call.write_code('return 0;')
## now generate the function itself
code_sink.writeln(("static int %s(%s * PYBINDGEN_UNUSED(dummy), "
"PyObject *value, void * PYBINDGEN_UNUSED(closure))")
% (self.c_function_name, self.class_.pystruct))
code_sink.writeln('{')
code_sink.indent()
self.declarations.get_code_sink().flush_to(code_sink)
code_sink.writeln()
self.before_call.sink.flush_to(code_sink)
self.after_call.sink.flush_to(code_sink)
code_sink.unindent()
code_sink.writeln('}')
class PyMetaclass(object):
"""
Class that generates a Python metaclass
"""
def __init__(self, name, parent_metaclass_expr, getsets=None):
"""
:param name: name of the metaclass (should normally end with Meta)
:param parent_metaclass_expr: C expression that should give a
pointer to the parent metaclass
(should have a C type of
PyTypeObject*)
:param getsets: name of a PyGetSetDef C array variable, or None
"""
assert getsets is None or isinstance(getsets, PyGetSetDef)
assert isinstance(name, basestring)
assert isinstance(parent_metaclass_expr, basestring)
self.name = name
prefix = settings.name_prefix.capitalize()
self.pytypestruct = "Py%s%s_Type" % (prefix, self.name)
self.parent_metaclass_expr = parent_metaclass_expr
self.getsets = getsets
def generate(self, code_sink, module):
"""
Generate the metaclass to code_sink and register it in the module.
"""
code_sink.writeln('''
PyTypeObject %(pytypestruct)s = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
(char *) "%(name)s", /* tp_name */
0, /* tp_basicsize */
0, /* tp_itemsize */
0, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_GC|Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
0, /* tp_methods */
0, /* tp_members */
%(getset)s, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
0, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
0, /* tp_free */
0, /* tp_is_gc */
0, /* tp_bases */
0, /* tp_mro */
0, /* tp_cache */
0, /* tp_subclasses */
0, /* tp_weaklist */
0 /* tp_del */
};
''' % dict(pytypestruct=self.pytypestruct, name=self.name,
getset=(self.getsets and self.getsets.cname or '0')))
module.after_init.write_code("""
%(pytypestruct)s.tp_base = %(parent_metaclass)s;
/* Some fields need to be manually inherited from the parent metaclass */
%(pytypestruct)s.tp_traverse = %(parent_metaclass)s->tp_traverse;
%(pytypestruct)s.tp_clear = %(parent_metaclass)s->tp_clear;
%(pytypestruct)s.tp_is_gc = %(parent_metaclass)s->tp_is_gc;
/* PyType tp_setattro is too restrictive */
%(pytypestruct)s.tp_setattro = PyObject_GenericSetAttr;
PyType_Ready(&%(pytypestruct)s);
""" % dict(pytypestruct=self.pytypestruct, parent_metaclass=self.parent_metaclass_expr))
class PyGetSetDef(object):
"""
Class that generates a PyGetSet table
"""
def __init__(self, cname):
"""
:param cname: C name of the getset table
"""
self.cname = cname
self.attributes = [] # (name, getter, setter)
def empty(self):
return len(self.attributes) == 0
def add_attribute(self, name, getter, setter):
"""
Add a new attribute
:param name: attribute name
:param getter: a PyGetter object, or None
:param setter: a PySetter object, or None
"""
assert getter is None or isinstance(getter, PyGetter)
assert setter is None or isinstance(setter, PySetter)
self.attributes.append((name, getter, setter))
def generate(self, code_sink):
"""
Generate the getset table, return the table C name or '0' if
the table is empty
"""
if not self.attributes:
return '0'
        getsets = {}  # attrname -> (getter C function name, setter C function name)
for name, getter, setter in self.attributes:
getter_name = 'NULL'
if getter is not None:
# getter.generate(code_sink)
try:
utils.call_with_error_handling(getter.generate, (code_sink,), {}, getter)
except utils.SkipWrapper:
pass
else:
getter_name = getter.c_function_name
setter_name = 'NULL'
if setter is not None:
#setter.generate(code_sink)
try:
utils.call_with_error_handling(setter.generate, (code_sink,), {}, setter)
except utils.SkipWrapper:
pass
else:
setter_name = setter.c_function_name
assert name not in getsets
getsets[name] = (getter_name, setter_name)
code_sink.writeln("static PyGetSetDef %s[] = {" % self.cname)
code_sink.indent()
for name, (getter_c_name, setter_c_name) in getsets.iteritems():
code_sink.writeln('{')
code_sink.indent()
code_sink.writeln('(char*) "%s", /* attribute name */' % name)
## getter
code_sink.writeln(
'(getter) %s, /* C function to get the attribute */'
% getter_c_name)
## setter
code_sink.writeln(
'(setter) %s, /* C function to set the attribute */'
% setter_c_name)
code_sink.writeln('NULL, /* optional doc string */')
code_sink.writeln('NULL /* optional additional data '
'for getter and setter */')
code_sink.unindent()
code_sink.writeln('},')
code_sink.writeln('{ NULL, NULL, NULL, NULL, NULL }')
code_sink.unindent()
code_sink.writeln('};')
return self.cname
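For context, a hedged sketch of how these attribute wrappers are normally reached from pybindgen's public API; the class, header, and attribute names are illustrative, and exact signatures can differ between pybindgen versions:
from pybindgen import Module, FileCodeSink

mod = Module('example')
mod.add_include('"example.h"')   # hypothetical C++ header
cls = mod.add_class('Point')     # hypothetical C++ class
cls.add_constructor([])
# Each add_instance_attribute() call ultimately generates a getter/setter
# pair via the CppInstanceAttributeGetter/Setter classes defined above:
cls.add_instance_attribute('x', 'int')
cls.add_instance_attribute('y', 'int')
with open('example_bindings.c', 'wt') as f:
    mod.generate(FileCodeSink(f))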
|
AndrewMorris-scsu/classsuggestionapp
|
refs/heads/master
|
parse/parse.py
|
1
|
# data_file = "majorcourses (1).unl"
# with open('fixed_input', 'w') as output:
# output.write('{\n')
# with open(data_file, 'r') as data:
# for line in data.readlines():
# line = line.replace("\n", "")
# elems = line.split("|")
# class_id = '"{}"'.format(elems[0])
# output.write(" {}: ".format(class_id))
# classes = ['"{}"'.format(elem) for elem in elems[1:]]
# output.write("[{}], \n".format(', '.join(classes)))
# output.write('}')
data_file = "majorcourses (1).unl"
with open('major_courses.json', 'w') as output:
output.write('{\n')
with open(data_file, 'r') as data:
for line in data.readlines():
line = line.replace("\n", "")
elems = line.split("|")
class_id = elems[0]
major = elems[1]
desc = elems[2]
#Get department from class and from major
temp_class = elems[0].split(" ")
class_dept = temp_class[0]
temp_dept = elems[1].split("-")
major_dept = temp_dept[0]
output.write(' "{}": {}\n'.format(class_id, "{"))
output.write(' "class_dept":"{}",\n'.format(class_dept))
output.write(' "major_dept":"{}",\n'.format(major_dept))
output.write(' "major":"{}",\n'.format(major))
output.write(' "description": "{}"\n'.format(desc))
output.write(' },\n')
output.write('}')
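A hedged illustration of the transformation, with the field layout inferred from the split above; the input row is hypothetical:
# Input line:
#   CSCI 201|CSCI-BS|Computer Science I
# Resulting entry in major_courses.json:
#   "CSCI 201": {
#       "class_dept": "CSCI",
#       "major_dept": "CSCI",
#       "major": "CSCI-BS",
#       "description": "Computer Science I"
#   }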
|
youdonghai/intellij-community
|
refs/heads/master
|
python/testData/refactoring/extractmethod/DuplicateSingleLine.after.py
|
79
|
def bar():
a = foo()
print a
a = foo()
print a
def foo():
a = 1
return a
|
ShamblrTeam/Searcher
|
refs/heads/master
|
test_socket.py
|
1
|
import socket
import json
while True:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('localhost', 7776))
tag = str(raw_input('find word: '))
if tag == '':
break
    # send the JSON request over the socket
s.send(json.dumps({'query':tag}))
# tell the other end of the socket that I'm done writing
s.shutdown(socket.SHUT_WR)
    # receive the response
try:
data = bytes()
while True:
new_data = s.recv(1024)
if not new_data: break
data += new_data
s.close()
s = None
data = str(data)
except Exception as e:
print e
print data
data_obj = json.loads(data)
print "Length: " + str(len(data_obj['posts']))
|
clumsy/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyMakeMethodStaticQuickFixTest/emptyParam_after.py
|
249
|
__author__ = 'ktisha'
class Child(Base):
def __init__(self):
super(Child, self).__init__()
@staticmethod
def f():
test = 1
|
TeamEOS/external_chromium_org_third_party_WebKit
|
refs/heads/lp5.1
|
Source/devtools/scripts/convert_svg_images_to_png.py
|
66
|
#!/usr/bin/env python
# Copyright (c) 2014 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import devtools_file_hashes
import hashlib
import os
import os.path
import re
import subprocess
import sys
try:
import json
except ImportError:
import simplejson as json
def check_installed(app_name):
proc = subprocess.Popen("which %s" % app_name, stdout=subprocess.PIPE, shell=True)
proc.communicate()
if proc.returncode != 0:
print "This script needs \"%s\" to be installed." % app_name
sys.exit(1)
check_installed("inkscape")
scripts_path = os.path.dirname(os.path.abspath(__file__))
devtools_path = os.path.dirname(scripts_path)
devtools_frontend_path = devtools_path + "/front_end"
images_path = devtools_frontend_path + "/Images"
image_sources_path = images_path + "/src"
hashes_file_name = "svg2png.hashes"
hashes_file_path = image_sources_path + "/" + hashes_file_name
file_names = os.listdir(image_sources_path)
svg_file_paths = [image_sources_path + "/" + file_name for file_name in file_names if file_name.endswith(".svg")]
svg_file_paths_to_convert = devtools_file_hashes.files_with_invalid_hashes(hashes_file_path, svg_file_paths)
svg_file_names = [re.sub(".svg$", "", re.sub(".*/", "", file_path)) for file_path in svg_file_paths_to_convert]
def convert_svg_to_png(svg_file_name, png_file_name, dpi):
svg_full_path = image_sources_path + "/" + svg_file_name + ".svg"
png_full_path = images_path + "/" + png_file_name + ".png"
convert_command = "inkscape -f %s -e %s -d %s" % (svg_full_path, png_full_path, dpi)
proc = subprocess.Popen(convert_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
return proc
processes = {}
for file_name in svg_file_names:
name = re.sub(".svg$", "", file_name)
name2x = name + "_2x"
processes[name] = convert_svg_to_png(name, name, 90)
processes[name2x] = convert_svg_to_png(name, name2x, 180)
for file_name, proc in processes.items():
(convert_out, _) = proc.communicate()
print("Conversion of %s finished: %s" % (file_name, convert_out))
devtools_file_hashes.update_file_hashes(hashes_file_path, svg_file_paths)
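One caveat worth noting: the -f/-e/-d flags in convert_svg_to_png match pre-1.0 Inkscape command lines; Inkscape 1.0+ renamed them, so on newer installs the command string would need something roughly like the following (a hedged sketch, not tested against every 1.x release):
convert_command = "inkscape %s --export-filename=%s --export-dpi=%s" % (
    svg_full_path, png_full_path, dpi)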
|
serdimoa/vincenzoext
|
refs/heads/master
|
db_create.py
|
9
|
#!venv/bin/python
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from app import db
import os.path
db.create_all()
if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
else:
api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, api.version(SQLALCHEMY_MIGRATE_REPO))
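A companion upgrade script in the same sqlalchemy-migrate style often accompanies this one; a minimal sketch using the same api module (the file name db_upgrade.py is conventional, not mandated):
#!venv/bin/python
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO

# apply any pending migrations, then report the resulting schema version
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('Current database version: ' + str(v))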
|
azumimuo/family-xbmc-addon
|
refs/heads/master
|
script.module.youtube.dl/lib/youtube_dl/extractor/voicerepublic.py
|
53
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urlparse,
)
from ..utils import (
ExtractorError,
determine_ext,
int_or_none,
sanitized_Request,
)
class VoiceRepublicIE(InfoExtractor):
_VALID_URL = r'https?://voicerepublic\.com/(?:talks|embed)/(?P<id>[0-9a-z-]+)'
_TESTS = [{
'url': 'http://voicerepublic.com/talks/watching-the-watchers-building-a-sousveillance-state',
'md5': 'b9174d651323f17783000876347116e3',
'info_dict': {
'id': '2296',
'display_id': 'watching-the-watchers-building-a-sousveillance-state',
'ext': 'm4a',
'title': 'Watching the Watchers: Building a Sousveillance State',
'description': 'Secret surveillance programs have metadata too. The people and companies that operate secret surveillance programs can be surveilled.',
'thumbnail': r're:^https?://.*\.(?:png|jpg)$',
'duration': 1800,
'view_count': int,
}
}, {
'url': 'http://voicerepublic.com/embed/watching-the-watchers-building-a-sousveillance-state',
'only_matching': True,
}]
def _real_extract(self, url):
display_id = self._match_id(url)
req = sanitized_Request(
compat_urlparse.urljoin(url, '/talks/%s' % display_id))
# Older versions of Firefox get redirected to an "upgrade browser" page
req.add_header('User-Agent', 'youtube-dl')
webpage = self._download_webpage(req, display_id)
if '>Queued for processing, please stand by...<' in webpage:
raise ExtractorError(
'Audio is still queued for processing', expected=True)
config = self._search_regex(
r'(?s)return ({.+?});\s*\n', webpage,
'data', default=None)
data = self._parse_json(config, display_id, fatal=False) if config else None
if data:
title = data['title']
description = data.get('teaser')
talk_id = compat_str(data.get('talk_id') or display_id)
talk = data['talk']
duration = int_or_none(talk.get('duration'))
formats = [{
'url': compat_urlparse.urljoin(url, talk_url),
'format_id': format_id,
'ext': determine_ext(talk_url) or format_id,
'vcodec': 'none',
} for format_id, talk_url in talk['links'].items()]
else:
title = self._og_search_title(webpage)
description = self._html_search_regex(
r"(?s)<div class='talk-teaser'[^>]*>(.+?)</div>",
webpage, 'description', fatal=False)
talk_id = self._search_regex(
[r"id='jc-(\d+)'", r"data-shareable-id='(\d+)'"],
webpage, 'talk id', default=None) or display_id
duration = None
player = self._search_regex(
r"class='vr-player jp-jplayer'([^>]+)>", webpage, 'player')
formats = [{
'url': compat_urlparse.urljoin(url, talk_url),
'format_id': format_id,
'ext': determine_ext(talk_url) or format_id,
'vcodec': 'none',
} for format_id, talk_url in re.findall(r"data-([^=]+)='([^']+)'", player)]
self._sort_formats(formats)
thumbnail = self._og_search_thumbnail(webpage)
view_count = int_or_none(self._search_regex(
r"class='play-count[^']*'>\s*(\d+) plays",
webpage, 'play count', fatal=False))
return {
'id': talk_id,
'display_id': display_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'view_count': view_count,
'formats': formats,
}
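For reference, extractors like this one are normally driven through youtube-dl's public API rather than instantiated directly; the URL below comes from the extractor's own _TESTS:
from __future__ import unicode_literals
import youtube_dl

ydl = youtube_dl.YoutubeDL({'quiet': True})
info = ydl.extract_info(
    'http://voicerepublic.com/talks/watching-the-watchers-building-a-sousveillance-state',
    download=False)  # metadata only, no audio download
print(info.get('title'))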
|
OpenTSDB/tcollector
|
refs/heads/master
|
collectors/etc/elasticsearch_conf.py
|
1
|
#!/usr/bin/env python
# This file is part of tcollector.
# Copyright (C) 2015 The tcollector Authors.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details. You should have received a copy
# of the GNU Lesser General Public License along with this program. If not,
# see <http://www.gnu.org/licenses/>.
def get_servers():
"""Get the ElasticSearch servers on this host.
Returns:
An iterable of tuples of (host, port)
"""
return [ ("localhost", 9200) ]
|
Pakoach/Sick-Beard
|
refs/heads/master
|
cherrypy/_cperror.py
|
45
|
"""Error classes for CherryPy."""
from cgi import escape as _escape
from sys import exc_info as _exc_info
from traceback import format_exception as _format_exception
from urlparse import urljoin as _urljoin
from cherrypy.lib import httputil as _httputil
class CherryPyException(Exception):
pass
class TimeoutError(CherryPyException):
"""Exception raised when Response.timed_out is detected."""
pass
class InternalRedirect(CherryPyException):
"""Exception raised to switch to the handler for a different URL.
Any request.params must be supplied in a query string.
"""
def __init__(self, path, query_string=""):
import cherrypy
self.request = cherrypy.serving.request
self.query_string = query_string
if "?" in path:
# Separate any params included in the path
path, self.query_string = path.split("?", 1)
# Note that urljoin will "do the right thing" whether url is:
# 1. a URL relative to root (e.g. "/dummy")
# 2. a URL relative to the current path
# Note that any query string will be discarded.
path = _urljoin(self.request.path_info, path)
# Set a 'path' member attribute so that code which traps this
# error can have access to it.
self.path = path
CherryPyException.__init__(self, path, self.query_string)
class HTTPRedirect(CherryPyException):
"""Exception raised when the request should be redirected.
The new URL must be passed as the first argument to the Exception,
e.g., HTTPRedirect(newUrl). Multiple URLs are allowed. If a URL is
absolute, it will be used as-is. If it is relative, it is assumed
to be relative to the current cherrypy.request.path_info.
"""
def __init__(self, urls, status=None):
import cherrypy
request = cherrypy.serving.request
if isinstance(urls, basestring):
urls = [urls]
abs_urls = []
for url in urls:
# Note that urljoin will "do the right thing" whether url is:
# 1. a complete URL with host (e.g. "http://www.example.com/test")
# 2. a URL relative to root (e.g. "/dummy")
# 3. a URL relative to the current path
# Note that any query string in cherrypy.request is discarded.
url = _urljoin(cherrypy.url(), url)
abs_urls.append(url)
self.urls = abs_urls
# RFC 2616 indicates a 301 response code fits our goal; however,
# browser support for 301 is quite messy. Do 302/303 instead. See
# http://www.alanflavell.org.uk/www/post-redirect.html
if status is None:
if request.protocol >= (1, 1):
status = 303
else:
status = 302
else:
status = int(status)
if status < 300 or status > 399:
raise ValueError("status must be between 300 and 399.")
self.status = status
CherryPyException.__init__(self, abs_urls, status)
def set_response(self):
"""Modify cherrypy.response status, headers, and body to represent self.
CherryPy uses this internally, but you can also use it to create an
HTTPRedirect object and set its output without *raising* the exception.
"""
import cherrypy
response = cherrypy.serving.response
response.status = status = self.status
if status in (300, 301, 302, 303, 307):
response.headers['Content-Type'] = "text/html;charset=utf-8"
# "The ... URI SHOULD be given by the Location field
# in the response."
response.headers['Location'] = self.urls[0]
# "Unless the request method was HEAD, the entity of the response
# SHOULD contain a short hypertext note with a hyperlink to the
# new URI(s)."
msg = {300: "This resource can be found at <a href='%s'>%s</a>.",
301: "This resource has permanently moved to <a href='%s'>%s</a>.",
302: "This resource resides temporarily at <a href='%s'>%s</a>.",
303: "This resource can be found at <a href='%s'>%s</a>.",
307: "This resource has moved temporarily to <a href='%s'>%s</a>.",
}[status]
msgs = [msg % (u, u) for u in self.urls]
response.body = "<br />\n".join(msgs)
# Previous code may have set C-L, so we have to reset it
# (allow finalize to set it).
response.headers.pop('Content-Length', None)
elif status == 304:
# Not Modified.
# "The response MUST include the following header fields:
# Date, unless its omission is required by section 14.18.1"
# The "Date" header should have been set in Response.__init__
# "...the response SHOULD NOT include other entity-headers."
for key in ('Allow', 'Content-Encoding', 'Content-Language',
'Content-Length', 'Content-Location', 'Content-MD5',
'Content-Range', 'Content-Type', 'Expires',
'Last-Modified'):
if key in response.headers:
del response.headers[key]
# "The 304 response MUST NOT contain a message-body."
response.body = None
# Previous code may have set C-L, so we have to reset it.
response.headers.pop('Content-Length', None)
elif status == 305:
# Use Proxy.
# self.urls[0] should be the URI of the proxy.
response.headers['Location'] = self.urls[0]
response.body = None
# Previous code may have set C-L, so we have to reset it.
response.headers.pop('Content-Length', None)
else:
raise ValueError("The %s status code is unknown." % status)
def __call__(self):
"""Use this exception as a request.handler (raise self)."""
raise self
def clean_headers(status):
"""Remove any headers which should not apply to an error response."""
import cherrypy
response = cherrypy.serving.response
# Remove headers which applied to the original content,
# but do not apply to the error page.
respheaders = response.headers
for key in ["Accept-Ranges", "Age", "ETag", "Location", "Retry-After",
"Vary", "Content-Encoding", "Content-Length", "Expires",
"Content-Location", "Content-MD5", "Last-Modified"]:
if key in respheaders:
del respheaders[key]
if status != 416:
# A server sending a response with status code 416 (Requested
# range not satisfiable) SHOULD include a Content-Range field
# with a byte-range-resp-spec of "*". The instance-length
# specifies the current length of the selected resource.
# A response with status code 206 (Partial Content) MUST NOT
# include a Content-Range field with a byte-range- resp-spec of "*".
if "Content-Range" in respheaders:
del respheaders["Content-Range"]
class HTTPError(CherryPyException):
""" Exception used to return an HTTP error code (4xx-5xx) to the client.
This exception will automatically set the response status and body.
A custom message (a long description to display in the browser)
can be provided in place of the default.
"""
def __init__(self, status=500, message=None):
self.status = status
try:
self.code, self.reason, defaultmsg = _httputil.valid_status(status)
except ValueError, x:
raise self.__class__(500, x.args[0])
if self.code < 400 or self.code > 599:
raise ValueError("status must be between 400 and 599.")
# See http://www.python.org/dev/peps/pep-0352/
# self.message = message
self._message = message or defaultmsg
CherryPyException.__init__(self, status, message)
def set_response(self):
"""Modify cherrypy.response status, headers, and body to represent self.
CherryPy uses this internally, but you can also use it to create an
HTTPError object and set its output without *raising* the exception.
"""
import cherrypy
response = cherrypy.serving.response
clean_headers(self.code)
# In all cases, finalize will be called after this method,
# so don't bother cleaning up response values here.
response.status = self.status
tb = None
if cherrypy.serving.request.show_tracebacks:
tb = format_exc()
response.headers['Content-Type'] = "text/html;charset=utf-8"
response.headers.pop('Content-Length', None)
content = self.get_error_page(self.status, traceback=tb,
message=self._message)
response.body = content
_be_ie_unfriendly(self.code)
def get_error_page(self, *args, **kwargs):
return get_error_page(*args, **kwargs)
def __call__(self):
"""Use this exception as a request.handler (raise self)."""
raise self
class NotFound(HTTPError):
"""Exception raised when a URL could not be mapped to any handler (404)."""
def __init__(self, path=None):
if path is None:
import cherrypy
request = cherrypy.serving.request
path = request.script_name + request.path_info
self.args = (path,)
HTTPError.__init__(self, 404, "The path '%s' was not found." % path)
_HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"></meta>
<title>%(status)s</title>
<style type="text/css">
#powered_by {
margin-top: 20px;
border-top: 2px solid black;
font-style: italic;
}
#traceback {
color: red;
}
</style>
</head>
<body>
<h2>%(status)s</h2>
<p>%(message)s</p>
<pre id="traceback">%(traceback)s</pre>
<div id="powered_by">
<span>Powered by <a href="http://www.cherrypy.org">CherryPy %(version)s</a></span>
</div>
</body>
</html>
'''
def get_error_page(status, **kwargs):
"""Return an HTML page, containing a pretty error response.
status should be an int or a str.
kwargs will be interpolated into the page template.
"""
import cherrypy
try:
code, reason, message = _httputil.valid_status(status)
except ValueError, x:
raise cherrypy.HTTPError(500, x.args[0])
# We can't use setdefault here, because some
# callers send None for kwarg values.
if kwargs.get('status') is None:
kwargs['status'] = "%s %s" % (code, reason)
if kwargs.get('message') is None:
kwargs['message'] = message
if kwargs.get('traceback') is None:
kwargs['traceback'] = ''
if kwargs.get('version') is None:
kwargs['version'] = cherrypy.__version__
for k, v in kwargs.iteritems():
if v is None:
kwargs[k] = ""
else:
kwargs[k] = _escape(kwargs[k])
# Use a custom template or callable for the error page?
pages = cherrypy.serving.request.error_page
error_page = pages.get(code) or pages.get('default')
if error_page:
try:
if callable(error_page):
return error_page(**kwargs)
else:
return open(error_page, 'rb').read() % kwargs
except:
e = _format_exception(*_exc_info())[-1]
m = kwargs['message']
if m:
m += "<br />"
m += "In addition, the custom error page failed:\n<br />%s" % e
kwargs['message'] = m
return _HTTPErrorTemplate % kwargs
_ie_friendly_error_sizes = {
400: 512, 403: 256, 404: 512, 405: 256,
406: 512, 408: 512, 409: 512, 410: 256,
500: 512, 501: 512, 505: 512,
}
def _be_ie_unfriendly(status):
import cherrypy
response = cherrypy.serving.response
# For some statuses, Internet Explorer 5+ shows "friendly error
# messages" instead of our response.body if the body is smaller
# than a given size. Fix this by returning a body over that size
# (by adding whitespace).
# See http://support.microsoft.com/kb/q218155/
s = _ie_friendly_error_sizes.get(status, 0)
if s:
s += 1
# Since we are issuing an HTTP error status, we assume that
# the entity is short, and we should just collapse it.
content = response.collapse_body()
l = len(content)
if l and l < s:
# IN ADDITION: the response must be written to IE
# in one chunk or it will still get replaced! Bah.
content = content + (" " * (s - l))
response.body = content
response.headers[u'Content-Length'] = str(len(content))
def format_exc(exc=None):
"""Return exc (or sys.exc_info if None), formatted."""
if exc is None:
exc = _exc_info()
if exc == (None, None, None):
return ""
import traceback
return "".join(traceback.format_exception(*exc))
def bare_error(extrabody=None):
"""Produce status, headers, body for a critical error.
Returns a triple without calling any other questionable functions,
so it should be as error-free as possible. Call it from an HTTP server
if you get errors outside of the request.
If extrabody is None, a friendly but rather unhelpful error message
is set in the body. If extrabody is a string, it will be appended
as-is to the body.
"""
# The whole point of this function is to be a last line-of-defense
# in handling errors. That is, it must not raise any errors itself;
# it cannot be allowed to fail. Therefore, don't add to it!
# In particular, don't call any other CP functions.
body = "Unrecoverable error in the server."
if extrabody is not None:
if not isinstance(extrabody, str):
extrabody = extrabody.encode('utf-8')
body += "\n" + extrabody
return ("500 Internal Server Error",
[('Content-Type', 'text/plain'),
('Content-Length', str(len(body)))],
[body])
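Typical use of the exception classes above inside application code; the handler class and paths are illustrative:
import cherrypy

class Root(object):
    @cherrypy.expose
    def old(self):
        # set_response() will pick 303 or 302 based on the HTTP version
        raise cherrypy.HTTPRedirect("/new")

    @cherrypy.expose
    def secret(self):
        raise cherrypy.HTTPError(403, "Forbidden area.")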
|
balazssimon/ml-playground
|
refs/heads/master
|
udemy/Machine Learning A-Z/Part 2 - Regression/Section 4 - Simple Linear Regression/simple_linear_regression.py
|
1
|
# Simple Linear Regression
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('Salary_Data.csv')
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, 1].values
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 1/3, random_state = 0)
# Feature Scaling
"""from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
sc_y = StandardScaler()
y_train = sc_y.fit_transform(y_train)
y_test = sc_y.transform(y_test)"""
# Fitting simple linear regression to the Training set
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(X_train, y_train)
# Predicting the Test set results
y_pred = regressor.predict(X_test)
# Visualizing the Training set results
plt.scatter(X_train, y_train, color='red')
plt.plot(X_train, regressor.predict(X_train), color='blue')
plt.title('Salary vs Experience (Training set)')
plt.xlabel('Years of experience')
plt.ylabel('Salary')
plt.show()
# Visualizing the Test set results
plt.scatter(X_test, y_test, color='red')
plt.plot(X_train, regressor.predict(X_train), color='blue')
plt.title('Salary vs Experience (Test set)')
plt.xlabel('Years of experience')
plt.ylabel('Salary')
plt.show()
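A hedged follow-up that quantifies the fit instead of only plotting it, reusing the y_test and y_pred defined above:
from sklearn.metrics import mean_squared_error, r2_score
print('R^2 on test set: %.3f' % r2_score(y_test, y_pred))
print('MSE on test set: %.3f' % mean_squared_error(y_test, y_pred))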
|