code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
import numpy as np
# ===== Patch normalization by mean intensity ========================
def mean_intensity_norm(patch):
mu = np(np.sum(patch))*1.0/(patch.shape[0]*shape[1])
return (patch - mu[np.newaxis,np.newaxis])
# ===== Patch normalization by mean intensity ========================
|
tntrung/pyCVML
|
misc/patch_norm.py
|
Python
|
gpl-2.0
| 294
|
# This file is part of the dionaea honeypot
#
# SPDX-FileCopyrightText: 2009 Paul Baecher & Markus Koetter & Mark Schloesser
# SPDX-FileCopyrightText: 2010 Markus Koetter & Tan Kean Siong
# SPDX-FileCopyrightText: 2015 Katarina Durechova
# SPDX-FileCopyrightText: 2017 Tan Kean Siong
# SPDX-FileCopyrightText: 2016-2017 PhiBo (DinoTools)
#
# SPDX-License-Identifier: GPL-2.0-or-later
import logging
import tempfile
from uuid import UUID
from time import time, localtime, altzone
from dionaea import ndrlib
from dionaea.core import g_dionaea, incident
from .include.smbfields import DCERPC_Header, DCERPC_Response
rpclog = logging.getLogger('rpcservices')
# Set the operating system of Dionaea by changing the value
# Default value is 2
# 1:"Windows XP Service Pack 0/1",
# 2:"Windows XP Service Pack 2",
# 3:"Windows XP Service Pack 3",
# 4:"Windows 7 Service Pack 1",
# 5:"Linux Samba 4.3.11"
OS_TYPE = 2
class DCERPCValueError(Exception):
"""Raised when an a value is passed to a dcerpc operation which is invalid"""
def __init__(self, varname, reason, value):
self.varname = varname
self.reason = reason
self.value = value
def __str__(self):
return "%s is %s (%s)" % (self.varname, self.reason, self.value)
class RPCService(object):
uuid = ''
version_major = 0
version_minor = 0
# syntax = UUID('8a885d04-1ceb-11c9-9fe8-08002b104860').hex
ops = {}
vulns = {}
@classmethod
def processrequest(cls, service, con, opnum, p):
if opnum in cls.ops:
opname = cls.ops[opnum]
method = getattr(cls, "handle_" + opname, None)
if method != None:
if opnum in cls.vulns:
vulnname = cls.vulns[opnum]
rpclog.info("Calling %s %s (%x) maybe %s exploit?" % (
service.__class__.__name__, opname, opnum, vulnname ) )
else:
rpclog.info("Calling %s %s (%x)" %
( service.__class__.__name__, opname, opnum ) )
r = DCERPC_Header() / DCERPC_Response()
try:
data = method(con, p)
except DCERPCValueError as e:
rpclog.debug("DCERPCValueError %s" % e)
return None
except EOFError as e:
rpclog.warn("EOFError data %s" % format(p.StubData))
return None
if data is None:
data = b''
#for metasploit OS type 'Windows XP Service Pack 2+"
if OS_TYPE == 2 or OS_TYPE == 3:
if opname == "NetNameCanonicalize":
r.PacketType = 3
r.StubData = data
r.AllocHint = len(data)
r.CallID = p.CallID
r.FragLen = 24 + len(data)
rpclog.debug(data)
# print(r.show())
return r
else:
rpclog.info("Unknown RPC Call to %s %i" %
( service.__class__.__name__, opnum) )
class ATSVC(RPCService):
uuid = UUID('1ff70682-0a51-30e8-076d-740be8cee98b').hex
ops = {
0x02: "NetrJobEnum",
}
class ATSVC_HANDLE(object):
# 2.3.2 ATSVC_HANDLE
#
# http://msdn.microsoft.com/en-us/library/cc248473%28PROT.13%29.aspx
#
#typedef [handle] const wchar_t* ATSVC_HANDLE;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
pass
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Pointer = p.unpack_pointer()
self.Handle = p.unpack_string()
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
pass
#this function have not tested for the moment
@classmethod
def handle_NetrJobEnum(cls, con, p):
# 3.2.5.2.3 NetrJobEnum (Opnum 2)
#
# http://msdn.microsoft.com/en-us/library/cc248425%28PROT.10%29.aspx
#
#NET_API_STATUS NetrJobEnum(
# [in, string, unique] ATSVC_HANDLE ServerName,
# [in, out] LPAT_ENUM_CONTAINER pEnumContainer,
# [in] DWORD PreferedMaximumLength,
# [out] LPDWORD pTotalEntries,
# [in, out, unique] LPDWORD pResumeHandle
#);
x = ndrlib.Unpacker(p.StubData)
ServerName = ATSVC.ATSVC_HANDLE(x)
Pad = x.unpack_short()
# pEnumContainer
EntriesRead = x.unpack_long()
pEntries = x.unpack_pointer()
# PreferedMaximumLength
PreferedMaxLength = x.unpack_long()
# pResumeHandle
Pointer = x.unpack_pointer()
ResumeHandle = x.unpack_long()
r = ndrlib.Packer()
# pEnumContainer
r.pack_long(0) # EntriesRead
r.pack_pointer(0) # pEntries
# pTotalEntries
r.pack_long(0)
# pResumeHandle
r.pack_pointer(0x0016c918)
r.pack_long(0)
# return
r.pack_long(0)
return r.get_buffer()
class AudioSrv(RPCService):
uuid = UUID('3faf4738-3a21-4307-b46c-fdda9bb8c0d5').hex
class browser(RPCService):
uuid = UUID('6bffd098-a112-3610-9833-012892020162').hex
class davclntrpc(RPCService):
uuid = UUID('c8cb7687-e6d3-11d2-a958-00c04f682e16').hex
class DCOM(RPCService):
uuid = UUID('4d9f4ab8-7d1c-11cf-861e-0020af6e7c57').hex
ops = {
0x00: "RemoteActivation",
}
vulns = {
0x00: "MS03-26",
}
@classmethod
def handle_RemoteActivation(cls, con, p):
# MS03-026
pass
class DnsServer(RPCService):
uuid = UUID('50abc2a4-574d-40b3-9d66-ee4fd5fba076').hex
class DSSETUP(RPCService):
uuid = UUID('3919286a-b10c-11d0-9ba8-00c04fd92ef5').hex
ops = {
0x09: "DsRolerUpgradeDownlevelServer"
}
vulns = {
0x09: "MS04-11",
}
@classmethod
def handle_DsRolerUpgradeDownlevelServer(cls, con, p):
# MS04-011
pass
class epmp(RPCService):
uuid = UUID('e1af8308-5d1f-11c9-91a4-08002b14a0fa').hex
class eventlog(RPCService):
uuid = UUID('82273fdc-e32a-18c3-3f78-827929dc23ea').hex
class GetUserToken(RPCService):
uuid = UUID('a002b3a0-c9b7-11d1-ae88-0080c75e4ec1').hex
class ICertPassage(RPCService):
uuid = UUID('91ae6020-9e3c-11cf-8d7c-00aa00c091be').hex
class ICertProtect(RPCService):
uuid = UUID('0d72a7d4-6148-11d1-b4aa-00c04fb66ea0').hex
class InitShutdown(RPCService):
uuid = UUID('894de0c0-0d55-11d3-a322-00c04fa321a1').hex
class IKeySvc(RPCService):
uuid = UUID('8d0ffe72-d252-11d0-bf8f-00c04fd9126b').hex
class IPStoreProv(RPCService):
uuid = UUID('c9378ff1-16f7-11d0-a0b2-00aa0061426a').hex
class ISeclogon(RPCService):
uuid = UUID('12b81e99-f207-4a4c-85d3-77b42f76fd14').hex
class ISystemActivator(RPCService):
uuid = UUID('000001a0-0000-0000-c000-000000000046').hex
ops = {
0x4: "RemoteCreateInstance"
}
vulns = {
0x4: "MS04-12",
}
@classmethod
def handle_RemoteCreateInstance(cls, con, p):
# MS04-012
pass
class RPC_C_AUTHN(object):
# http://msdn.microsoft.com/en-us/library/ms692656%28VS.85%29.aspx
# seems globally used
NONE = 0
DCE_PRIVATE = 1
DCE_PUBLIC = 2
DEC_PUBLIC = 4
GSS_NEGOTIATE = 9
WINNT = 10
GSS_SCHANNEL = 14
GSS_KERBEROS = 16
DEFAULT = 0xFFFFFFFF
class NCACN(object):
# http://www.opengroup.org/onlinepubs/9692999399/apdxi.htm#tagtcjh_51
UDP =8
IP = 9
class IOXIDResolver(RPCService):
"""[MS-DCOM]: Distributed Component Object Model (DCOM) Remote Protocol Specification
http://msdn.microsoft.com/en-us/library/cc226801%28PROT.10%29.aspx"""
uuid = UUID('99fcfec4-5260-101b-bbcb-00aa0021347a').hex
ops = {
0x5: "ServerAlive2"
}
class COMVERSION(object):
# typedef struct tagCOMVERSION {
# unsigned short MajorVersion;
# unsigned short MinorVersion;
# } COMVERSION;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.MajorVersion = 5
self.MinorVersion = 7
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_short(self.MajorVersion)
self.__packer.pack_short(self.MinorVersion)
def size(self):
return 4
class DUALSTRINGARRAY(object):
# 2.2.1.19.2 DUALSTRINGARRAY
#
# http://msdn.microsoft.com/en-us/library/cc226841%28PROT.10%29.aspx
#
# typedef struct tagDUALSTRINGARRAY {
# unsigned short wNumEntries;
# unsigned short wSecurityOffset;
# [size_is(wNumEntries)] unsigned short aStringArray[];
# } DUALSTRINGARRAY;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.NumEntries = 0
self.SecurityOffset = 0
self.StringArray = []
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.NumEntries=self.SecurityOffset=0
for x in self.StringArray:
rpclog.debug("x %s %i" % (x, x.size()))
xs = x.size()
if isinstance(x, IOXIDResolver.STRINGBINDING):
self.SecurityOffset += xs
self.NumEntries += xs
self.__packer.pack_short(int((self.NumEntries+4)/2))
self.__packer.pack_short(int((self.SecurityOffset+2)/2))
for i in self.StringArray:
if isinstance(i, IOXIDResolver.STRINGBINDING):
i.pack()
self.__packer.pack_raw(b'\0\0')
for i in self.StringArray:
if isinstance(i, IOXIDResolver.SECURITYBINDING):
i.pack()
self.__packer.pack_raw(b'\0\0')
def size(self):
return 2 + 2 + sum([x.size() for x in self.StringArray]) + 2 + 2
class STRINGBINDING(object):
# 2.2.1.19.3 STRINGBINDING
#
# http://msdn.microsoft.com/en-us/library/cc226838%28PROT.10%29.aspx
#
# fixmetypdef struct {
# unsigned short wTowerId
# char *aNetworkAddr
# } STRINGBINDING;
#
# TowerId ->
# http://www.opengroup.org/onlinepubs/9692999399/apdxi.htm#tagcjh_28
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.TowerId = NCACN.IP
self.NetworkAddr = ''
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_short(self.TowerId)
self.__packer.pack_raw(self.NetworkAddr.encode('utf16')[2:])
self.__packer.pack_raw(b'\0\0')
def size(self):
return 2 + len(self.NetworkAddr.encode('utf16')[2:]) + 2
class SECURITYBINDING(object):
# 2.2.1.19.4 SECURITYBINDING
#
# http://msdn.microsoft.com/en-us/library/cc226839%28PROT.10%29.aspx
#
# fixmetypedef struct {
# unsigned short wAuthnSvc
# unsigned short Reserved
# wchar_t aPrincName
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.AuthnSvc = RPC_C_AUTHN.GSS_NEGOTIATE
self.Reserved = 0xffff
self.PrincName = 'none'
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_short(self.AuthnSvc)
if self.AuthnSvc != RPC_C_AUTHN.NONE:
self.__packer.pack_short(self.Reserved)
self.__packer.pack_raw(self.PrincName.encode('utf16')[2:])
self.__packer.pack_raw(b'\0\0')
def size(self):
return 2 + 2 + len(self.PrincName.encode('utf16')[2:]) + 2
@classmethod
def handle_ServerAlive2(cls, con, dce):
# http://msdn.microsoft.com/en-us/library/cc226953%28PROT.10%29.aspx
#
# [idempotent] error_status_t ServerAlive2(
# [in] handle_t hRpc,
# [out, ref] COMVERSION* pComVersion,
# [out, ref] DUALSTRINGARRAY** ppdsaOrBindings,
# [out, ref] DWORD* pReserved
# );
p = ndrlib.Packer()
# prepare values
ComVersion = IOXIDResolver.COMVERSION(p)
# the DUALSTRINGARRAY
dsa = IOXIDResolver.DUALSTRINGARRAY(p)
s = IOXIDResolver.STRINGBINDING(p)
s.NetworkAddr = '127.0.0.1'
dsa.StringArray.append(s)
s = IOXIDResolver.STRINGBINDING(p)
s.NetworkAddr = '127.0.0.2'
dsa.StringArray.append(s)
s = IOXIDResolver.SECURITYBINDING(p)
s.AuthnSvc = RPC_C_AUTHN.GSS_NEGOTIATE
s.PrincName = "OEMCOMPUTER" # fixme: config value?
dsa.StringArray.append(s)
# we are done, pack it
# ComVersion
ComVersion.pack()
# pointer to DUALSTRINGARRAY
p.pack_pointer(0x200008)
# DUALSTRINGARRAY size
p.pack_long(int(dsa.size()/2))
# DUALSTRINGARRAY
dsa.pack()
# reserved
p.pack_pointer(0x4711)
p.pack_long(0)
return p.get_buffer()
class llsrpc(RPCService):
uuid = UUID('342cfd40-3c6c-11ce-a893-08002b2e9c6d').hex
class lsarpc(RPCService):
uuid = UUID('12345778-1234-abcd-ef00-0123456789ab').hex
class LSAPR_HANDLE(object):
# 2.2.2.1 LSAPR_HANDLE
#
# http://msdn.microsoft.com/en-us/library/cc234257%28v=PROT.10%29.aspx
#
#typedef [context_handle] void* LSAPR_HANDLE;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Handle = b''
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Value = p.unpack_raw(20)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_raw(self.Handle)
class LSAPR_OBJECT_ATTRIBUTES(object):
# 2.2.9 LSAPR_OBJECT_ATTRIBUTES
#
#http://help.outlook.com/en-us/140/cc234450%28PROT.10%29.aspx
#
#typedef struct _LSAPR_OBJECT_ATTRIBUTES {
# unsigned long Length;
# unsigned char* RootDirectory;
# PSTRING ObjectName;
# unsigned long Attributes;
# PLSAPR_SECURITY_DESCRIPTOR SecurityDescriptor;
# PSECURITY_QUALITY_OF_SERVICE SecurityQualityOfService;
#} LSAPR_OBJECT_ATTRIBUTES,
# *PLSAPR_OBJECT_ATTRIBUTES;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
pass
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Length = self.__packer.unpack_long()
rpclog.debug("Length = %i" % self.Length)
self.RootDirectory = self.__packer.unpack_short()
rpclog.debug("RootDirectory = %x" % self.RootDirectory)
self.ObjectName = self.__packer.unpack_pointer()
rpclog.debug("ObjectName = %x" % self.ObjectName)
self.Attributes = self.__packer.unpack_long()
self.SecurityDescriptor = self.__packer.unpack_pointer()
self.SecurityQualityOfService = self.__packer.unpack_pointer()
class LSA_TRANSLATED_SID(object):
#http://msdn.microsoft.com/en-us/library/dd424381.aspx
#
#typedef struct {
# SID_NAME_USE Use;
# ULONG RelativeId;
# LONG DomainIndex;
#} LSA_TRANSLATED_SID,
# *PLSA_TRANSLATED_SID;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Use = 0
self.RelativeId = 0
self.DomainIndex = 0
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_short(self.Use)
self.__packer.pack_long(self.RelativeId)
self.__packer.pack_long(self.DomainIndex)
# unknown
self.__packer.pack_long(0)
class LSAPR_TRANSLATED_SIDS(object):
# 2.2.15 LSAPR_TRANSLATED_SIDS
#
#http://msdn.microsoft.com/en-us/library/cc234457%28PROT.10%29.aspx
#
#typedef struct _LSAPR_TRANSLATED_SIDS {
# [range(0,1000)] unsigned long Entries;
# [size_is(Entries)] PLSA_TRANSLATED_SID Sids;
#} LSAPR_TRANSLATED_SIDS,
# *PLSAPR_TRANSLATED_SIDS;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Entries = 0
self.Pointer = 0x3456
self.MaxCount = 0
self.Data = []
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Entries = self.__packer.unpack_long()
rpclog.debug("Entries = %i" % self.Entries)
self.Pointer = self.__packer.unpack_pointer()
self.MaxCount = self.__packer.unpack_long()
if self.Entries != 0:
Sids = lsarpc.LSA_TRANSLATED_SID(self.__packer)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_long(self.Entries)
rpclog.debug("Entries = %i" % self.Entries)
self.__packer.pack_pointer(self.Pointer)
self.__packer.pack_long(self.Entries)
for i in range(self.Entries):
Sids = lsarpc.LSA_TRANSLATED_SID(self.__packer)
Sids.pack()
class LSAPR_TRUST_INFORMATION(object):
#2.2.11 LSAPR_TRUST_INFORMATION
#
#http://msdn.microsoft.com/en-us/library/cc234452%28PROT.10%29.aspx
#
#typedef struct _LSAPR_TRUST_INFORMATION {
# RPC_UNICODE_STRING Name;
# PRPC_SID Sid;
#} LSAPR_TRUST_INFORMATION,
# *PLSAPR_TRUST_INFORMATION;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Name = []
self.Entries = 0
self.RelativeId = 0
self.Pointer = 0x11
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
# MaxCount,needed as the element of NDR array
self.__packer.pack_long(self.Entries)
# RPC_UNICODE_STRING Name;
for i in range(self.Entries):
b = samr.RPC_UNICODE_STRING(self.__packer)
b.Data = self.Name[i]
b.pack()
# Pointer to RPC_UNICODE_STRING buffer
self.__packer.pack_long(self.Pointer)
# Pointer to RPC_SID buffer
self.__packer.pack_long(self.Pointer)
for j in range(self.Entries):
self.__packer.pack_string(self.Name[j].encode('utf16')[2:])
# PRPC_SID Sid ;
sid = samr.RPC_SID(self.__packer)
sid.Value = 'NT_AUTHORITY'
sid.SubAuthority = ['32','544']
sid.SubAuthorityCount = len(sid.SubAuthority)
# Maxcount, needed as the element of NDR array
self.__packer.pack_long(sid.SubAuthorityCount)
sid.pack()
class LSAPR_REFERENCED_DOMAIN_LIST(object):
# 2.2.12 LSAPR_REFERENCED_DOMAIN_LIST
#
#http://msdn.microsoft.com/en-us/library/cc234453%28PROT.13%29.aspx
#
#typedef struct _LSAPR_REFERENCED_DOMAIN_LIST {
# unsigned long Entries;
# [size_is(Entries)] PLSAPR_TRUST_INFORMATION Domains;
# unsigned long MaxEntries;
#} LSAPR_REFERENCED_DOMAIN_LIST,
# *PLSAPR_REFERENCED_DOMAIN_LIST;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Entries = 0
self.MaxEntries = 0
self.Data = []
self.Pointer = 0x4567
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_long(self.Entries)
for i in range(self.Entries):
# Pointer
self.__packer.pack_long(self.Pointer)
# MaxEntries
self.__packer.pack_long(0)
Domains = lsarpc.LSAPR_TRUST_INFORMATION(self.__packer)
Domains.Name = self.Data
Domains.Entries = self.Entries
Domains.pack()
class LSAPR_SID_INFORMATION(object):
# 2.2.17 LSAPR_SID_INFORMATION
#
# http://msdn.microsoft.com/en-us/library/cc234459%28v=PROT.10%29.aspx
#
#typedef struct _LSAPR_SID_INFORMATION {
# PRPC_SID Sid;
#} LSAPR_SID_INFORMATION,
# *PLSAPR_SID_INFORMATION
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
pass
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Count = self.__packer.unpack_long()
Sid = samr.RPC_SID(self.__packer)
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
pass
class LSAPR_SID_ENUM_BUFFER(object):
# 2.2.18 LSAPR_SID_ENUM_BUFFER
#
# http://msdn.microsoft.com/en-us/library/cc234460%28PROT.10%29.aspx
#
#typedef struct _LSAPR_SID_ENUM_BUFFER {
# [range(0,20480)] unsigned long Entries;
# [size_is(Entries)] PLSAPR_SID_INFORMATION SidInfo;
#} LSAPR_SID_ENUM_BUFFER,
# *PLSAPR_SID_ENUM_BUFFER;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
pass
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Entries = self.__packer.unpack_long()
self.Pointer = self.__packer.unpack_pointer()
self.MaxCount = self.__packer.unpack_long()
for i in range(self.MaxCount):
self.Reference = self.__packer.unpack_pointer()
for j in range(self.MaxCount):
SidInfo = lsarpc.LSAPR_SID_INFORMATION(self.__packer)
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
pass
class LSAPR_TRANSLATED_NAME_EX(object):
#2.2.21 LSAPR_TRANSLATED_NAME_EX
#
#http://msdn.microsoft.com/en-us/library/cc234463%28v=PROT.13%29.aspx
#
#typedef struct _LSAPR_TRANSLATED_NAME_EX {
# SID_NAME_USE Use;
# RPC_UNICODE_STRING Name;
# long DomainIndex;
# unsigned long Flags;
#} LSAPR_TRANSLATED_NAME_EX,
# *PLSAPR_TRANSLATED_NAME_EX;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
#2.2.13 SID_NAME_USE
# http://msdn.microsoft.com/en-us/library/cc234454%28v=PROT.13%29.aspx
self.Use = 8 #SidTypeUnknown
self.Flags = 0
self.DomainIndex = 0
self.Data = []
self.Pointer = 0x11
self.Entries = 0
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
for i in range(self.Entries):
self.__packer.pack_short(self.Use)
Name = samr.RPC_UNICODE_STRING(self.__packer)
# Empty String
Name.Data = self.Data
self.__packer.pack_pointer(0x00)
Name.pack()
self.__packer.pack_long(self.DomainIndex)
self.__packer.pack_long(self.Flags)
class LSAPR_TRANSLATED_NAMES_EX(object):
#2.2.22 LSAPR_TRANSLATED_NAMES_EX
#
#http://msdn.microsoft.com/en-us/library/cc234464%28PROT.13%29.aspx
#
#typedef struct _LSAPR_TRANSLATED_NAMES_EX {
# [range(0,20480)] unsigned long Entries;
# [size_is(Entries)] PLSAPR_TRANSLATED_NAME_EX Names;
#} LSAPR_TRANSLATED_NAMES_EX,
# *PLSAPR_TRANSLATED_NAMES_EX;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Entries = 0
self.Data = []
self.Pointer = 0x6879
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Entries = self.__packer.unpack_long()
self.Pointer = self.__packer.unpack_pointer()
if self.Entries != 0:
Sids = lsarpc.LSAPR_TRANSLATED_NAMES_EX(self.__packer)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_long(self.Entries)
self.__packer.pack_pointer(self.Pointer)
self.__packer.pack_long(self.Entries)
Names = lsarpc.LSAPR_TRANSLATED_NAME_EX(self.__packer)
Names.Entries = self.Entries
Names.pack()
ops = {
0: "Close",
44: "OpenPolicy",
57: "LookupSids2",
58: "LookupNames2"
}
@classmethod
def handle_OpenPolicy(cls, con, p):
# 3.1.4.4.1 LsarOpenPolicy2 (Opnum 44)
#
# http://msdn.microsoft.com/en-us/library/cc234337%28PROT.10%29.aspx
#
#NTSTATUS LsarOpenPolicy2(
# [in, unique, string] wchar_t* SystemName,
# [in] PLSAPR_OBJECT_ATTRIBUTES ObjectAttributes,
# [in] ACCESS_MASK DesiredAccess,
# [out] LSAPR_HANDLE* PolicyHandle
#);
x = ndrlib.Unpacker(p.StubData)
PSystemName = x.unpack_pointer()
SystemName = x.unpack_string()
rpclog.debug("ServerName %s" % SystemName)
ObjectAttributes = lsarpc.LSAPR_OBJECT_ATTRIBUTES(x)
DesiredAccess = x.unpack_long()
r = ndrlib.Packer()
PolicyHandle = lsarpc.LSAPR_HANDLE(r)
PolicyHandle.Handle = b'01234567890123456789'
PolicyHandle.pack()
# return
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_LookupNames2(cls, con, p):
# 3.1.4.7 LsarLookupNames2 (Opnum 58)
#
# http://msdn.microsoft.com/en-us/library/cc234494%28PROT.13%29.aspx
#
#NTSTATUS LsarLookupNames2(
# [in] LSAPR_HANDLE PolicyHandle,
# [in, range(0,1000)] unsigned long Count,
# [in, size_is(Count)] PRPC_UNICODE_STRING Names,
# [out] PLSAPR_REFERENCED_DOMAIN_LIST* ReferencedDomains,
# [in, out] PLSAPR_TRANSLATED_SIDS_EX TranslatedSids,
# [in] LSAP_LOOKUP_LEVEL LookupLevel,
# [in, out] unsigned long* MappedCount,
# [in] unsigned long LookupOptions,
# [in] unsigned long ClientRevision
#);
x = ndrlib.Unpacker(p.StubData)
PolicyHandle = lsarpc.LSAPR_HANDLE(x)
Count = x.unpack_long()
# Maxcount, needed as the element of NDR array
MaxCount = x.unpack_long()
Names = samr.RPC_UNICODE_STRING(x,MaxCount)
TranslatedSids = lsarpc.LSAPR_TRANSLATED_SIDS(x)
LookupLevel = x.unpack_short()
MappedCount = x.unpack_long()
LookupOptions = x.unpack_long()
ClientRevision = x.unpack_long()
r = ndrlib.Packer()
r.pack_pointer(0x23456)
ReferenceDomains = lsarpc.LSAPR_REFERENCED_DOMAIN_LIST(r)
ReferenceDomains.Data = ['HOMEUSER-3AF6FE']
ReferenceDomains.Entries = len(ReferenceDomains.Data)
ReferenceDomains.pack()
Sids = lsarpc.LSAPR_TRANSLATED_SIDS(r)
Sids.Entries = Count
Sids.pack()
# MappedCount
r.pack_long(3)
# Return
r.pack_pointer(0x00000107) #STATUS_SOME_NOT_MAPPED
return r.get_buffer()
@classmethod
def handle_LookupSids2(cls, con, p):
# 3.1.4.10 LsarLookupSids2 (Opnum 57)
#
# http://msdn.microsoft.com/en-us/library/cc234487%28PROT.13%29.aspx
#
#NTSTATUS LsarLookupSids2(
# [in] LSAPR_HANDLE PolicyHandle,
# [in] PLSAPR_SID_ENUM_BUFFER SidEnumBuffer,
# [out] PLSAPR_REFERENCED_DOMAIN_LIST* ReferencedDomains,
# [in, out] PLSAPR_TRANSLATED_NAMES_EX TranslatedNames,
# [in] LSAP_LOOKUP_LEVEL LookupLevel,
# [in, out] unsigned long* MappedCount,
# [in] unsigned long LookupOptions,
# [in] unsigned long ClientRevision
#);
x = ndrlib.Unpacker(p.StubData)
PolicyHandle = lsarpc.LSAPR_HANDLE(x)
SidEnumBuffer = lsarpc.LSAPR_SID_ENUM_BUFFER(x)
rpclog.debug("EntriesRead = %i" % SidEnumBuffer.Entries)
TranslatedNames = lsarpc.LSAPR_TRANSLATED_NAMES_EX(x)
LookupLevel = x.unpack_short()
MappedCount = x.unpack_long()
LookupOptions = x.unpack_long()
ClientRevision = x.unpack_long()
rpclog.debug ("LookupLevel %i MappedCount %i LookupOptions %i ClientRevision %i" %(
LookupLevel,MappedCount,LookupOptions,ClientRevision))
r = ndrlib.Packer()
r.pack_pointer(0x23456)
ReferenceDomains = lsarpc.LSAPR_REFERENCED_DOMAIN_LIST(r)
ReferenceDomains.Data = ['HOMEUSER-3AF6FE']
ReferenceDomains.Entries = len(ReferenceDomains.Data)
ReferenceDomains.pack()
# Nmap smb-enum-users.nse scanning will simply return none of the
# element has translated, ugly but it works for the moment
TranslatedNames = lsarpc.LSAPR_TRANSLATED_NAMES_EX(r)
TranslatedNames.Entries = SidEnumBuffer.Entries
TranslatedNames.pack()
# return
r.pack_long(0)
r.pack_pointer(0xc0000073) #STATUS_NONE_MAPPED
return r.get_buffer()
@classmethod
def handle_Close(cls, con, p):
#3.1.4.3 LsarClose (Opnum 0)
#
#http://msdn.microsoft.com/en-us/library/cc234490%28v=PROT.13%29.aspx
#
#NTSTATUS LsarClose(
# [in, out] LSAPR_HANDLE* ObjectHandle
#);
x = ndrlib.Unpacker(p.StubData)
ObjectHandle = lsarpc.LSAPR_HANDLE(x)
rpclog.debug("ObjectHandle %s" % ObjectHandle)
r = ndrlib.Packer()
s = lsarpc.LSAPR_HANDLE(r)
s.Handle = b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'
s.pack()
r.pack_long(0)
return r.get_buffer()
class msgsvcsend(RPCService):
uuid = UUID('5a7b91f8-ff00-11d0-a9b2-00c04fb6e6fc').hex
class MSMQ(RPCService):
uuid = UUID('fdb3a030-065f-11d1-bb9b-00a024ea5525').hex
ops = {
0x06: "QMCreateObjectInternal",
0x09: "QMDeleteObject",
}
vulns = {
0x06: "MS07-065",
0x09: "MS05-017",
}
@classmethod
def handle_QMCreateObjectInternal(cls, con, p):
# MS07-065
pass
@classmethod
def handle_QMDeleteObject(cls, con, p):
# MS05-017
pass
class netdfs(RPCService):
uuid = UUID('4fc742e0-4a10-11cf-8273-00aa004ae673').hex
class netlogon(RPCService):
uuid = UUID('12345678-1234-abcd-ef00-01234567cffb').hex
class nddeapi(RPCService):
uuid = UUID('2f5f3220-c126-1076-b549-074d078619da').hex
ops = {
0x0c: "NDdeSetTrustedShareW"
}
vulns = {
0x0c: "MS04-031"
}
@classmethod
def handle_NDdeSetTrustedShareW(cls, con, p):
# MS04-031
pass
class NWWKS(RPCService):
uuid = UUID('e67ab081-9844-3521-9d32-834f038001c0').hex
ops = {
0x09: "NwOpenEnumNdsSubTrees",
0x01: "NwChangePassword"
}
vulns = {
0x09: "MS06-66",
0x01: "MS06-66",
}
@classmethod
def handle_NwOpenEnumNdsSubTrees(cls, con, p):
# MS06-066
pass
@classmethod
def handle_NwChangePassword(cls, con, p):
# MS06-066
pass
class NsiS(RPCService):
uuid = UUID('d6d70ef0-0e3b-11cb-acc3-08002b1d29c4').hex
class PNP(RPCService):
uuid = UUID('8d9f4e40-a03d-11ce-8f69-08003e30051b').hex
ops = {
0x36: "PNP_QueryResConfList",
}
vulns = {
0x36: "MS05-39",
}
@classmethod
def handle_PNP_QueryResConfList(cls, con, p):
# MS05-39
pass
class PolicyAgent(RPCService):
uuid = UUID('d335b8f6-cb31-11d0-b0f9-006097ba4e54').hex
class pmapapi(RPCService):
uuid = UUID('369ce4f0-0fdc-11d3-bde8-00c04f8eee78').hex
class RemoteAccess(RPCService):
uuid = UUID('8f09f000-b7ed-11ce-bbd2-00001a181cad').hex
class MGMT(RPCService):
""" Remote Management Interface
http://www.opengroup.org/onlinepubs/9629399/apdxq.htm """
uuid = UUID('afa8bd80-7d8a-11c9-bef4-08002b102989').hex
ops = {
0 : "inq_if_ids",
1 : "inq_stats",
2 : "is_server_listening",
3 : "stop_server_listening",
4 : "inq_princ_name"
}
# As I lack a way to verify the code, this is commented, maybe samba4
# smbtorture can help out
class handle_t(object):
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
pass
elif isinstance(self.__packer,ndrlib.Unpacker):
self.handle = self.__packer.unpack_short()
class uuid_t(object):
# typedef struct {
# unsigned32 time_low;
# unsigned16 time_mid;
# unsigned16 time_hi_and_version;
# unsigned8 clock_seq_hi_and_reserved;
# unsigned8 clock_seq_low;
# byte node[6];
# } uuid_t, *uuid_p_t;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.__packer = p
self.time_low = 0
self.time_mid = 1
self.time_hi_and_version = 2
self.clock_seq_hi_and_reserved = 3
self.clock_seq_low = 4
self.node = b"56789a"
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_long(self.time_low)
self.__packer.pack_short(self.time_mid)
self.__packer.pack_short(self.time_hi_and_version)
self.__packer.pack_small(self.clock_seq_hi_and_reserved)
self.__packer.pack_small(self.clock_seq_low)
self.__packer.pack_raw(self.node)
def __str__(self):
return "123455"
class rpc_if_id_t(object):
# typedef struct {
# uuid_t uuid;
# unsigned16 vers_major;
# unsigned16 vers_minor;
# } rpc_if_id_t;
# typedef [ptr] rpc_if_id_t *rpc_if_id_p_t;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.uuid = MGMT.uuid_t(p)
self.vers_major = 0
self.vers_minor = 1
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.uuid.pack()
self.__packer.pack_short(self.vers_major)
self.__packer.pack_short(self.vers_minor)
def show(self):
rpclog.debug("uuid %s %i.%i" %
(self.uuid, self.vers_major, self.vers_minor))
class rpc_if_id_vector_t(object):
# typedef struct {
# unsigned32 count;
# [size_is(count)]
# rpc_if_id_p_t if_id[*];
# } rpc_if_id_vector_t;
# typedef [ptr] rpc_if_id_vector_t *rpc_if_id_vector_p_t;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.count = 0
self.if_id = []
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.count = len(self.if_id)
self.__packer.pack_long(self.count)
self.__packer.pack_long(self.count) # maybe array size?
# pointers ...
for i in self.if_id:
self.__packer.pack_pointer(65)
# the if_id_vectors
for i in self.if_id:
i.pack()
def show(self, indent=0):
rpclog.debug("rpc_if_id_vector_t")
rpclog.debug("count %i", len(self.if_id))
for i in self.if_id:
i.show()
@classmethod
def handle_inq_if_ids(cls, con, p):
#
# void rpc__mgmt_inq_if_ids
# (
# [in] handle_t binding_handle,
# [out] rpc_if_id_vector_p_t *if_id_vector,
# [out] error_status_t *status
# );
r = ndrlib.Packer()
r.pack_pointer(0x4747)
v = MGMT.rpc_if_id_vector_t(r)
v.if_id.append(MGMT.rpc_if_id_t(r))
v.show()
v.pack()
r.pack_long(0) # return value
return r.get_buffer()
@classmethod
def handle_inq_stats(cls, con, p):
pass
@classmethod
def handle_is_server_listening(cls, con, p):
pass
@classmethod
def handle_stop_server_listening(cls, con, p):
pass
@classmethod
def handle_inq_princ_name(cls, con, p):
# void rpc__mgmt_inq_princ_name
# (
# [in] handle_t binding_handle,
# [in] unsigned32 authn_proto,
# [in] unsigned32 princ_name_size,
# [out, string, size_is(princ_name_size)]
# char princ_name[],
# [out] error_status_t *status
# );
x = ndrlib.Unpacker(p.StubData)
handle = MGMT.handle_t(x)
# authn_proto = x.unpack_long()
# princ_name_size = x.unpack_long()
r = ndrlib.Packer()
r.pack_string(b"oemcomputer")
# r.pack_long(0)
# r.pack_long(0)
return r.get_buffer()
__userinfo__ = {
'Administrator' : {
'RID': 500,
'comment' : 'Built-in account for administering the computer/domain'
},
'Guest' : {
'RID': 501,
'comment' : 'Built-in account for guest access the computer/domain'
},
'HelpAssistant' : {
'RID': 1000,
'comment' : 'Account for Providing Remote Assistance'
},
'SUPPORT_388945a0' : {
'RID' : 1002,
'comment' : 'This is a vendor\'s account for the Help and Support Service'
}
}
class samr(RPCService):
""" [MS-SAMR]: Security Account Manager (SAM) Remote Protocol Specification (Client-to-Server)
http://msdn.microsoft.com/en-us/library/cc245476%28v=PROT.13%29.aspx
http://download.microsoft.com/download/a/e/6/ae6e4142-aa58-45c6-8dcf-a657e5900cd3/%5BMS-SAMR%5D.pdf"""
uuid = UUID('12345778-1234-abcd-ef00-0123456789ac').hex
# Used for SAMR handle_LookupNamesInDomain and handle_QueryInformationUser
LookupName = ""
class SAMPR_HANDLE(object):
# 2.2.3.2 SAMPR_HANDLE
#
# http://msdn.microsoft.com/en-us/library/cc245544%28v=PROT.10%29.aspx
#
# typedef [context_handle] void* SAMPR_HANDLE;
#
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Handle = b''
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Value = p.unpack_raw(20)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_raw(self.Handle)
class RPC_SID_IDENTIFIER_AUTHORITY(object):
# 2.4.1.1 RPC_SID_IDENTIFIER_AUTHORITY
#
# http://msdn.microsoft.com/en-us/library/cc230372%28PROT.10%29.aspx
#
# typedef struct _RPC_SID_IDENTIFIER_AUTHORITY {
# byte Value[6];
# } RPC_SID_IDENTIFIER_AUTHORITY;
#
SID_AUTHORITY = {
'NULL_SID_AUTHORITY' : b'\x00\x00\x00\x00\x00\x00',
'WORLD_SID_AUTHORITY' : b'\x00\x00\x00\x00\x00\x01',
'LOCAL_SID_AUTHORITY' : b'\x00\x00\x00\x00\x00\x02',
'CREATOR_SID_AUTHORITY' : b'\x00\x00\x00\x00\x00\x03',
'NON_UNIQUE_AUTHORITY' : b'\x00\x00\x00\x00\x00\x04',
'NT_AUTHORITY' : b'\x00\x00\x00\x00\x00\x05',
'SECURITY_MANDATORY_LABEL_AUTHORITY' : b'\x00\x00\x00\x00\x00\x10'
}
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Value = ''
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Value = self.__packer.unpack_raw(6)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
if not self.SID_AUTHORITY.get(self.Value) == None:
self.__packer.pack_raw(self.SID_AUTHORITY[self.Value])
class RPC_SID(object):
# 2.4.2.2 RPC_SID
#
# http://msdn.microsoft.com/en-us/library/cc230364%28PROT.10%29.aspx
#
# typedef struct _RPC_SID {
# unsigned char Revision;
# unsigned char SubAuthorityCount;
# RPC_SID_IDENTIFIER_AUTHORITY IdentifierAuthority;
# [size_is(SubAuthorityCount)]
# unsigned long SubAuthority[];
# } RPC_SID,
# *PRPC_SID;
#
def __init__(self, p):
self.__packer = p
if isinstance(p,ndrlib.Packer):
self.Value = ''
self.Revision = 1 # must be 0x01
self.SubAuthorityCount = 0
self.SubAuthority = []
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Revision = self.__packer.unpack_small()
self.SubAuthorityCount = self.__packer.unpack_small()
self.IdentifierAuthority = samr.RPC_SID_IDENTIFIER_AUTHORITY(
self.__packer)
self.SubAuthority = []
for i in range(self.SubAuthorityCount):
self.SubAuthority.append(p.unpack_long())
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
# Revision
self.__packer.pack_small(self.Revision)
# SubAuthorityCount
self.__packer.pack_small(self.SubAuthorityCount)
# RPC_SID_IDENTIFIER_AUTHORITY
b = samr.RPC_SID_IDENTIFIER_AUTHORITY(self.__packer)
b.Value = self.Value
b.pack()
# SubAuthority
for i in range(self.SubAuthorityCount):
self.__packer.pack_long(int(self.SubAuthority[i]))
class RPC_UNICODE_STRING(object):
# 2.3.5 RPC_UNICODE_STRING
#
# http://msdn.microsoft.com/en-us/library/cc230365%28PROT.10%29.aspx
#
# typedef struct _RPC_UNICODE_STRING {
# unsigned short Length;
# unsigned short MaximumLength;
# [size_is(MaximumLength/2), length_is(Length/2)]
# WCHAR* Buffer;
# } RPC_UNICODE_STRING,
# *PRPC_UNICODE_STRING;
#
def __init__(self, p, c=1):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Data =[]
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Count = c #specify how many string array
rpclog.debug("Count = %i" % self.Count)
for i in range(self.Count):
self.Length = self.__packer.unpack_short()
self.MaximumLength = self.__packer.unpack_short()
self.Reference = self.__packer.unpack_pointer()
for j in range(self.Count):
self.Buffer = self.__packer.unpack_string()
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_rpc_unicode_string(self.Data)
class SAMPR_RID_ENUMERATION(object):
# 2.2.3.9 SAMPR_RID_ENUMERATION
#
# http://msdn.microsoft.com/en-us/library/cc245560%28PROT.10%29.aspx
#
# typedef struct _SAMPR_RID_ENUMERATION {
# unsigned long RelativeId;
# RPC_UNICODE_STRING Name;
# } SAMPR_RID_ENUMERATION,
# *PSAMPR_RID_ENUMERATION;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Name = []
self.RelativeId = 0
self.Pointer = 0x11
elif isinstance(self.__packer,ndrlib.Unpacker):
self.RelativeId = self.__packer.unpack_long()
self.Name = samr.RPC_UNICODE_STRING(self.__packer, Name)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
for i in range(len(self.Name)):
#RelativeID
self.__packer.pack_long(self.RelativeId)
b = samr.RPC_UNICODE_STRING(self.__packer)
b.Data = self.Name[i]
b.pack()
self.__packer.pack_pointer(self.Pointer)
for j in range(len(self.Name)):
self.__packer.pack_string(self.Name[j].encode('utf16')[2:])
class SAMPR_ENUMERATION_BUFFER(object):
# 2.2.3.10 SAMPR_ENUMERATION_BUFFER
#
# http://msdn.microsoft.com/en-us/library/cc245561%28v=PROT.10%29.aspx
#
# typedef struct _SAMPR_ENUMERATION_BUFFER {
# unsigned long EntriesRead;
# [size_is(EntriesRead)] PSAMPR_RID_ENUMERATION Buffer;
# } SAMPR_ENUMERATION_BUFFER,
# *PSAMPR_ENUMERATION_BUFFER;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = 0
self.Buffer = []
self.Pointer = 0x4711
elif isinstance(self.__packer,ndrlib.Unpacker):
raise NotImplementedError
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
# EntriesRead
self.__packer.pack_long(self.EntriesRead)
self.__packer.pack_pointer(self.Pointer)
# Maxcount, needed as NDR array
self.__packer.pack_long(self.EntriesRead)
b = samr.SAMPR_RID_ENUMERATION(self.__packer)
b.Name = self.Buffer
b.pack()
class SAMPR_DOMAIN_DISPLAY_USER(object):
# 2.2.8.2 SAMPR_DOMAIN_DISPLAY_USER
#
# http://msdn.microsoft.com/en-us/library/cc245632%28PROT.10%29.aspx
#
# typedef struct _SAMPR_DOMAIN_DISPLAY_USER {
# unsigned long Index;
# unsigned long Rid;
# unsigned long AccountControl;
# RPC_UNICODE_STRING AccountName;
# RPC_UNICODE_STRING AdminComment;
# RPC_UNICODE_STRING FullName;
#} SAMPR_DOMAIN_DISPLAY_USER,
# *PSAMPR_DOMAIN_DISPLAY_USER;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Name = []
self.Index = 0
self.Rid = 0
# AccountControl
# http://msdn.microsoft.com/en-us/library/cc245514%28v=PROT.10%29.aspx
self.AccountControl = 16 # USER_NORMAL_ACCOUNT
self.Pointer = 0x11
elif isinstance(self.__packer,ndrlib.Unpacker):
self.RelativeId = self.__packer.unpack_long()
self.Name = samr.RPC_UNICODE_STRING(self.__packer, Name)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
for i in range(int(len(self.Name)/3)):
#Index
self.__packer.pack_long(self.Index)
#RelativeID
self.__packer.pack_long(self.Rid)
#AccountCotrol
self.__packer.pack_long(self.AccountControl)
for k in range(3):
b = samr.RPC_UNICODE_STRING(self.__packer)
b.Data = self.Name[i*k]
b.pack()
self.__packer.pack_pointer(self.Pointer)
for j in range(len(self.Name)):
self.__packer.pack_string(self.Name[j].encode('utf16')[2:])
class SAMPR_DOMAIN_DISPLAY_USER_BUFFER(object):
# 2.2.8.7 SAMPR_DOMAIN_DISPLAY_USER_BUFFER
#
# http://msdn.microsoft.com/en-us/library/cc245637%28PROT.13%29.aspx
#
#typedef struct _SAMPR_DOMAIN_DISPLAY_USER_BUFFER {
# unsigned long EntriesRead;
# [size_is(EntriesRead)] PSAMPR_DOMAIN_DISPLAY_USER Buffer;
#} SAMPR_DOMAIN_DISPLAY_USER_BUFFER,
# *PSAMPR_DOMAIN_DISPLAY_USER_BUFFER;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = 0
self.Buffer = []
self.Pointer = 0x4711
elif isinstance(self.__packer,ndrlib.Unpacker):
raise NotImplementedError
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
# EntriesRead
self.__packer.pack_long(self.EntriesRead)
self.__packer.pack_pointer(self.Pointer)
# Maxcount, needed as NDR array
self.__packer.pack_long(self.EntriesRead)
b = samr.SAMPR_DOMAIN_DISPLAY_USER(self.__packer)
b.Name = self.Buffer
b.pack()
class ACCESS_ALLOWED_ACE(object):
# ACCESS_ALLOWED_ACE Structure
#
# http://msdn.microsoft.com/en-us/library/aa374847%28v=vs.85%29.aspx
#
# typedef struct _ACCESS_ALLOWED_ACE {
# ACE_HEADER Header;
# ACCESS_MASK Mask;
# DWORD SidStart;
#} ACCESS_ALLOWED_ACE, *PACCESS_ALLOWED_ACE;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.AceType = 0
self.AceFlags = 0
self.AceSize = 20
self.Mask = 0x0002035b
elif isinstance(self.__packer,ndrlib.Unpacker):
raise NotImplementedError
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
#http://msdn.microsoft.com/en-us/library/aa374919%28v=vs.85%29.aspx
# typedef struct _ACE_HEADER {
# BYTE AceType;
# BYTE AceFlags;
# WORD AceSize;
#} ACE_HEADER, *PACE_HEADER;
self.__packer.pack_small(self.AceType)
self.__packer.pack_small(self.AceFlags)
self.__packer.pack_short(self.AceSize)
# ACCESS_MASK Mask;
self.__packer.pack_long(self.Mask)
# DWORD SidStart;
# for example : SID = S-1-1-0
SidStart = samr.RPC_SID(self.__packer)
SidStart.Value = 'WORLD_SID_AUTHORITY'
SidStart.SubAuthority = ['0']
SidStart.SubAuthorityCount = len(SidStart.SubAuthority)
SidStart.pack()
class ACL(object):
# ACL Structure
# http://msdn.microsoft.com/en-us/library/aa374931%28v=vs.85%29.aspx
#
# typedef struct _ACL {
# BYTE AclRevision;
# BYTE Sbz1;
# WORD AclSize;
# WORD AceCount;
# WORD Sbz2;
#} ACL, *PACL;
# A complete ACL consists of an ACL structure followed by an ordered
# list of zero or more access control entries (ACEs).
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.AclRevision = 2
self.Sbz1 = 0
self.AclSize = 20
self.AceCount = 1
self.Sbz2 = 0
elif isinstance(self.__packer,ndrlib.Unpacker):
raise NotImplementedError
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
# ACL Structure
self.__packer.pack_small(self.AclRevision)
self.__packer.pack_small(self.Sbz1)
self.__packer.pack_short(self.AclSize)
self.__packer.pack_short(self.AceCount)
self.__packer.pack_short(self.Sbz2)
# Followed by a ACCESS_ALLOWED_ACE Structure
b = samr.ACCESS_ALLOWED_ACE(self.__packer)
b.pack()
class SECURITY_DESCRIPTOR(object):
# 2.4.6 SECURITY_DESCRIPTOR
#
# http://msdn.microsoft.com/en-us/library/cc230366%28v=prot.10%29.aspx
# The documentation only provide the struct details as below:
#
# Revision (1 byte)
# Sbz1 (1 byte)
# Control (2 bytes)
# OffsetOwner (4 bytes)
# OffsetGroup (4 bytes)
# OffsetSacl (4 bytes)
# OffsetDacl (4 bytes)
# OwnerSid (variable)
# GroupSid (variable)
# Sacl (variable)
# Dacl (variable)
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Revision = 1
self.Sbz1 = 0
self.Control = 0x8004
self.OffsetOwner = 0
self.OffsetGroup = 0
self.OffsetSacl = 0
self.OffsetDacl = 20
elif isinstance(self.__packer,ndrlib.Unpacker):
raise NotImplementedError
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
self.__packer.pack_small(self.Revision)
self.__packer.pack_small(self.Sbz1)
self.__packer.pack_short(self.Control)
self.__packer.pack_long(self.OffsetOwner)
self.__packer.pack_long(self.OffsetGroup)
self.__packer.pack_long(self.OffsetSacl)
self.__packer.pack_long(self.OffsetDacl)
b = samr.ACL(self.__packer)
b.pack()
class SAMPR_SR_SECURITY_DESCRIPTOR(object):
# 2.2.3.11 SAMPR_SR_SECURITY_DESCRIPTOR
#
# http://msdn.microsoft.com/en-us/library/cc245537%28v=prot.10%29.aspx
#
#typedef struct _SAMPR_SR_SECURITY_DESCRIPTOR {
# [range(0, 256 * 1024)] unsigned long Length;
# [size_is(Length)] unsigned char* SecurityDescriptor;
#} SAMPR_SR_SECURITY_DESCRIPTOR,
# *PSAMPR_SR_SECURITY_DESCRIPTOR;
def __init__(self, p, c=0):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Length = c
self.Pointer = 0xd1b00
elif isinstance(self.__packer,ndrlib.Unpacker):
raise NotImplementedError
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
if self.Length == 0:
# Support for samr.handle_QueryInformationUser()
self.__packer.pack_long(self.Length)
self.__packer.pack_pointer(0)
else:
# Support samr.handle_QuerySecurityObject()
# Pointer to SecurityDescriptor
self.__packer.pack_pointer(self.Pointer)
self.__packer.pack_long(self.Length)
b = samr.SECURITY_DESCRIPTOR(self.__packer)
b.pack()
class SAMPR_USER_ALL_INFORMATION(object):
# 2.2.7.6 SAMPR_USER_ALL_INFORMATION
#
# http://msdn.microsoft.com/en-us/library/cc245622%28v=prot.10%29.aspx
#
#typedef struct _SAMPR_USER_ALL_INFORMATION {
# OLD_LARGE_INTEGER LastLogon;
# OLD_LARGE_INTEGER LastLogoff;
# OLD_LARGE_INTEGER PasswordLastSet;
# OLD_LARGE_INTEGER AccountExpires;
# OLD_LARGE_INTEGER PasswordCanChange;
# OLD_LARGE_INTEGER PasswordMustChange;
# RPC_UNICODE_STRING UserName;
# RPC_UNICODE_STRING FullName;
# RPC_UNICODE_STRING HomeDirectory;
# RPC_UNICODE_STRING HomeDirectoryDrive;
# RPC_UNICODE_STRING ScriptPath;
# RPC_UNICODE_STRING ProfilePath;
# RPC_UNICODE_STRING AdminComment;
# RPC_UNICODE_STRING WorkStations;
# RPC_UNICODE_STRING UserComment;
# RPC_UNICODE_STRING Parameters;
# RPC_SHORT_BLOB LmOwfPassword;
# RPC_SHORT_BLOB NtOwfPassword;
# RPC_UNICODE_STRING PrivateData;
# SAMPR_SR_SECURITY_DESCRIPTOR SecurityDescriptor;
# unsigned long UserId;
# unsigned long PrimaryGroupId;
# unsigned long UserAccountControl;
# unsigned long WhichFields;
# SAMPR_LOGON_HOURS LogonHours;
# unsigned short BadPasswordCount;
# unsigned short LogonCount;
# unsigned short CountryCode;
# unsigned short CodePage;
# unsigned char LmPasswordPresent;
# unsigned char NtPasswordPresent;
# unsigned char PasswordExpired;
# unsigned char PrivateDataSensitive;
#} SAMPR_USER_ALL_INFORMATION,
# *PSAMPR_USER_ALL_INFORMATION;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.LastLogon = 0
self.LastLogoff = 0
# Last Password Change = Apr 20,2011 07:32:44 78125000
self.PasswordLastSet = 0x01cbfeea1590eb94
self.AccountExpires = 0
self.PasswordCanChange = 0
self.PasswordMustChange = 0x7fffffffffffffff
self.Buffer = []
self.Pointer = 0x6511
self.UserId = 0
self.PrimaryGroupId = 513
self.UserAccountControl = 0x00000210
self.WhichFields = 0x00ffffff
self.UnitsPerWeek = 168
self.LogonHours = 0xff
self.BadPasswordCount = 0
self.LogonCount = 0
self.CountryCode = 0
self.CodePage = 0
self.LmPasswordPresent = 0
self.NtPasswordPresent = 0
self.PasswordExpired = 0
self.PrivateDataSensitive = 0
elif isinstance(self.__packer,ndrlib.Unpacker):
raise NotImplementedError
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
self.__packer.pack_hyper(self.LastLogon)
self.__packer.pack_hyper(self.LastLogoff)
self.__packer.pack_hyper(self.PasswordLastSet)
self.__packer.pack_hyper(self.AccountExpires)
self.__packer.pack_hyper(self.PasswordCanChange)
self.__packer.pack_hyper(self.PasswordMustChange)
for k in range(13):
b = samr.RPC_UNICODE_STRING(self.__packer)
b.Data = self.Buffer[k]
b.pack()
self.__packer.pack_pointer(self.Pointer)
# SAMPR_SR_SECURITY_DESCRIPTOR SecurityDescriptor;
k = samr.SAMPR_SR_SECURITY_DESCRIPTOR(self.__packer, 0)
k.pack()
self.__packer.pack_long(self.UserId)
self.__packer.pack_long(self.PrimaryGroupId)
self.__packer.pack_long(self.UserAccountControl)
self.__packer.pack_long(self.WhichFields)
# SAMPR_LOGON_HOURS LogonHours;
# 2.2.7.5 SAMPR_LOGON_HOURS
# http://msdn.microsoft.com/en-us/library/cc245621%28v=prot.10%29.aspx
self.__packer.pack_long(self.UnitsPerWeek)
self.__packer.pack_pointer(self.Pointer)
self.__packer.pack_short(self.BadPasswordCount)
self.__packer.pack_short(self.LogonCount)
self.__packer.pack_short(self.CountryCode)
self.__packer.pack_short(self.CodePage)
self.__packer.pack_small(self.LmPasswordPresent)
self.__packer.pack_small(self.NtPasswordPresent)
self.__packer.pack_small(self.PasswordExpired)
self.__packer.pack_small(self.PrivateDataSensitive)
for j in range(len(self.Buffer)):
self.__packer.pack_string(
self.Buffer[j].encode('utf16')[2:])
self.__packer.pack_long(int(self.UnitsPerWeek*60/8))
self.__packer.pack_long(0)
self.__packer.pack_long(int(self.UnitsPerWeek/8))
for l in range(int(self.UnitsPerWeek/8)):
self.__packer.pack_small(self.LogonHours)
class SAMPR_PSID_ARRAY(object):
# 2.2.3.6 SAMPR_PSID_ARRAY
#
# http://msdn.microsoft.com/en-us/library/cc245548%28v=prot.10%29.aspx
#
# typedef struct _SAMPR_PSID_ARRAY {
# [range(0,1024)] unsigned long Count;
# [size_is(Count)] PSAMPR_SID_INFORMATION Sids;
#} SAMPR_PSID_ARRAY,
# *PSAMPR_PSID_ARRAY;
# 2.2.3.5 SAMPR_SID_INFORMATION
#
#http://msdn.microsoft.com/en-us/library/cc245547%28v=prot.10%29.aspx
#
# typedef struct _SAMPR_SID_INFORMATION {
# PRPC_SID SidPointer;
#} SAMPR_SID_INFORMATION,
# *PSAMPR_SID_INFORMATION;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Count1 = 0
self.Count2 = 0
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Count1 = self.__packer.unpack_long()
for i in range(int(self.Count1)):
self.SidPointer = self.__packer.unpack_pointer()
for j in range(int(self.Count1)):
self.Count2 = self.__packer.unpack_long()
Sids = samr.RPC_SID(self.__packer)
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
raise NotImplementedError
ops = {
1: "Close",
3: "QuerySecurityObject",
5: "LookupDomain",
6: "EnumDomains",
7: "OpenDomain",
13: "EnumDomainUsers",
15: "EnumerateAliasesInDomain",
16: "GetAliasMembership",
17: "LookupNamesInDomain",
34: "OpenUser",
36: "QueryInformationUser",
39: "GetGroupsForUser",
40: "QueryDisplayInformation",
46: "QueryInformationDomain2",
62: "Connect4",
64: "Connect5"
}
@classmethod
def handle_Connect4(cls, con, p):
# 3.1.5.1.2 SamrConnect4 (Opnum 62)
#
# http://msdn.microsoft.com/en-us/library/cc245746%28PROT.10%29.aspx
#
# long SamrConnect4(
# [in, unique, string] PSAMPR_SERVER_NAME ServerName,
# [out] SAMPR_HANDLE* ServerHandle,
# [in] unsigned long ClientRevision,
# [in] unsigned long DesiredAccess
# );
x = ndrlib.Unpacker(p.StubData)
PServerName = x.unpack_pointer()
ServerName = x.unpack_string()
rpclog.debug("ServerName %s" % ServerName)
DesiredAccess = x.unpack_long()
rpclog.debug("DesiredAccess %i" % DesiredAccess)
ClientRevision = x.unpack_long()
rpclog.debug("InVersion %i" % ClientRevision)
r = ndrlib.Packer()
# ServerHandle
ServerHandle = samr.SAMPR_HANDLE(r)
ServerHandle.Handle = b'01234567890123456789'
ServerHandle.pack()
# return
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_Connect5(cls, con, p):
# 3.1.5.1.1 SamrConnect5 (Opnum 64)
#
# http://msdn.microsoft.com/en-us/library/cc245745%28PROT.10%29.aspx
#
# long SamrConnect5(
# [in, unique, string] PSAMPR_SERVER_NAME ServerName,
# [in] unsigned long DesiredAccess,
# [in] unsigned long InVersion,
# [in, switch_is(InVersion)] SAMPR_REVISION_INFO* InRevisionInfo,
# [out] unsigned long* OutVersion,
# [out, switch_is(*OutVersion)] SAMPR_REVISION_INFO* OutRevisionInfo,
# [out] SAMPR_HANDLE* ServerHandle
# );
x = ndrlib.Unpacker(p.StubData)
PServerName = x.unpack_pointer()
ServerName = x.unpack_string()
rpclog.debug("ServerName %s" % ServerName)
DesiredAccess = x.unpack_long()
rpclog.debug("DesiredAccess %i" % DesiredAccess)
InVersion = x.unpack_long()
rpclog.debug("InVersion %i" % InVersion)
PInRevisionInfo = x.unpack_pointer()
# 2.2.3.15 SAMPR_REVISION_INFO_V1
# http://msdn.microsoft.com/en-us/library/cc245541%28v=PROT.10%29.aspx
Revision = x.unpack_long()
SupportedFeatures = x.unpack_long()
rpclog.debug("Revision %i SupportedFeatures %i" %
(Revision, SupportedFeatures))
r = ndrlib.Packer()
r.pack_pointer(0x1)
r.pack_long(InVersion)
r.pack_long(Revision)
r.pack_long(SupportedFeatures)
# ServerHandle
ServerHandle = samr.SAMPR_HANDLE(r)
ServerHandle.Handle = b'01234567890123456789'
ServerHandle.pack()
# return
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_QuerySecurityObject(cls, con, p):
#3.1.5.12.2 SamrQuerySecurityObject (Opnum 3)
#
#http://msdn.microsoft.com/en-us/library/cc245718%28v=PROT.10%29.aspx
#
#long SamrQuerySecurityObject(
# [in] SAMPR_HANDLE ObjectHandle,
# [in] SECURITY_INFORMATION SecurityInformation,
# [out] PSAMPR_SR_SECURITY_DESCRIPTOR* SecurityDescriptor
#);
x = ndrlib.Unpacker(p.StubData)
ObjectHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("ObjectHandle %s" % ObjectHandle)
SecurityInformation = x.unpack_long()
rpclog.debug("SecurityInformation %i" % SecurityInformation)
r = ndrlib.Packer()
# Pointer to struct _SAMPR_SR_SECURITY_DESCRIPTOR
r.pack_pointer(0xbbe58)
# FIXME: currently length is hardcoded as 48, make it dynamic if
# necessary
Length = 48
r.pack_long(Length)
s = samr.SAMPR_SR_SECURITY_DESCRIPTOR(r,Length)
s.pack()
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_EnumDomains(cls, con, p):
#3.1.5.2.1 SamrEnumerateDomainsInSamServer (Opnum 6)
#
#http://msdn.microsoft.com/en-us/library/cc245755%28v=PROT.10%29.aspx
#
#long SamrEnumerateDomainsInSamServer(
# [in] SAMPR_HANDLE ServerHandle,
# [in, out] unsigned long* EnumerationContext,
# [out] PSAMPR_ENUMERATION_BUFFER* Buffer,
# [in] unsigned long PreferedMaximumLength,
# [out] unsigned long* CountReturned
#);
x = ndrlib.Unpacker(p.StubData)
ServerHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("ServerHandle %s" % ServerHandle)
EnumerationContext = x.unpack_long()
rpclog.debug("EnumerationContext %i" % EnumerationContext)
PreferedMaximumLength = x.unpack_long()
rpclog.debug("PreferedMaximumLength %i" % PreferedMaximumLength)
r = ndrlib.Packer()
# unsigned long* EnumerationContext,
r.pack_pointer(EnumerationContext)
# Pointer to SAMPR_ENUMERATION_BUFFER* Buffer
r.pack_pointer(0x0da260)
# SAMPR_ENUMERATION_BUFFER Buffer
s = samr.SAMPR_ENUMERATION_BUFFER(r)
s.Buffer = ['HOMEUSER-3AF6FE','Builtin']
s.EntriesRead = len(s.Buffer)
s.pack()
# long* CountReturned
r.pack_long(s.EntriesRead)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_LookupDomain(cls, con, p):
#3.1.5.11.1 SamrLookupDomainInSamServer (Opnum 5)
#
#http://msdn.microsoft.com/en-us/library/cc245711%28v=PROT.13%29.aspx
#
#long SamrLookupDomainInSamServer(
#[in] SAMPR_HANDLE ServerHandle,
#[in] PRPC_UNICODE_STRING Name,
#[out] PRPC_SID* DomainId
#);
x = ndrlib.Unpacker(p.StubData)
ServerHandle = samr.SAMPR_HANDLE(x)
Name = samr.RPC_UNICODE_STRING(x)
r = ndrlib.Packer()
r.pack_pointer(0x0da260) #same as EnumDomain
# http://technet.microsoft.com/en-us/library/cc778824%28WS.10%29.aspx
# example the SID for the built-in Administrators group : S-1-5-32-544
DomainId = samr.RPC_SID(r)
DomainId.Value = 'NT_AUTHORITY'
DomainId.SubAuthority = ['32','544']
DomainId.SubAuthorityCount = len(DomainId.SubAuthority)
# Maxcount, needed as the element of NDR array
r.pack_long(DomainId.SubAuthorityCount)
DomainId.pack()
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_OpenDomain(cls, con, p):
# 3.1.5.1.5 SamrOpenDomain (Opnum 7)
#
# http://msdn.microsoft.com/en-us/library/cc245748%28v=PROT.10%29.aspx
#
# long SamrOpenDomain(
# [in] SAMPR_HANDLE ServerHandle,
# [in] unsigned long DesiredAccess,
# [in] PRPC_SID DomainId,
# [out] SAMPR_HANDLE* DomainHandle
# );
x = ndrlib.Unpacker(p.StubData)
ServerHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("ServerHandle %s" % ServerHandle)
DesiredAccess = x.unpack_long()
rpclog.debug("DesiredAccess %i" % DesiredAccess)
DomainId = samr.RPC_SID(x)
r = ndrlib.Packer()
DomainHandle = samr.SAMPR_HANDLE(r)
DomainHandle.Handle = b'11223344556677889900'
DomainHandle.pack()
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_EnumDomainUsers(cls, con, p):
#3.1.5.2.5 SamrEnumerateUsersInDomain (Opnum 13)
#
#http://msdn.microsoft.com/en-us/library/cc245759%28v=PROT.13%29.aspx
#
#long SamrEnumerateUsersInDomain(
#[in] SAMPR_HANDLE DomainHandle,
#[in, out] unsigned long* EnumerationContext,
#[in] unsigned long UserAccountControl,
#[out] PSAMPR_ENUMERATION_BUFFER* Buffer,
#[in] unsigned long PreferedMaximumLength,
#[out] unsigned long* CountReturned
#)
x = ndrlib.Unpacker(p.StubData)
DomainHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("DomainHandle %s" % DomainHandle)
EnumerationContext = x.unpack_long()
rpclog.debug("EnumerationContext %i" % EnumerationContext)
UserAccountControl = x.unpack_long()
rpclog.debug("UserAccountControl %i" % UserAccountControl)
PreferedMaximumLength = x.unpack_long()
rpclog.debug("PreferedMaximumLength %i" % PreferedMaximumLength)
r = ndrlib.Packer()
r.pack_pointer(EnumerationContext)
# PSAMPR_ENUMERATION_BUFFER* Buffer
r.pack_pointer(0x0da260)
# SAMPR_ENUMERATION_BUFFER Buffer
s = samr.SAMPR_ENUMERATION_BUFFER(r)
s.Buffer = ['Administrator','Guest','HelpAssistant','SUPPORT_388945a0']
s.EntriesRead = len(s.Buffer)
s.pack()
# long* CountReturned
r.pack_long(s.EntriesRead)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_GetAliasMembership(cls, con, p):
#3.1.5.9.2 SamrGetAliasMembership (Opnum 16)
#
#http://msdn.microsoft.com/en-us/library/cc245816%28v=prot.10%29.aspx
#
#long SamrGetAliasMembership(
# [in] SAMPR_HANDLE DomainHandle,
# [in] PSAMPR_PSID_ARRAY SidArray,
# [out] PSAMPR_ULONG_ARRAY Membership
#);
x = ndrlib.Unpacker(p.StubData)
DomainHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("DomainHandle %s" % DomainHandle)
Count = x.unpack_long()
# PSAMPR_PSID_ARRAY SidArray
Pointer = x.unpack_pointer()
SidArray = samr.SAMPR_PSID_ARRAY(x)
r = ndrlib.Packer()
r.pack_long(1)
# PSAMPR_ULONG_ARRAY Membership
r.pack_pointer(0x0d15a8)
r.pack_long(1)
r.pack_long(514)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_LookupNamesInDomain(cls, con, p):
#3.1.5.11.2 SamrLookupNamesInDomain (Opnum 17)
#
#http://msdn.microsoft.com/en-us/library/cc245712%28v=prot.10%29.aspx
#
#long SamrLookupNamesInDomain(
# [in] SAMPR_HANDLE DomainHandle,
# [in, range(0,1000)] unsigned long Count,
# [in, size_is(1000), length_is(Count)]
# RPC_UNICODE_STRING Names[*],
# [out] PSAMPR_ULONG_ARRAY RelativeIds,
# [out] PSAMPR_ULONG_ARRAY Use
#)
x = ndrlib.Unpacker(p.StubData)
DomainHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("DomainHandle %s" % DomainHandle)
# unsigned long Count
Count= x.unpack_long()
rpclog.debug("Count %i" % Count)
# RPC_UNICODE_STRING Names[*]
Maxcount = x.unpack_long()
rpclog.debug("Maxcount %i" % Maxcount)
Offset = x.unpack_long()
rpclog.debug("Offset %i" % Offset)
ActualCount = x.unpack_long()
rpclog.debug("ActualCount %i" % ActualCount)
Names = samr.RPC_UNICODE_STRING(x,Count)
global LookupName
LookupName = Names.Buffer.decode('UTF-16')
rpclog.debug("LookupName %s" % LookupName)
r = ndrlib.Packer()
if LookupName in __userinfo__:
# PSAMPR_ULONG_ARRAY RelativeIds
# RelativeIds.Count
r.pack_long(Count)
# RelativeIds.Element
r.pack_pointer(0x0da260)
r.pack_long(1)
data = __userinfo__[LookupName]
rid = data["RID"]
r.pack_long(rid)
# PSAMPR_ULONG_ARRAY Use
# Use.Count
r.pack_long(Count)
# Use.Element
r.pack_pointer(0x0e1288)
r.pack_long(1)
r.pack_long(1)
# return
r.pack_long(0)
else:
r.pack_long(0)
r.pack_pointer(0)
r.pack_long(0)
r.pack_pointer(0)
# return
r.pack_pointer(0xc0000073)
return r.get_buffer()
@classmethod
def handle_OpenUser(cls, con, p):
#3.1.5.1.9 SamrOpenUser (Opnum 34)
#
#http://msdn.microsoft.com/en-us/library/cc245752%28v=prot.10%29.aspx
#
#long SamrOpenUser(
# [in] SAMPR_HANDLE DomainHandle,
# [in] unsigned long DesiredAccess,
# [in] unsigned long UserId,
# [out] SAMPR_HANDLE* UserHandle
#);
x = ndrlib.Unpacker(p.StubData)
DomainHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("DomainHandle %s" % DomainHandle)
DesiredAccess = x.unpack_long()
rpclog.debug("DesiredAccess %i" % DesiredAccess)
UserId = x.unpack_long()
rpclog.debug("UserId %i" % UserId)
r = ndrlib.Packer()
# UserHandle
UserHandle = samr.SAMPR_HANDLE(r)
UserHandle.Handle = b'01234567890123456789'
UserHandle.pack()
# return
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_QueryInformationUser(cls, con, p):
#3.1.5.5.6 SamrQueryInformationUser (Opnum 36)
#
#http://msdn.microsoft.com/en-us/library/cc245786%28v=prot.10%29.aspx
#
#long SamrQueryInformationUser(
# [in] SAMPR_HANDLE UserHandle,
# [in] USER_INFORMATION_CLASS UserInformationClass,
# [out, switch_is(UserInformationClass)]
# PSAMPR_USER_INFO_BUFFER* Buffer
#);
x = ndrlib.Unpacker(p.StubData)
UserHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("UserHandle %s" % UserHandle)
UserInformationClass = x.unpack_short()
rpclog.debug("UserInformationClass %i" % UserInformationClass)
r = ndrlib.Packer()
#typedef enum _USER_INFORMATION_CLASS
#{
# UserGeneralInformation = 1,
# UserPreferencesInformation = 2,
# UserLogonInformation = 3,
# UserLogonHoursInformation = 4,
# UserAccountInformation = 5,
# UserNameInformation = 6,
# UserAccountNameInformation = 7,
# UserFullNameInformation = 8,
# UserPrimaryGroupInformation = 9,
# UserHomeInformation = 10,
# UserScriptInformation = 11,
# UserProfileInformation = 12,
# UserAdminCommentInformation = 13,
# UserWorkStationsInformation = 14,
# UserControlInformation = 16,
# UserExpiresInformation = 17,
# UserInternal1Information = 18,
# UserParametersInformation = 20,
# UserAllInformation = 21,
# UserInternal4Information = 23,
# UserInternal5Information = 24,
# UserInternal4InformationNew = 25,
# UserInternal5InformationNew = 26
#} USER_INFORMATION_CLASS,
# *PUSER_INFORMATION_CLASS;
# Pointer to the USER_INFORMATION_CLASS
r.pack_pointer(0x000cc228)
if UserInformationClass == 21 :
r.pack_long(UserInformationClass)
# SAMPR_USER_ALL_INFORMATION
s = samr.SAMPR_USER_ALL_INFORMATION(r)
rpclog.debug("LookupName %s" % cls.LookupName)
if cls.LookupName in __userinfo__:
data = __userinfo__[cls.LookupName]
rid = data["RID"]
comment = data["comment"]
s.Buffer = [
cls.LookupName,'','','','','',comment,'','','','','','']
s.UserID = rid
s.pack()
# padding
r.pack_small(0)
# return
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_GetGroupsForUser (cls, con, p):
#3.1.5.9.1 SamrGetGroupsForUser (Opnum 39)
#
#http://msdn.microsoft.com/en-us/library/cc245815%28v=prot.10%29.aspx
#
#long SamrGetGroupsForUser(
# [in] SAMPR_HANDLE UserHandle,
# [out] PSAMPR_GET_GROUPS_BUFFER* Groups
#);
x = ndrlib.Unpacker(p.StubData)
UserHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("UserHandle %s" % UserHandle)
# 2.2.3.13 SAMPR_GET_GROUPS_BUFFER
# http://msdn.microsoft.com/en-us/library/cc245539%28v=prot.10%29.aspx
#typedef struct _SAMPR_GET_GROUPS_BUFFER {
# unsigned long MembershipCount;
# [size_is(MembershipCount)] PGROUP_MEMBERSHIP Groups;
#} SAMPR_GET_GROUPS_BUFFER,
# *PSAMPR_GET_GROUPS_BUFFER;
# Note: Information about PGROUP_MEMBERSHIP struct not found on MDSN.
# The following response is made by refering to Microsoft Network
# Monitor
r = ndrlib.Packer()
r.pack_pointer(0xc6298)
# MembershipCount;
r.pack_long(1)
r.pack_pointer(0xd2a80)
r.pack_long(1)
r.pack_long(513)
r.pack_long(7)
# return
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_QueryDisplayInformation(cls, con, p):
#3.1.5.3.3 SamrQueryDisplayInformation (Opnum 40)
#
#http://msdn.microsoft.com/en-us/library/cc245763%28PROT.10%29.aspx
#
#long SamrQueryDisplayInformation(
# [in] SAMPR_HANDLE DomainHandle,
# [in] DOMAIN_DISPLAY_INFORMATION DisplayInformationClass,
# [in] unsigned long Index,
# [in] unsigned long EntryCount,
# [in] unsigned long PreferredMaximumLength,
# [out] unsigned long* TotalAvailable,
# [out] unsigned long* TotalReturned,
# [out, switch_is(DisplayInformationClass)]
# PSAMPR_DISPLAY_INFO_BUFFER Buffer
#);
x = ndrlib.Unpacker(p.StubData)
DomainHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("DomainHandle %s" % DomainHandle)
DisplayInformationClass = x.unpack_long()
rpclog.debug("DisplayInformationClass %i" % DisplayInformationClass)
Index = x.unpack_long()
rpclog.debug("Index %i" % Index)
EntryCount = x.unpack_long()
rpclog.debug("EntryCount %i" % EntryCount)
PreferredMaximumLength = x.unpack_long()
rpclog.debug("PreferredMaximumLength %i" % PreferredMaximumLength)
r = ndrlib.Packer()
# unsigned long* TotalAvailable
r.pack_long(30)
# unsigned long* TotalReturned
r.pack_long(30)
if DisplayInformationClass == 1 :
r.pack_long(DisplayInformationClass)
# SAMPR_DOMAIN_DISPLAY_USER_BUFFER
s = samr.SAMPR_DOMAIN_DISPLAY_USER_BUFFER(r)
s.Buffer = ['Administrator','Builtin','Full Name','Guest','Builtin','Full Name',
'HelpAssistant','Builtin','Full Name','SUPPORT_388945a0','Builtin','Full Name']
s.EntriesRead = int(len(s.Buffer)/3)
s.pack()
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_QueryInformationDomain2(cls, con, p):
#3.1.5.5.1 SamrQueryInformationDomain2 (Opnum 46)
#
#http://msdn.microsoft.com/en-us/library/cc245773%28PROT.13%29.aspx
#
#long SamrQueryInformationDomain2(
# [in] SAMPR_HANDLE DomainHandle,
# [in] DOMAIN_INFORMATION_CLASS DomainInformationClass,
# [out, switch_is(DomainInformationClass)]
# PSAMPR_DOMAIN_INFO_BUFFER* Buffer
#)
x = ndrlib.Unpacker(p.StubData)
DomainHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("DomainHandle %s" % DomainHandle)
DisplayInformationClass = x.unpack_long()
rpclog.debug("DisplayInformationClass %i" % DisplayInformationClass)
r = ndrlib.Packer()
#typedef
#[switch_type(DOMAIN_INFORMATION_CLASS)]
# union _SAMPR_DOMAIN_INFO_BUFFER {
# [case(DomainPasswordInformation)]
# DOMAIN_PASSWORD_INFORMATION Password;
# [case(DomainGeneralInformation)]
# SAMPR_DOMAIN_GENERAL_INFORMATION General;
# [case(DomainLogoffInformation)]
# DOMAIN_LOGOFF_INFORMATION Logoff;
# [case(DomainOemInformation)]
# SAMPR_DOMAIN_OEM_INFORMATION Oem;
# [case(DomainNameInformation)]
# SAMPR_DOMAIN_NAME_INFORMATION Name;
# [case(DomainServerRoleInformation)]
# DOMAIN_SERVER_ROLE_INFORMATION Role;
# [case(DomainReplicationInformation)]
# SAMPR_DOMAIN_REPLICATION_INFORMATION Replication;
# [case(DomainModifiedInformation)]
# DOMAIN_MODIFIED_INFORMATION Modified;
# [case(DomainStateInformation)]
# DOMAIN_STATE_INFORMATION State;
# [case(DomainGeneralInformation2)]
# SAMPR_DOMAIN_GENERAL_INFORMATION2 General2;
# [case(DomainLockoutInformation)]
# SAMPR_DOMAIN_LOCKOUT_INFORMATION Lockout;
# [case(DomainModifiedInformation2)]
# DOMAIN_MODIFIED_INFORMATION2 Modified2;
#} SAMPR_DOMAIN_INFO_BUFFER,
# *PSAMPR_DOMAIN_INFO_BUFFER;
# Pointer to the SAMPR_DOMAIN_INFO_BUFFER
r.pack_pointer(0x23456)
if DisplayInformationClass == 1:
# 2.2.4.5 DOMAIN_PASSWORD_INFORMATION
# http://msdn.microsoft.com/en-us/library/cc245575%28PROT.13%29.aspx
#typedef struct _DOMAIN_PASSWORD_INFORMATION {
# unsigned short MinPasswordLength;
# unsigned short PasswordHistoryLength;
# unsigned long PasswordProperties;
# OLD_LARGE_INTEGER MaxPasswordAge;
# OLD_LARGE_INTEGER MinPasswordAge;
#} DOMAIN_PASSWORD_INFORMATION,
# *PDOMAIN_PASSWORD_INFORMATION;
r.pack_long(DisplayInformationClass)
r.pack_short(0)
r.pack_short(0)
r.pack_hyper(999999999999)
r.pack_hyper(0)
elif DisplayInformationClass == 8:
# 2.2.4.8 DOMAIN_MODIFIED_INFORMATION
# http://msdn.microsoft.com/en-us/library/cc245578%28PROT.10%29.aspx
#typedef struct _DOMAIN_MODIFIED_INFORMATION {
# OLD_LARGE_INTEGER DomainModifiedCount;
# OLD_LARGE_INTEGER CreationTime;
#} DOMAIN_MODIFIED_INFORMATION,
# *PDOMAIN_MODIFIED_INFORMATION;
r.pack_long(DisplayInformationClass)
r.pack_hyper(10)
# Jun 25,2010 03:40:46.078125000
r.pack_raw(b'\xc2\x1e\xdc\x23\xd5\x13\xcb\x01')
elif DisplayInformationClass == 12:
# 2.2.4.15 SAMPR_DOMAIN_LOCKOUT_INFORMATION
# http://msdn.microsoft.com/en-us/library/cc245569%28PROT.13%29.aspx
#typedef struct _SAMPR_DOMAIN_LOCKOUT_INFORMATION {
# LARGE_INTEGER LockoutDuration;
# LARGE_INTEGER LockoutObservationWindow;
# unsigned short LockoutThreshold;
#} SAMPR_DOMAIN_LOCKOUT_INFORMATION,
# *PSAMPR_DOMAIN_LOCKOUT_INFORMATION;
r.pack_long(DisplayInformationClass)
r.pack_hyper(18446744055709551616) #windows XP give this value
r.pack_hyper(18446744055709551616)
r.pack_short(0)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_EnumerateAliasesInDomain(cls, con, p):
#3.1.5.2.4 SamrEnumerateAliasesInDomain (Opnum 15)
#
#http://msdn.microsoft.com/en-us/library/cc245758%28PROT.10%29.aspx
#
#long SamrEnumerateAliasesInDomain(
# [in] SAMPR_HANDLE DomainHandle,
# [in, out] unsigned long* EnumerationContext,
# [out] PSAMPR_ENUMERATION_BUFFER* Buffer,
# [in] unsigned long PreferedMaximumLength,
# [out] unsigned long* CountReturned
#)
x = ndrlib.Unpacker(p.StubData)
DomainHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("DomainHandle %s" % DomainHandle)
EnumerationContext = x.unpack_long()
rpclog.debug("EnumerationContext %i" % EnumerationContext)
PreferedMaximumLength = x.unpack_long()
rpclog.debug("PreferedMaximumLength %i" % PreferedMaximumLength)
r = ndrlib.Packer()
r.pack_long(EnumerationContext)
# PSAMPR_ENUMERATION_BUFFER* Buffer
r.pack_pointer(0x0da260)
# SAMPR_ENUMERATION_BUFFER Buffer
s = samr.SAMPR_ENUMERATION_BUFFER(r)
s.Buffer = ['Administrator','Guest']
s.EntriesRead = len(s.Buffer)
s.pack()
# long* CountReturned
r.pack_long(s.EntriesRead)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_Close(cls, con, p):
#3.1.5.13.1 SamrCloseHandle (Opnum 1)
#
#http://msdn.microsoft.com/en-us/library/cc245722%28v=PROT.13%29.aspx
#
#long SamrCloseHandle(
#[in, out] SAMPR_HANDLE* SamHandle
#);
x = ndrlib.Unpacker(p.StubData)
SamHandle = samr.SAMPR_HANDLE(x)
rpclog.debug("SamHandle %s" % SamHandle)
r = ndrlib.Packer()
s = samr.SAMPR_HANDLE(r)
s.Handle = b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0'
s.pack()
r.pack_long(0)
return r.get_buffer()
class SceSvc(RPCService):
uuid = UUID('93149ca2-973b-11d1-8c39-00c04fb984f9').hex
class sfcapi(RPCService):
uuid = UUID('83da7c00-e84f-11d2-9807-00c04f8ec850').hex
class spoolss(RPCService):
uuid = UUID('12345678-1234-abcd-ef00-0123456789ab').hex
ops = {
0x00: "EnumPrinters",
0x11: "StartDocPrinter",
0x13: "WritePrinter",
0x17: "EndDocPrinter",
0x1d: "ClosePrinter",
0x45: "OpenPrinter"
}
class DOC_INFO_1(object):
# DOC_INFO_1 Structure
#
# http://msdn.microsoft.com/en-us/library/dd162471%28v=VS.85%29.aspx
#
#typedef struct _DOC_INFO_1 {
# LPTSTR pDocName;
# LPTSTR pOutputFile;
# LPTSTR pDatatype;
#} DOC_INFO_1;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
pass
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Level = self.__packer.unpack_long()
self.Pointer = self.__packer.unpack_pointer()
self.pDocName = self.__packer.unpack_pointer()
self.pOutputFile = self.__packer.unpack_pointer()
self.pDatatype = self.__packer.unpack_pointer()
self.DocName = self.__packer.unpack_string()
self.OutputFile = self.__packer.unpack_string()
#self.DataType = self.__packer.unpack_string()
# rpclog.debug("DocName %s OutputFile %s" %(self.DocName,self.OutputFile))
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
pass
class PRINTER_INFO_1(object):
# PRINTER_INFO_1 Structure
#
# http://msdn.microsoft.com/en-us/library/dd162844%28v=VS.85%29.aspx
#
#typedef struct _PRINTER_INFO_1 {
# DWORD Flags;
# LPTSTR pDescription;
# LPTSTR pName;
# LPTSTR pComment;
#} PRINTER_INFO_1, *PPRINTER_INFO_1;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Flags = 0x00018000
self.Buffer = ''
self.Buffersize = 0
self.Offset = 0
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
self.__packer.pack_pointer(self.Flags)
# self.Offset is the distance of the string count from the end
# of PRINTER_INFO_1 buffer. To count the distance of the string
# from the start of PRINTER_INFO_1 buffer,
# self.Buffersize - self offset needed
for j in range(len(self.Buffer)):
count = 0
count = len(self.Buffer) - j - 1
self.Offset = self.Offset + 2*len(self.Buffer[count])
self.__packer.pack_long(self.Buffersize-self.Offset)
for i in range(len(self.Buffer)):
self.__packer.pack_raw(self.Buffer[i].encode('utf16')[2:])
def size(self):
size = 4 + 4*len(self.Buffer) + 2* \
(sum([len(x) for x in self.Buffer]))
rpclog.debug ("rpclog.debugER_INFO_1 size %i" % size)
return size
@classmethod
def handle_EnumPrinters (cls, con, p):
#EnumPrinters Function
#
#http://msdn.microsoft.com/en-us/library/dd162692%28VS.85%29.aspx
#
#BOOL EnumPrinters(
# __in DWORD Flags,
# __in LPTSTR Name,
# __in DWORD Level,
# __out LPBYTE pPrinterEnum,
# __in DWORD cbBuf,
# __out LPDWORD pcbNeeded,
# __out LPDWORD pcReturned
#);
p = ndrlib.Unpacker(p.StubData)
Flags = p.unpack_long()
Name = p.unpack_pointer()
Level = p.unpack_long()
Pointer = p.unpack_pointer()
cbBuf = p.unpack_long()
rpclog.debug("Flags %s Name %s Level %i cbBuf %i " %
(Flags, Name, Level, cbBuf))
r = ndrlib.Packer()
# Pointer to PRINTER_INFO_X buffer
r.pack_pointer(0x6b254)
# PRINTER_INFO_1 Buffer
a = spoolss.PRINTER_INFO_1(r)
# these string are the default response of windows xp
# 'Windows NT Remote Printers' need for msf fingerprinting OS language as 'English version'
# https://www.metasploit.com/redmine/projects/framework/repository/revisions/8941/entry/lib/msf/core/exploit/smb.rb#L396
a.Buffer = ['Internet URL Printers\0','Windows NT Internet Provider\0','Windows NT Internet Printing\0','Remote Printers\0','Windows NT Remote Printers\0',
'Microsoft Windows Network\0','Locally Connected Printers\0','Windows NT Local Print Providor\0','Windows NT Local Printers\0']
a.Buffersize = a.size()
if Level == 1 and cbBuf != 0:
r.pack_long(a.Buffersize)
a.pack()
r.pack_long(a.Buffersize)
r.pack_long(3) #pcReturned, default in windows xp is 3
r.pack_long(0)
else:
# this need to trick metasploit ms08-067 exploit
# dionaea need send a malformed response if the cbBuf == 0
r.pack_long(0)
r.pack_long(a.Buffersize)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_OpenPrinter(cls, con, p):
#OpenPrinter Function
#
#http://msdn.microsoft.com/en-us/library/dd162751%28v=VS.85%29.aspx
#
#BOOL OpenPrinter(
# __in LPTSTR pPrinterName,
# __out LPHANDLE phPrinter,
# __in LPPRINTER_DEFAULTS pDefault
#);
x = ndrlib.Unpacker(p.StubData)
pPrinterName = x.unpack_pointer()
PrinterName = x.unpack_string()
print("PrinterName %s" % PrinterName)
pDatatype = x.unpack_pointer()
print("Datatype %s" % pDatatype)
cbBuf = x.unpack_long()
pDevMode = x.unpack_pointer()
print("DevMode %s" % pDevMode)
DesiredAccess = x.unpack_long()
print("DesiredAccess %x" % DesiredAccess)
#Below is the ClientInfo structure which showed in
#Microsoft Network Monitor, but I cant find the correct doc to refer
Level = x.unpack_long()
SwitchValue = x.unpack_long()
Pointer = x.unpack_pointer()
Size = x.unpack_long()
Buff = x.unpack_raw(Size)
print("Size %i Buff %s" % (Size, Buff))
r = ndrlib.Packer()
# Returned Handle
r.pack_raw(b'\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0')
# Success
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_ClosePrinter(cls, con, p):
r = ndrlib.Packer()
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_StartDocPrinter(cls, con, p):
#StartDocPrinter Function
#
#http://msdn.microsoft.com/en-us/library/dd145115%28v=VS.85%29.aspx
#
#DWORD StartDocPrinter(
# __in HANDLE hPrinter,
# __in DWORD Level,
# __in LPBYTE pDocInfo
#);
x = ndrlib.Unpacker(p.StubData)
hPrinter = x.unpack_raw(20)
rpclog.debug("hPrinter %s" % hPrinter)
Level = x.unpack_long()
rpclog.debug("Level %i" % Level)
DocInfo = spoolss.DOC_INFO_1(x)
DocName = DocInfo.DocName.decode('UTF-16')[:-1]
OutputFile = DocInfo.OutputFile.decode('UTF-16')[:-1]
rpclog.debug("docname {} outputfile {}".format(DocName, OutputFile))
if OutputFile.startswith('\\') and OutputFile.endswith('\PIPE\ATSVC'):
# FIXME PIPE ATSVC COMMAND
pass
else:
i = incident("dionaea.download.offer")
i.con = con
i.url = "spoolss://" + con.remote.host + '/' + OutputFile
i.report()
r = ndrlib.Packer()
# Job ID
r.pack_long(3)
# Success
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_EndDocPrinter(cls, con, p):
r=ndrlib.Packer()
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_WritePrinter(cls, con, p):
#WritePrinter Function
#
#http://msdn.microsoft.com/en-us/library/dd145226%28v=VS.85%29.aspx
#
#BOOL WritePrinter(
# __in HANDLE hPrinter,
# __in LPVOID pBuf,
# __in DWORD cbBuf,
# __out LPDWORD pcWritten
#);
# For MS10-061 SPOOLSS exploit with metasploit,
# the payload has sent in packet fragment.
# The packet dissection for the first and middle fragment is different, here the trick needed to make it work.
# Meaning of PacketFlags in dcerpc header:
# 0x00 : Middle fragment
# 0x01 : First fragment
# 0x02 : Last fragment
# 0x03 : No fragment needed
#
# FIXME actually this defragmentation should be in
# smbd.process_dcerpc_packet
if p.PacketFlags == 0:
con.printer += p.StubData
return None
elif p.PacketFlags == 1:
con.printer += p.StubData
return None
elif p.PacketFlags == 2:
con.printer += p.StubData
x = ndrlib.Unpacker(con.printer)
hPrinter = x.unpack_raw(20)
cbBuf = x.unpack_long()
Buf = x.unpack_raw(cbBuf)
dionaea_config = g_dionaea.config().get("dionaea")
download_dir = dionaea_config.get("download.dir")
download_suffix = dionaea_config.get("download.suffix", ".tmp")
x = tempfile.NamedTemporaryFile(
delete=False,
prefix="spoolss-",
suffix=download_suffix,
dir=download_dir
)
x.write(Buf)
x.close()
i = incident("dionaea.download.complete")
i.path = x.name
i.url = "spoolss://" + con.remote.host
i.con = con
i.report()
r = ndrlib.Packer()
r.pack_long(len(Buf))
r.pack_long(0)
return r.get_buffer()
elif p.PacketFlags == 3:
x = ndrlib.Unpacker(p.StubData)
hPrinter = x.unpack_raw(20)
cbBuf = x.unpack_long()
r = ndrlib.Packer()
r.pack_long(cbBuf)
r.pack_long(0)
return r.get_buffer()
STYPE_DISKTREE = 0x00000000 # Disk drive
STYPE_PRINTQ = 0x00000001 # Print queue
STYPE_DEVICE = 0x00000002 # Communication device
STYPE_IPC = 0x00000003 # Interprocess communication (IPC)
STYPE_SPECIAL = 0x80000000 # Special share reserved for interprocess
# communication (IPC$) or remote administration of the server (ADMIN$).
# Can also refer to administrative shares such as C$, D$, E$, and so forth.
STYPE_TEMPORARY= 0x40000000 # A temporary share that is not persisted
# for creation each time the file server initializes.
# Overwritten by smb config
__shares__ = {}
class SRVSVC(RPCService):
""" [MS-SRVS]: Server Service Remote Protocol Specification
http://msdn.microsoft.com/en-us/library/cc247080%28v=PROT.13%29.aspx
http://download.microsoft.com/download/a/e/6/ae6e4142-aa58-45c6-8dcf-a657e5900cd3/%5BMS-SRVS%5D.pdf
"""
uuid = UUID('4b324fc8-1670-01d3-1278-5a47bf6ee188').hex
version_major = 0
version_minor = 0
ops = {
0x0e: "NetShareAdd",
0x0f: "NetShareEnum",
0x10: "NetrShareGetInfo",
0x15: "NetServerGetInfo",
0x1c: "NetrRemoteTOD",
0x1f: "NetPathCanonicalize",
0x20: "NetPathCompare",
0x22: "NetNameCanonicalize"
}
vulns = {
0x1f: "MS08-67",
0x20: "MS08-67",
}
class SRVSVC_HANDLE(object):
# 2.2.1.1 SRVSVC_HANDLE
#
# http://msdn.microsoft.com/en-us/library/cc247105%28PROT.10%29.aspx
#
# typedef [handle, string] WCHAR* SRVSVC_HANDLE;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Handle = b''
self.Pointer = 0x3a20f2
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Ref = self.__packer.unpack_pointer()
self.Handle = self.__packer.unpack_string()
def pack(self):
if isinstance(self.__packer, ndrlib.Packer):
self.__packer.pack_pointer(self.Pointer)
self.__packer.pack_string(self.Handle)
class SHARE_INFO_0_CONTAINER(object):
# 2.2.4.32 SHARE_INFO_0_CONTAINER
#
# http://msdn.microsoft.com/en-us/library/cc247156%28PROT.13%29.aspx
#
#typedef struct _SHARE_INFO_0_CONTAINER {
# DWORD EntriesRead;
# [size_is(EntriesRead)] LPSHARE_INFO_0 Buffer;
#} SHARE_INFO_0_CONTAINER;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = 0
self.Data = []
self.Pointer = 0x23456
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Ptr = self.__packer.unpack_pointer()
self.EntriesRead = self.__packer.unpack_long()
self.Buffer = self.__packer.unpack_pointer()
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
# EntriesRead
self.EntriesRead = len(self.Data)
self.__packer.pack_long(self.EntriesRead)
# LPSHARE_INFO_0 Buffer
b = SRVSVC.SHARE_INFO_0(self.__packer)
b.Data = self.Data
b.MaxCount = self.EntriesRead
b.pack()
class SHARE_INFO_1_CONTAINER(object):
# 2.2.4.33 SHARE_INFO_1_CONTAINER
#
# http://msdn.microsoft.com/en-us/library/cc247157%28PROT.10%29.aspx
#
# typedef struct _SHARE_INFO_1_CONTAINER {
# DWORD EntriesRead;
# [size_is(EntriesRead)] LPSHARE_INFO_1 Buffer;
# } SHARE_INFO_1_CONTAINER;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = 0
self.Data = {}
self.Pointer = 0x23456
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Ptr = self.__packer.unpack_pointer()
self.EntriesRead = self.__packer.unpack_long()
self.Buffer = self.__packer.unpack_pointer()
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
# EntriesRead
self.EntriesRead = len(self.Data)
self.__packer.pack_long(self.EntriesRead)
# LPSHARE_INFO_1 Buffer
b = SRVSVC.SHARE_INFO_1(self.__packer)
b.Data = self.Data
b.pack()
class SHARE_INFO_2_CONTAINER(object):
# 2.2.4.34 SHARE_INFO_2_CONTAINER
#
# http://msdn.microsoft.com/en-us/library/cc247158%28PROT.13%29.aspx
#
#typedef struct _SHARE_INFO_2_CONTAINER {
# DWORD EntriesRead;
# [size_is(EntriesRead)] LPSHARE_INFO_2 Buffer;
#} SHARE_INFO_2_CONTAINER,
# *PSHARE_INFO_2_CONTAINER,
# *LPSHARE_INFO_2_CONTAINER;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = 0
self.Data = {}
self.Pointer = 0x23456
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Ptr = self.__packer.unpack_pointer()
self.EntriesRead = self.__packer.unpack_long()
self.Buffer = self.__packer.unpack_pointer()
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = len(self.Data)
# self.__packer.pack_long(self.EntriesRead)
# LPSHARE_INFO_2 Buffer
b = SRVSVC.SHARE_INFO_2(self.__packer)
b.Data = self.Data
b.pack()
class SHARE_INFO_502_CONTAINER(object):
# 2.2.4.36 SHARE_INFO_502_CONTAINER
#
# http://msdn.microsoft.com/en-us/library/cc247160%28PROT.13%29.aspx
#
# typedef struct _SHARE_INFO_502_CONTAINER {
# DWORD EntriesRead;
# [size_is(EntriesRead)] LPSHARE_INFO_502_I Buffer;
# } SHARE_INFO_502_CONTAINER,
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = 0
self.Data = {}
self.Pointer = 0x23456
elif isinstance(self.__packer,ndrlib.Unpacker):
self.Ctr = self.__packer.unpack_pointer()
self.Ptr = self.__packer.unpack_pointer()
self.EntriesRead = self.__packer.unpack_long()
self.Buffer = self.__packer.unpack_pointer()
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.EntriesRead = len(self.Data)
self.__packer.pack_long(self.EntriesRead)
# SHARE_INFO_502_I Buffer
b = SRVSVC.SHARE_INFO_502(self.__packer)
b.Data = self.Data
b.pack()
class SHARE_INFO_0(object):
# 2.2.4.22 SHARE_INFO_0
#
# http://msdn.microsoft.com/en-us/library/cc247146%28v=PROT.13%29.aspx
#
#typedef struct _SHARE_INFO_0 {
# [string] wchar_t* shi0_netname;
#} SHARE_INFO_0,
# *PSHARE_INFO_0,
# *LPSHARE_INFO_0
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Data = {}
self.Pointer = 0x99999
self.MaxCount = 0
self.Netname_pointer = 0x34567
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_pointer(self.Pointer)
# MaxCount, needed as the NDR array
self.MaxCount = len(self.Data)
self.__packer.pack_long(self.MaxCount)
for i in range(self.MaxCount):
self.__packer.pack_pointer(self.Netname_pointer) # netname
for j in self.Data:
data = self.Data[j]
self.__packer.pack_string_fix(
str(j+'\0').encode('utf16')[2:])
class SHARE_INFO_1(object):
# 2.2.4.23 SHARE_INFO_1
#
# http://msdn.microsoft.com/en-us/library/cc247147%28PROT.10%29.aspx
#
# typedef struct _SHARE_INFO_1 {
# [string] wchar_t* shi1_netname;
# DWORD shi1_type;
# [string] wchar_t* shi1_remark;
# } SHARE_INFO_1,
# *PSHARE_INFO_1,
# *LPSHARE_INFO_1;
# http://msdn.microsoft.com/en-us/library/cc247150%28PROT.10%29.aspx
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Data = {}
self.Pointer = 0x99999
self.MaxCount = 0
self.Netname_pointer = 0x34567
self.Type = 0x00000000 # STYPE_DISKTREE
self.Remark_pointer = 0x45678
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_pointer(self.Pointer)
# MaxCount, needed as the NDR array
self.MaxCount = len(self.Data)
self.__packer.pack_long(self.MaxCount)
for i in self.Data:
data = self.Data[i]
self.__packer.pack_pointer(self.Netname_pointer) # netname
self.__packer.pack_long(data['type']) # type
self.__packer.pack_pointer(self.Remark_pointer) # remark
for j in self.Data:
data = self.Data[j]
self.__packer.pack_string_fix(
str(j+'\0').encode('utf16')[2:])
self.__packer.pack_string_fix(
str(data['comment']+'\0').encode('utf16')[2:])
class SHARE_INFO_502(object):
# 2.2.4.26 SHARE_INFO_502_I
#
# http://msdn.microsoft.com/en-us/library/cc247150%28v=PROT.13%29.aspx
#
# typedef struct _SHARE_INFO_502_I {
# [string] WCHAR* shi502_netname;
# DWORD shi502_type;
# [string] WCHAR* shi502_remark;
# DWORD shi502_permissions;
# DWORD shi502_max_uses;
# DWORD shi502_current_uses;
# [string] WCHAR* shi502_path;
# [string] WCHAR* shi502_passwd;
# DWORD shi502_reserved;
# [size_is(shi502_reserved)] unsigned char* shi502_security_descriptor;
#} SHARE_INFO_502_I,
# *PSHARE_INFO_502_I,
# *LPSHARE_INFO_502_I;
def __init__(self, p, data=None):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Data = []
self.Pointer = 0x99999
self.MaxCount = 0
self.Netname_pointer = 0x34567
self.Type = 0x00000000
self.Remark_pointer = 0x45678
self.Permissions = 0
self.Max_uses = 0xffffffff
self.Current_uses = 1
self.Path_pointer = 0x87654
self.Passwd_pointer = 0
self.Reserved = 0
self.Security_descriptor = 0
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_pointer(self.Pointer)
self.MaxCount = len(self.Data)
self.__packer.pack_long(self.MaxCount)
for i in self.Data:
data = self.Data[i]
self.__packer.pack_pointer(self.Netname_pointer) # netname
self.__packer.pack_long(data['type']) # STYPE_DISKTREE
self.__packer.pack_pointer(self.Remark_pointer) # remark
self.__packer.pack_long(self.Permissions) # permissions
self.__packer.pack_long(self.Max_uses) # max_uses
self.__packer.pack_long(self.Current_uses) # current_uses
self.__packer.pack_pointer(self.Path_pointer) # path
self.__packer.pack_pointer(self.Passwd_pointer) # passwd
self.__packer.pack_long(self.Reserved) # reserved
# security descriptor
self.__packer.pack_pointer(self.Security_descriptor)
for j in self.Data:
data = self.Data[i]
self.__packer.pack_string_fix(
str(j+'\0').encode('utf16')[2:])
self.__packer.pack_string_fix(
str(data['path']+'\0').encode('utf16')[2:])
self.__packer.pack_string_fix(
str(data['comment']+'\0').encode('utf16')[2:])
class SHARE_INFO_2(object):
#2.2.4.24 SHARE_INFO_2
#
#http://msdn.microsoft.com/en-us/library/cc247148%28v=PROT.13%29.aspx
#
#typedef struct _SHARE_INFO_2 {
# [string] wchar_t* shi2_netname;
# DWORD shi2_type;
# [string] wchar_t* shi2_remark;
# DWORD shi2_permissions;
# DWORD shi2_max_uses;
# DWORD shi2_current_uses;
# [string] wchar_t* shi2_path;
# [string] wchar_t* shi2_passwd;
#} SHARE_INFO_2
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Data = {}
self.Pointer = 0x99999
self.MaxCount = 0
self.Netname_pointer = 0x34567
self.Type = 0x00000000
self.Remark_pointer = 0x45678
self.Permissions = 0
self.Max_uses = 0xffffffff
self.Current_uses = 1
self.Path_pointer = 0x6789
self.Passwd_pointer = 0x56789
elif isinstance(self.__packer,ndrlib.Unpacker):
self.ref = self.__packer.unpack_pointer()
self.netname = self.__packer.unpack_pointer()
self.sharetype = self.__packer.unpack_long()
self.remark = self.__packer.unpack_long()
self.permission = self.__packer.unpack_long()
self.max_use = self.__packer.unpack_long()
self.current_use = self.__packer.unpack_long()
self.path = self.__packer.unpack_pointer()
self.passwd = self.__packer.unpack_pointer()
self.share_name = self.__packer.unpack_string()
self.share_comment = self.__packer.unpack_string()
self.share_path = self.__packer.unpack_string()
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
# self.__packer.pack_pointer(self.Pointer)
# self.MaxCount = len(self.Data)
# self.__packer.pack_long(self.MaxCount)
rpclog.warn("%s" % self.Data)
# raise Exception()
for i in self.Data:
data = self.Data[i]
self.__packer.pack_pointer(self.Netname_pointer) # netname
self.__packer.pack_long(data['type']) # STYPE_DISKTREE
self.__packer.pack_pointer(self.Remark_pointer) # remark
self.__packer.pack_long(self.Permissions) # permissions
self.__packer.pack_long(self.Max_uses) # max_uses
self.__packer.pack_long(self.Current_uses) # current_uses
self.__packer.pack_pointer(self.Path_pointer) # path
self.__packer.pack_pointer(self.Passwd_pointer) # passwd
for j in self.Data:
data = self.Data[j]
# NetName
self.__packer.pack_string_fix(
str(j+'\0').encode('utf16')[2:])
# Remark
self.__packer.pack_string_fix(
str(data['comment']+'\0').encode('utf16')[2:])
# Path
self.__packer.pack_string_fix(
str(data['path']+'\0').encode('utf16')[2:])
# Password
# this is necessary for Metasploit module /exploit/linux/samba/is_known_pipename
self.__packer.pack_string_fix(
str('\0').encode('utf16')[2:])
class SERVER_INFO_101(object):
# 2.2.4.41 SERVER_INFO_101
#
# http://msdn.microsoft.com/en-us/library/cc247164%28v=PROT.13%29.aspx
#
#typedef struct _SERVER_INFO_101 {
# DWORD sv101_platform_id;
# [string] wchar_t* sv101_name;
# DWORD sv101_version_major;
# DWORD sv101_version_minor;
# DWORD sv101_type;
# [string] wchar_t* sv101_comment;
#} SERVER_INFO_101,
# *PSERVER_INFO_101,
# *LPSERVER_INFO_101;
def __init__(self, p):
self.__packer = p
if isinstance(self.__packer,ndrlib.Packer):
self.Data = {}
self.Pointer = 0x99999
# Windows NT or a newer Windows operating system version.
self.Platform_id = 500
self.Name_pointer= 0x68458
self.Comment_pointer = 0x73429
self.Version_major = 5 # Windows XP SP2 default reply
self.Version_minor = 1 # Windows XP SP2 default reply
# self.Type = 0x00051003 # Windows XP SP2 default reply (Type:
# Workstation, Server, NT Workstation, Potential Browser,
# Master Browser)
self.Type = 0xFFFFFFFF # All servers
elif isinstance(self.__packer,ndrlib.Unpacker):
pass
def pack(self):
if isinstance(self.__packer,ndrlib.Packer):
self.__packer.pack_pointer(self.Pointer)
self.__packer.pack_long(self.Platform_id)
self.__packer.pack_pointer(self.Name_pointer)
self.__packer.pack_long(self.Version_major)
self.__packer.pack_long(self.Version_minor)
self.__packer.pack_long(self.Type)
self.__packer.pack_pointer(self.Comment_pointer)
for j in range(len(self.Data)):
self.__packer.pack_string_fix(
self.Data[j].encode('utf16')[2:])
@classmethod
def handle_NetShareEnum(cls, con, p):
x = ndrlib.Unpacker(p.StubData)
# 3.1.4.8 NetrShareEnum (Opnum 15)
#
# http://msdn.microsoft.com/en-us/library/cc247276%28PROT.10%29.aspx
#
# NET_API_STATUS NetrShareEnum(
# [in, string, unique] SRVSVC_HANDLE ServerName,
# [in, out] LPSHARE_ENUM_STRUCT InfoStruct,
# [in] DWORD PreferedMaximumLength,
# [out] DWORD* TotalEntries,
# [in, out, unique] DWORD* ResumeHandle
# );
ServerName = SRVSVC.SRVSVC_HANDLE(x)
# 2.2.4.38 SHARE_ENUM_STRUCT
#
# http://msdn.microsoft.com/en-us/library/cc247161%28PROT.10%29.aspx
#
# typedef struct _SHARE_ENUM_STRUCT {
# DWORD Level;
# [switch_is(Level)] SHARE_ENUM_UNION ShareInfo;
# } SHARE_ENUM_STRUCT,
# *PSHARE_ENUM_STRUCT,
# *LPSHARE_ENUM_STRUCT;
infostruct_level = x.unpack_long()
infostruct_share = x.unpack_long()
# typedef
# [switch_type(DWORD)]
# union _SHARE_ENUM_UNION {
# [case(0)]
# SHARE_INFO_0_CONTAINER* Level0;
# [case(1)]
# SHARE_INFO_1_CONTAINER* Level1;
# [case(2)]
# SHARE_INFO_2_CONTAINER* Level2;
# [case(501)]
# SHARE_INFO_501_CONTAINER* Level501;
# [case(502)]
# SHARE_INFO_502_CONTAINER* Level502;
# [case(503)]
# SHARE_INFO_503_CONTAINER* Level503;
# } SHARE_ENUM_UNION;
if infostruct_share == 0:
buffer = SRVSVC.SHARE_INFO_0_CONTAINER(x)
elif infostruct_share == 1:
buffer = SRVSVC.SHARE_INFO_1_CONTAINER(x)
elif infostruct_share == 502:
buffer = SRVSVC.SHARE_INFO_502_CONTAINER(x)
preferdmaxlen = x.unpack_long()
# ResumeHandle
resumehandleptr = x.unpack_pointer()
resumehandle = 0
if resumehandleptr != 0:
resumehandle = x.unpack_long()
rpclog.debug("infostruct_share %i preferdmaxlen %i resumehandleptr %x resumehandle %i" % (
infostruct_share,preferdmaxlen,resumehandleptr,resumehandle) )
# compile reply
r = ndrlib.Packer()
r.pack_long(infostruct_level)
r.pack_long(infostruct_share)
# pointer to the SHARE_INFO_X_CONTAINER
r.pack_pointer(0x23456)
if infostruct_share == 0:
s = SRVSVC.SHARE_INFO_0_CONTAINER(r)
elif infostruct_share == 1:
s = SRVSVC.SHARE_INFO_1_CONTAINER(r)
elif infostruct_share == 502:
s = SRVSVC.SHARE_INFO_502_CONTAINER(r)
s.Data = __shares__
s.pack()
# total entries
r.pack_long(s.EntriesRead)
# resume handle
r.pack_pointer(0x47123123)
r.pack_long(0)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_NetPathCanonicalize(cls, con, p):
# MS08-067
# WERROR srvsvc_NetPathCanonicalize(
# [in,unique] [string,charset(UTF16)] uint16 *server_unc,
# [in] [string,charset(UTF16)] uint16 path[],
# [out] [size_is(maxbuf)] uint8 can_path[],
# [in] uint32 maxbuf,
# [in] [string,charset(UTF16)] uint16 prefix[],
# [in,out,ref] uint32 *pathtype,
# [in] uint32 pathflags
# );
x = ndrlib.Unpacker(p.StubData)
ref = x.unpack_pointer()
server_unc = x.unpack_string()
path = x.unpack_string()
maxbuf = x.unpack_long()
prefix = x.unpack_string()
pathtype = x.unpack_long()
pathflags = x.unpack_long()
rpclog.debug("ref 0x%x server_unc %s path %s maxbuf %s prefix %s pathtype %i pathflags %i" % (
ref, server_unc, path, maxbuf, prefix, pathtype, pathflags))
# conficker is stubborn
# dionaea replies to the exploit, conficker retries to exploit
# I'd prefer a real check for a bad path, but the path provided by conficker is not utf16,
# therefore it is not possible to canonicalize the path and check if it path canonicalizes beyond /
# the workaround ... is checking for a 'long' path ...
if len(path) > 128:
raise DCERPCValueError("path","too long", path)
r = ndrlib.Packer()
r.pack_long(pathtype)
r.pack_long(0)
r.pack_string(path)
return r.get_buffer()
@classmethod
def handle_NetPathCompare(cls, con, p):
# MS08-067
# WERROR srvsvc_NetPathCompare(
# [in,unique] [string,charset(UTF16)] uint16 *server_unc,
# [in] [string,charset(UTF16)] uint16 path1[],
# [in] [string,charset(UTF16)] uint16 path2[],
# [in] uint32 pathtype,
# [in] uint32 pathflags
# );
p = ndrlib.Unpacker(p.StubData)
ref = p.unpack_pointer()
server_unc = p.unpack_string()
path1 = p.unpack_string()
path2 = p.unpack_string()
pathtype = p.unpack_long()
pathflags = p.unpack_long()
rpclog.debug("ref 0x%x server_unc %s path1 %s path2 %s pathtype %i pathflags %i" % (
ref, server_unc, path1, path2, pathtype, pathflags))
r = ndrlib.Packer()
x = (path1 > path2) - (path1 < path2)
if x < 0:
r.pack_long( 0 )
else:
r.pack_long( 0 )
# r.pack_long( x )
return r.get_buffer()
@classmethod
def handle_NetShareAdd(cls, con, p):
#3.1.4.7 NetrShareAdd (Opnum 14)
#
#http://msdn.microsoft.com/en-us/library/cc247275%28v=PROT.10%29.aspx
#
#NET_API_STATUS NetrShareAdd(
#[in, string, unique] SRVSVC_HANDLE ServerName,
# [in] DWORD Level,
# [in, switch_is(Level)] LPSHARE_INFO InfoStruct,
# [in, out, unique] DWORD* ParmErr
#);
p = ndrlib.Unpacker(p.StubData)
ServerName = SRVSVC.SRVSVC_HANDLE(p)
infostruct_level = p.unpack_long()
infostruct_share = p.unpack_long()
if infostruct_share == 2:
buffer = SRVSVC.SHARE_INFO_2(p)
ptr_parm = p.unpack_pointer()
error = p.unpack_long()
rpclog.debug("infostruct_share %i ptr_parm %x ParmErr %i" %
(infostruct_share,ptr_parm,error) )
r = ndrlib.Packer()
r.pack_pointer(0x324567)
r.pack_long(0)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_NetrShareGetInfo(cls, con, p):
#3.1.4.10 NetrShareGetInfo (Opnum 16)
#
#http://msdn.microsoft.com/en-us/library/cc247236%28PROT.13%29.aspx
#
#NET_API_STATUS NetrShareGetInfo(
# [in, string, unique] SRVSVC_HANDLE ServerName,
# [in, string] WCHAR* NetName,
# [in] DWORD Level,
# [out, switch_is(Level)] LPSHARE_INFO InfoStruct
#);
p = ndrlib.Unpacker(p.StubData)
ServerName = SRVSVC.SRVSVC_HANDLE(p)
NetName = p.unpack_string()
Level = p.unpack_long()
rpclog.debug("NetName %s Level %i" % (NetName,Level))
r = ndrlib.Packer()
r.pack_long(Level)
# pointer to the SHARE_INFO_X_CONTAINER
r.pack_pointer(0x23456)
if Level == 2:
s = SRVSVC.SHARE_INFO_2_CONTAINER(r)
NetName = NetName.decode('UTF-16')[:-1]
if NetName in __shares__:
data = __shares__[NetName]
s.Data = {NetName:data}
else:
rpclog.warn(
"FIXME: this code has to be written, lame workaround for now")
data = __shares__['C$']
s.Data = {NetName:data}
s.pack()
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_NetNameCanonicalize (cls, con, p):
#3.1.4.33 NetprNameCanonicalize (Opnum 34)
#
#http://msdn.microsoft.com/en-us/library/cc247261%28PROT.13%29.aspx
#
#NET_API_STATUS NetprNameCanonicalize(
# [in, string, unique] SRVSVC_HANDLE ServerName,
# [in, string] WCHAR* Name,
# [out, size_is(OutbufLen)] WCHAR* Outbuf,
# [in, range(0,64000)] DWORD OutbufLen,
# [in] DWORD NameType,
# [in] DWORD Flags
#);
p = ndrlib.Unpacker(p.StubData)
ServerName = SRVSVC.SRVSVC_HANDLE(p)
Name = p.unpack_string()
Outbuflen = p.unpack_long()
NameType = p.unpack_long()
Flags = p.unpack_long()
rpclog.debug("ServerName %s Name %s Outbuflen %i Nametype %i Flags %i" % (
ServerName, Name, Outbuflen , NameType, Flags))
r = ndrlib.Packer()
# Metasploit smb fingerprinting for OS type
# https://www.metasploit.com/redmine/projects/framework/repository/revisions/8941/entry/lib/msf/core/exploit/smb.rb#L324
# for 'Windows XP Service Pack 0 / 1'
if OS_TYPE == 1:
r.pack_pointer(0)
r.pack_string(Name)
r.pack_long(0)
# for 'Windows XP Service Pack 2+'
if OS_TYPE == 2 or OS_TYPE == 3:
r.pack_pointer(0x000006f7)
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_NetrRemoteTOD(cls, con, p):
#3.1.4.21 NetrRemoteTOD (Opnum 28)
#
#http://msdn.microsoft.com/en-us/library/cc247248%28v=PROT.13%29.aspx
#
#NET_API_STATUS NetrRemoteTOD(
# [in, string, unique] SRVSVC_HANDLE ServerName,
# [out] LPTIME_OF_DAY_INFO* BufferPtr
#);
p = ndrlib.Unpacker(p.StubData)
ServerName = SRVSVC.SRVSVC_HANDLE(p)
r = ndrlib.Packer()
# pointer to the LPTIME_OF_DAY_INFO* BufferPtr
# Metasploit smb fingerprinting for OS type
# for 'Windows XP Service Pack 3'
if OS_TYPE == 3:
r.pack_pointer(0x00020000)
else :
r.pack_pointer(0x23456)
#typedef struct TIME_OF_DAY_INFO {
# DWORD tod_elapsedt;
# DWORD tod_msecs;
# DWORD tod_hours;
# DWORD tod_mins;
# DWORD tod_secs;
# DWORD tod_hunds;
# long tod_timezone;
# DWORD tod_tinterval;
# DWORD tod_day;
# DWORD tod_month;
# DWORD tod_year;
# DWORD tod_weekday;
#} TIME_OF_DAY_INFO,
# *PTIME_OF_DAY_INFO,
# *LPTIME_OF_DAY_INFO;
ctime = localtime()
#Eg, time.struct_time(tm_year=2010, tm_mon=7, tm_mday=13, tm_hour=2, tm_min=12, tm_sec=27, tm_wday=1, tm_yday=194, tm_isdst=0)
r.pack_long(int(time()))#elapsedt
r.pack_long(515893) #msecs
r.pack_long(ctime[3]) #hours
r.pack_long(ctime[4]) #mins
r.pack_long(ctime[5]) #secs
r.pack_long(59) #hunds
r.pack_long_signed(int(altzone/60),) #timezone
r.pack_long(310) #tinterval
r.pack_long(ctime[2]) #day
r.pack_long(ctime[1]) #month
r.pack_long(ctime[0]) #year
r.pack_long(ctime[6]) #weekday
r.pack_long(0)
return r.get_buffer()
@classmethod
def handle_NetServerGetInfo(cls, con, p):
#3.1.4.17 NetrServerGetInfo (Opnum 21)
#
#http://msdn.microsoft.com/en-us/library/cc247243%28v=PROT.13%29.aspx
#
#NET_API_STATUS NetrServerGetInfo(
# [in, string, unique] SRVSVC_HANDLE ServerName,
# [in] DWORD Level,
# [out, switch_is(Level)] LPSERVER_INFO InfoStruct
#);
p = ndrlib.Unpacker(p.StubData)
Pointer = p.unpack_pointer()
ServerName = p.unpack_string()
Level = p.unpack_long()
print("ServerName %s Level %i" % (ServerName,Level))
r = ndrlib.Packer()
r.pack_long(Level)
if Level == 101:
s = SRVSVC.SERVER_INFO_101(r)
server = ServerName.decode('UTF-16')[2:]
s.Data = [server, '\0']
s.pack()
r.pack_long(0)
return r.get_buffer()
class ssdpsrv(RPCService):
uuid = UUID('4b112204-0e19-11d3-b42b-0000f81feb9f').hex
class SVCCTL(RPCService):
"""[MS-SCMR]: Service Control Manager Remote Protocol Specification
http://msdn.microsoft.com/en-us/library/cc245832%28v=PROT.10%29.aspx
"""
uuid = UUID('367abb81-9844-35f1-ad32-98f038001003').hex
version_major = 0
version_minor = 0
ops = {
0 : "CloseServiceHandle",
24: "CreateServiceA",
27: "OpenSCManagerA",
}
@classmethod
def handle_CloseServiceHandle(cls, con, p):
# DWORD RCloseServiceHandle(
# [in, out] LPSC_RPC_HANDLE hSCObject
# );
pass
@classmethod
def handle_CreateServiceA(cls, con, p):
# DWORD RCreateServiceA(
# [in] SC_RPC_HANDLE hSCManager,
# [in, string, range(0, SC_MAX_NAME_LENGTH)] LPSTR lpServiceName,
# [in, string, unique, range(0, SC_MAX_NAME_LENGTH)] LPSTR lpDisplayName,
# [in] DWORD dwDesiredAccess,
# [in] DWORD dwServiceType,
# [in] DWORD dwStartType,
# [in] DWORD dwErrorControl,
# [in, string, range(0, SC_MAX_PATH_LENGTH)] LPSTR lpBinaryPathName,
# [in, string, unique, range(0, SC_MAX_NAME_LENGTH)] LPSTR lpLoadOrderGroup,
# [in, out, unique] LPDWORD lpdwTagId,
# [in, unique, size_is(dwDependSize)] LPBYTE lpDependencies,
# [in, range(0, SC_MAX_DEPEND_SIZE)] DWORD dwDependSize,
# [in, string, unique, range(0, SC_MAX_ACCOUNT_NAME_LENGTH)] LPSTR lpServiceStartName,
# [in, unique, size_is(dwPwSize)] LPBYTE lpPassword,
# [in, range(0, SC_MAX_PWD_SIZE)] DWORD dwPwSize,
# [out] LPSC_RPC_HANDLE lpServiceHandle
# );
pass
@classmethod
def handle_OpenSCManagerA(cls, con, p):
# DWORD ROpenSCManagerA(
# [in, string, unique, range(0, SC_MAX_COMPUTER_NAME_LENGTH)] SVCCTL_HANDLEA lpMachineName,
# [in, string, unique, range(0, SC_MAX_NAME_LENGTH)] LPSTR lpDatabaseName,
# [in] DWORD dwDesiredAccess,
# [out] LPSC_RPC_HANDLE lpScHandle
# );
pass
class tapsrv(RPCService):
uuid = UUID('2f5f6520-ca46-1067-b319-00dd010662da').hex
class TerminalServerLicensing(RPCService):
uuid = UUID('3d267954-eeb7-11d1-b94e-00c04fa3080d').hex
class trkwks(RPCService):
uuid = UUID('300f3532-38cc-11d0-a3f0-0020af6b0add').hex
class w32time(RPCService):
uuid = UUID('8fb6d884-2388-11d0-8c35-00c04fda2795').hex
#class winipsec(RPCService):
# uuid = UUID('12345678-1234-abcd-ef00-0123456789ab').hex
class winreg(RPCService):
uuid = UUID('338cd001-2244-31f1-aaaa-900038001003').hex
class winsif(RPCService):
uuid = UUID('45f52c28-7f9f-101a-b52b-08002b2efabe').hex
class winstation_rpc(RPCService):
uuid = UUID('5ca4a760-ebb1-11cf-8611-00a0245420ed').hex
class WKSSVC(RPCService):
uuid = UUID('6bffd098-a112-3610-9833-46c3f87e345a').hex
ops = {
0x1b: "NetAddAlternateComputerName"
}
vulns = {
0x1b: "MS03-39",
}
@classmethod
def handle_NetAddAlternateComputerName(cls, con, p):
# MS03-039
pass
|
dionaea-honeypot/dionaea
|
modules/python/dionaea/smb/rpcservices.py
|
Python
|
gpl-2.0
| 133,752
|
#!/usr/bin/env python
# pylint: disable=R0903
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains a object that represents a Telegram
Message Parse Modes."""
class ParseMode(object):
"""This object represents a Telegram Message Parse Modes."""
MARKDOWN = 'Markdown'
HTML = 'HTML'
|
franciscod/python-telegram-bot
|
telegram/parsemode.py
|
Python
|
gpl-2.0
| 1,054
|
#
# Copyright 2001 - 2016 Ludek Smid [http://www.ospace.net/]
#
# This file is part of Outer Space.
#
# Outer Space is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Outer Space is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outer Space; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import math
import ige.ospace.Const as Const
from ige.IDataHolder import makeIDataHolder
from Techs import noop as techDefaultHandler
def init(configDir):
global techs, Tech
import Techs
Techs.init(configDir)
from Techs import techs, Tech
## General
turnsPerDay = 24
galaxyStartDelay = turnsPerDay * 2
playerTimeout = 60 * 60 * 24 * 28 # 28 days
novicePlayerTimeout = 60 * 60 * 24 * 14 # 14 days
messageTimeout = 60 * 60 * 24 * 14 # 14 days
## New player
startingPopulation = 9000
startingBio = 1000
startingMin = 1000
startingEn = 1000
startingScannerPwr = 100
## Production
maxProdQueueLen = 10
buildOnSamePlanetMod = 1
buildOnAnotherPlanetMod = 2
unusedProdMod = 0.75
# structure economy revamp constants
basePlanetProdProd = 5 # prevents deadlocked planets, makes small planets more competitive
structDefaultHpRatio = 0.1 # structures are build with this percentage of HPs
structDefaultCpCosts = 0.2 # structures costs this amount of what is in XMLs
structFromShipHpRatio = 1.0 # structures from ships are build with this percentage of HPs
structNewPlayerHpRatio = 1.0 # structures from ships are build with this percentage of HPs
structTransferWaste = 0.5 # when replacing building, how much CP of old building is transfered to new one
structTransferMaxRatio = 0.5 # when replacing building, what is maximum effect of transfered CPs
# as we now build structures damaged, repair and decay are part of economy revamp
# repair ratio is dynamic on cost of building. it's full of magic constants
# goal is to have 480 CP building to repair in ~2 days (which is twice the legacy repair
# ratio), and the most expansive ones (adv. stargate) ~ 6 days.
# We are using log10() as it's quicker than log()
_magicBase = 1.0 / (turnsPerDay * 2)
_repairMagicBase = math.log10(480 * structDefaultCpCosts) ** 2 * _magicBase
repairRatioFunc = lambda x: _repairMagicBase / math.log10(x) ** 2
# building decay ratio bigger or equivalent of 480 CP repair
decayRatioFunc = lambda x: min( _magicBase, repairRatioFunc(x))
decayProdQueue = 0.02
## Environment
envInterval = 1000
envAutoMod = 10.0
envMax = 200
envSelfUpgradeChance = {"H": 5, "C": 1, "B": 500, "m": 100, "r": 100, "p": 100, "e": 100} # in ten thousandths (10 000)
planetSpec = {}
planetSpec[u'A'] = makeIDataHolder(
minBio = 0,
maxBio = 0,
upgradeTo = None,
downgradeTo = None,
)
planetSpec[u'G'] = makeIDataHolder(
minBio = 0,
maxBio = 0,
upgradeTo = None,
downgradeTo = None,
)
planetSpec[u'C'] = makeIDataHolder(
minBio = 0,
maxBio = 6,
upgradeTo = u'D',
upgradeEnReqs = (5, 180),
downgradeTo = None,
)
planetSpec[u'R'] = makeIDataHolder(
minBio = 0,
maxBio = 6,
upgradeTo = u'D',
upgradeEnReqs = (5, 180),
downgradeTo = None,
)
planetSpec[u'D'] = makeIDataHolder(
minBio = 6,
maxBio = 12,
upgradeTo = u'H',
upgradeEnReqs = (25, 150),
downgradeTo = u'R',
)
planetSpec[u'H'] = makeIDataHolder(
minBio = 12,
maxBio = 25,
upgradeTo = u'M',
upgradeEnReqs = (50, 125),
downgradeTo = u'D',
)
planetSpec[u'M'] = makeIDataHolder(
minBio = 25,
maxBio = 75,
upgradeTo = u'E',
upgradeEnReqs = (50, 100),
downgradeTo = u'H',
)
planetSpec[u'E'] = makeIDataHolder(
minBio = 75,
maxBio = 125,
upgradeTo = u"I",
upgradeEnReqs = (50, 100),
downgradeTo = u'M',
)
planetSpec[u"I"] = makeIDataHolder( # gaia
minBio = 125,
maxBio = 200,
upgradeTo = None,
downgradeTo = u"E",
)
## New colony settings
colonyMinBio = 600
colonyMinMin = 600
colonyMinEn = 600
## Storage
popPerSlot = 0
bioPerSlot = 0
minPerSlot = 0
enPerSlot = 0
popBaseStor = 4800
bioBaseStor = 4800
minBaseStor = 4800
enBaseStor = 4800
autoMinStorTurns = 2
tlPopReserve = 100
## Resources
stratResRate = turnsPerDay * 6
stratResAmountBig = 10
stratResAmountSmall = 1
## Population
popGrowthRate = 0.02
popMinGrowthRate = int(5000 * popGrowthRate) # Increase the Minimum Population Growth from 20 to 100 per turn
popDieRate = 0.1
popMinDieRate = 100
popKillMod = 0.25
popSlotKillMod = 5 # how many people per 1 DMG get killed when slot is hit
popSlotHP = 100 # HP of habitable structures on slot (where people live)
## Research
maxRsrchQueueLen = 10
techBaseImprovement = 1
techMaxImprovement = 5
techImprCostMod = {1:480, 2:480, 3:720, 4:960, 5:1200, 6: 1440, 7: 1680} #per level
sciPtsPerCitizen = {1: 0, 2: 0.00075, 3: 0.00150, 4: 0.00175, 5: 0.00200, 6: 0.002125, 7: 0.00225, 99: 0} #per level
techImprEff = {1:0.750, 2:0.875, 3:1.000, 4:1.125, 5:1.250} #per sublevel
#maxSciPtsTL = {1:100, 2:200, 3:300, 4:400, 5:500, 6:600, 7:700}
#sciPtsStepFraction = 0.25
## Scanner
maxSignature = 100
scannerMinPwr = 1
scannerMaxPwr = 150
level1InfoScanPwr = 1000
level2InfoScanPwr = 1200
level3InfoScanPwr = 1400
level4InfoScanPwr = 1600
maxScanPwr = 200000
mapForgetScanPwr = 0.94
partnerScanPwr = 300000
## Fleets
maxCmdQueueLen = 10
signatureBase = 1.10
operProdRatio = 0.001
combatRetreatWait = 3
starGateDamage = 0.2 # damage for 100% speed boost (double for 200%, etc...)
shipDecayRatio = 0.04
maxDamageAbsorb = 5 # max absorbed damage for tech "damageAbsorb" property.
# max seq_mod equipments of equipType; anything not in list is unlimited
maxEquipType = {
'ECM' : 1, # +Missile DEF
'Combat Bonuses' : 1, # +%ATT, +%DEF
'Combat Modifiers' : 1, # +ATT, +DEF
'Shields' : 1, # not hardshields
'Stealth' : 1,
'Auto Repair' : 1,
}
## Buildings
plShieldRegen = 0.05 #regen rate of planetary shield
## Diplomacy
baseRelationChange = -5
relLostWhenAttacked = -1000000
defaultRelation = Const.REL_NEUTRAL
contactTimeout = 6 * turnsPerDay
voteForImpAnnounceOffset = 2 * turnsPerDay
voteForImpPeriod = 6 * turnsPerDay
ratioNeededForImp = 0.6666
pactDescrs = {}
pactDescrs[Const.PACT_ALLOW_CIVILIAN_SHIPS] = makeIDataHolder(
targetRel = 500,
relChng = 10,
validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_ALLOW_MILITARY_SHIPS] = makeIDataHolder(
targetRel = 750,
relChng = 8,
validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_ALLOW_TANKING] = makeIDataHolder(
targetRel = 750,
relChng = 7,
validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_MINOR_CP_COOP] = makeIDataHolder(
targetRel = 1000,
relChng = 6,
effectivity = 0.05,
validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MAJOR_CP_COOP] = makeIDataHolder(
targetRel = 1000,
relChng = 1,
effectivity = 0.05,
validityInterval = (875, 10000),
)
pactDescrs[Const.PACT_SHARE_SCANNER] = makeIDataHolder(
targetRel = 1000,
relChng = 1,
validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MINOR_SCI_COOP] = makeIDataHolder(
targetRel = 750,
relChng = 1,
effectivity = 0.05,
validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MAJOR_SCI_COOP] = makeIDataHolder(
targetRel = 1000,
relChng = 1,
effectivity = 0.05,
validityInterval = (875, 10000),
)
## Morale
baseGovPwr = 50000
maxMorale = 100.0
minMoraleTrgt = 30.0
revoltThr = 25.0
moraleChngPerc = 0.03
moraleHighPopPenalty = 2.0
moraleBasePop = 10000
moraleLowPop = 5000
moraleLowPopBonus = 40.0
moraleLostWhenSurrender = 0.0
moraleLostNoFood = 1.0
moraleModPlHit = 96.0 # how many morale point per 1 per cent of damage
moralePerPointChance = 5.0 # for every point below revoltThr % chance for revolt
moraleProdStep = 10
moraleProdBonus = [-0.875, -0.75, -0.625, -0.50, -0.375, -0.25, -0.125, 0.0, 0.0, 0.125, 0.25]
# we expect pop reserve from TL to get into unemployed
# tlPopReserve * TL1
# if we get no reserve, there is a hit, if we get at least
# the reserve, it's a bonus, linear in between
unemployedMoraleLow = -20
unemployedMoraleHigh = 10
## Revolt
revoltDestrBio = 0.05
revoltDestrMin = 0.05
revoltDestrEn = 0.05
revoltPenalty = 0.75
## Messages
messageMaxAge = turnsPerDay * 3
## Projects
projECOINIT3PlBio = 1
## Ships
shipImprovementMod = 1.05
shipMaxImprovements = 5
shipMaxDesigns = 40
shipExpToLevel = {0:1, 1:2, 2:2, 3:3, 4:3, 5:3, 6:3, 7:4, 8:4, 9:4, 10:4, 11:4,
12:4, 13:4, 15:5}
shipDefLevel = 5
shipLevelEff = {1:0.50, 2:0.75, 3:1.00, 4:1.25, 5:1.50}
shipBaseExpMod = 20
shipBaseExp = {0:10, 1:20, 2:40, 3:80, 4:160}
shipTargetPerc = [25, 50, 90, 100]
shipMinUpgrade = 120
shipUpgradeMod = 1.375
shipUpgradePts = [1, 3, 10]
weaponDmgDegrade = [1.0, 0.5, 0.25, 0.125]
## EMR
emrMinDuration = 36
emrMaxDuration = 60
emrPeriod = 576
emrSeasons = [None, None, None, None]
emrSeasons[0] = makeIDataHolder(
name = "spring",
startTime = 0,
endTime = 143,
emrLevelMin = 0.75,
emrLevelMax = 1.25,
)
emrSeasons[1] = makeIDataHolder(
name = "summer",
startTime = 144,
endTime = 287,
emrLevelMin = 0.50,
emrLevelMax = 1.00,
)
emrSeasons[2] = makeIDataHolder(
name = "fall",
startTime = 287,
endTime = 431,
emrLevelMin = 0.50,
emrLevelMax = 1.50,
)
emrSeasons[3] = makeIDataHolder(
name = "winter",
startTime = 432,
endTime = 575,
emrLevelMin = 1.00,
emrLevelMax = 1.50,
)
## Pirates
## General
pirateInfluenceRange = 7.5 # in parsecs
pirateGovPwr = int(500000 * 1.25)
## Fame
pirateGainFamePropability = lambda d: 2 - d * 0.2
pirateLoseFameProbability = lambda d: 1 - (15 - d) * 0.2
pirateCaptureInRangeFame = 1
pirateSurvivalFame = 1
pirateCaptureOutOfRangeFame = -1
## Colonization
pirateColonyCostMod = 1.5 # base multiplier - all other multipliers are multiplied by this
pirateTL3StratResColonyCostMod = 0.25
piratePlayerZoneCostMod = 1.25
pirateColonyFameZoneCost = lambda d: min(d * 0.1 + pirateTL3StratResColonyCostMod,1)
pirateColonyPlayerZoneCost = lambda d: piratePlayerZoneCostMod + (d - 15) * 0.01 * piratePlayerZoneCostMod
## Techs
pirateCanStealImprovements = 3
pirateGrantHSE = 60*24*3600 #60 days; AI only
pirateGrantASSEM = 105*24*3600 #105 days; AI only
pirateGrantCOND = 105*24*3600 #105 days; AI only
## Timed events (not implemented)
pirateTimerMod = 3*24*3600 # +/- up to 3 days for each grant
pirateTimerRum = 20*24*3600 #20 days; grant Brewery, Rum strategic resource, and Drunken Factory (110% Pirate Prison; requires Rum)
pirateTimerEnslavement = 60*24*3600 #60 days; grant Prison
pirateTimerEDENStructure = 120*24*3600 #120 days; grant EDEN Factory (you have discovered a prototype factory...; 135% Pirate Prison; requires Rum)
pirateTimerBerserk = 150*24*3600 #150 days; grant "Berserk" ship module (major defense penalty; major ATT bonus; requires Rum)
pirateTimerSlaveMine = 180*24*3600 #180 days; grant Slave Mine (mining facility with hamster wheel for power; 160% Pirate Prison; requires Rum)
## Bonuses
galLeaderBonus = 0.05
galImperatorBonus = 0.10
## Combat
combatStructureHitMod = 0.75
combatShipHitMod = 0.75
combatHitXferMod = 3.00
combatStructDefense = 1
|
dahaic/outerspace
|
server/lib/ige/ospace/Rules/__init__.py
|
Python
|
gpl-2.0
| 11,626
|
#!/usr/bin/env python
#
# api.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
# Author: Pietro Delsante <p.delsante@certego.net>
# www.certego.net
#
import os
from django.contrib.auth.models import User
from tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS
from tastypie.fields import ListField, ForeignKey
from tastypie.authentication import BasicAuthentication, ApiKeyAuthentication, SessionAuthentication, MultiAuthentication
from pcapoptikon.authorization import CertegoDjangoAuthorization
from pcapoptikon.fields import Base64FileField
from main.models import *
def is_post(bundle):
if bundle.request.method == 'post':
return True
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'user'
authentication = MultiAuthentication(BasicAuthentication(), ApiKeyAuthentication(), SessionAuthentication())
authorization = CertegoDjangoAuthorization()
allowed_methods = ['get']
fields = ['id', 'username']
ordering = ['id', 'username']
class TaskResource(ModelResource):
pcap_file = Base64FileField("pcap_file", use_in=is_post)
user = ForeignKey(UserResource, 'user', full=True)
results = ListField(attribute='results', null=True, blank=True, default=None)
def obj_create(self, bundle, **kwargs):
return super(TaskResource, self).obj_create(bundle, user=bundle.request.user)
def alter_list_data_to_serialize(self, request, data):
for item in data['objects']:
item.data['filename'] = os.path.basename(Task.objects.get(pk=item.data['id']).pcap_file.name)
return data
class Meta:
queryset = Task.objects.all().order_by('-id')
resource_name = 'task'
allowed_methods = ['get', 'post']
authentication = MultiAuthentication(BasicAuthentication(), ApiKeyAuthentication(), SessionAuthentication())
authorization = CertegoDjangoAuthorization()
filtering = {
'submitted_on': ALL,
'user': ALL,
'status': ALL,
}
ordering = ['id', 'submitted_on', 'status']
|
certego/pcapoptikon
|
main/api.py
|
Python
|
gpl-2.0
| 2,785
|
from libraries import JSONDictionary
holdingsData = {
'defense': {
'superiorCastle': 50,
'castle': 40,
'smallCastle': 30,
'hall': 20,
'tower': 10
},
'influence': {
'firstBorn': 20,
'secondBorn': 10,
'otherChildren': 5
},
'lands': {
'terrainCosts': {
'hills': 7,
'mountains': 9,
'plains': 5,
'wetlands': 3
},
'featureCost': {
'population': {
'hamlet': 10,
'smallTown': 20,
'largeTown': 30,
'smallCity': 40,
'largeCity': 50
},
'waterBodies': {
'stream': 1,
'river': 3,
'pond': 5,
'lake': 7
},
'seaAccess': {
'coast': 3,
'island': 10
},
'woods': {
'lightWoods': 3,
'denseWoods': 5
},
'extras': {
'grassland': 1,
'road': 5,
'ruin': 3,
}
},
'realms': {
'dorne': (
'hills',
'mountains',
'plains'
),
'dragonstone': (
'hills',
'plains',
'wetlands'
),
'theIronIslands': (
'hills',
'plains'
),
'kingslanding': (
'plains'
),
'mountainsOfTheMoon': (
'hills',
'mountains'
),
'theNorth': (
'plains',
'hills',
'mountains',
'wetlands'
),
'theReach': (
'plains'
),
'riverlands': (
'hills',
'plains',
'wetlands'
),
'theStormlands': (
'hills',
'mountains',
'plains',
'wetlands'
),
'westernlands': (
'hills',
'mountains',
'plains'
)
}
}
}
class Holdings(object):
defenseHoldings = None
influenceHoldings = None
landTerrains = None
def __init__(self, holdingsDict):
self.defenseHoldings = list()
self.influenceHoldings = list()
self.landTerrains = list()
self.defense = JSONDictionary(holdingsDict).getKeyValue('defense')
self.influence = JSONDictionary(holdingsDict).getKeyValue('influence')
self.lands = JSONDictionary(holdingsDict).getKeyValue('lands')
def generateAllHoldings(self, houseDict, realm):
'''Function to generate all holdings in one process,
takes the house dict generated by the house Stat generator
and the house realm'''
self.generateDefense(houseDict)
self.generateInfluence(houseDict)
self.generateLand(houseDict, realm)
return houseDict
def generateDefense(self, houseDict, *args):
'''Buys defense holdings for the house and returns
the remaining unspend points. Takes a house dictonary to add
items to it and can take a specific defense dictionary
in case you don't want to use the standard one.'''
if len(args) < 1:
defenseDict = self.defense
else:
defenseDict = args[0]
defenseTotal = houseDict['defense']
if defenseTotal > defenseDict['superiorCastle']:
self.defenseHoldings.append('Superior Castle')
defenseTotal -= defenseDict['superiorCastle']
if self.defenseHoldings.count('Superior Castle') > 0:
while defenseTotal > defenseDict['smallCastle']:
self.defenseHoldings.append('Small Castle')
defenseTotal -= defenseDict['smallCastle']
while defenseTotal > defenseDict['hall']:
self.defenseHoldings.append('Hall')
defenseTotal -= defenseDict['hall']
while defenseTotal > defenseDict['tower']:
self.defenseHoldings.append('Tower')
defenseTotal -= defenseDict['tower']
else:
if defenseTotal > defenseDict['castle']:
self.defenseHoldings.append('Castle')
defenseTotal -= defenseDict['castle']
if self.defenseHoldings.count('Castle') > 0:
while defenseTotal > defenseDict['hall']:
self.defenseHoldings.append('Hall')
defenseTotal -= defenseDict['hall']
while defenseTotal > defenseDict['tower']:
self.defenseHoldings.append('Tower')
defenseTotal -= defenseDict['tower']
else:
while defenseTotal > defenseDict['smallCastle']:
self.defenseHoldings.append('Small Castle')
defenseTotal -= defenseDict['smallCastle']
while defenseTotal > defenseDict['hall']:
self.defenseHoldings.append('Hall')
defenseTotal -= defenseDict['hall']
while defenseTotal > defenseDict['tower']:
self.defenseHoldings.append('Tower')
defenseTotal -= defenseDict['tower']
houseDict['defenseHoldings'] = self.defenseHoldings
houseDict['remainingDefense'] = defenseTotal
return houseDict
def generateInfluence(self, houseDict, *args):
if len(args) < 1:
influenceDict = self.influence
else:
influenceDict = args[0]
influenceTotal = houseDict['influence']
if influenceTotal < 11:
houseDict['maxStatus'] = 2
elif influenceTotal < 21:
houseDict['maxStatus'] = 3
elif influenceTotal < 41:
houseDict['maxStatus'] = 4
elif influenceTotal < 51:
houseDict['maxStatus'] = 5
elif influenceTotal < 61:
houseDict['maxStatus'] = 6
elif influenceTotal < 71:
houseDict['maxStatus'] = 7
if influenceTotal < influenceDict['firstBorn']:
houseDict['influenceHoldings'] = self.influenceHoldings
houseDict['remainingInfluence'] = influenceTotal
return houseDict
else:
self.influenceHoldings.append('First Born')
influenceTotal -= influenceDict['firstBorn']
if influenceTotal < influenceDict['secondBorn']:
houseDict['influenceHoldings'] = self.influenceHoldings
houseDict['remainingInfluence'] = influenceTotal
return houseDict
else:
self.influenceHoldings.append('Second Born')
influenceTotal -= influenceDict['secondBorn']
while influenceTotal > influenceDict['otherChildren']:
self.influenceHoldings.append('Other child')
influenceTotal -= influenceDict['otherChildren']
houseDict['influenceHoldings'] = self.influenceHoldings
houseDict['remainingInfluence'] = influenceTotal
return houseDict
def generateLand(self, houseDict, realm, *args):
if len(args) < 1:
terrainsDict = self.lands
else:
terrainsDict = args[0]
print realm
print type(realm)
print JSONDictionary(terrainsDict).getKeyValue(realm)
|
ondoheer/GOT-english
|
holdings.py
|
Python
|
gpl-2.0
| 7,657
|
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 3 02:10:23 2011
@author: IxxI
@version: v1.0
"""
import sys
import logging
from optparse import OptionParser
from commands import Commands, CLIENT, SERVER
from mcp import getchangedsrc_side
def main():
parser = OptionParser(version='MCP %s' % Commands.fullversion())
parser.add_option('--client', dest='only_client', action='store_true', help='only process client', default=False)
parser.add_option('--server', dest='only_server', action='store_true', help='only process server', default=False)
parser.add_option('-c', '--config', dest='config', help='additional configuration file')
options, _ = parser.parse_args()
getchangedsrc(options.config, options.only_client, options.only_server)
def getchangedsrc(conffile, only_client, only_server):
try:
commands = Commands(conffile)
# client or server
process_client = True
process_server = True
if only_client and not only_server:
process_server = False
if only_server and not only_client:
process_client = False
if process_client:
getchangedsrc_side(commands, CLIENT)
if process_server:
getchangedsrc_side(commands, SERVER)
except Exception: # pylint: disable-msg=W0703
logging.exception('FATAL ERROR')
sys.exit(1)
if __name__ == '__main__':
main()
|
mviitanen/marsmod
|
mcp/runtime/getchangedsrc.py
|
Python
|
gpl-2.0
| 1,420
|
#! /usr/bin/env python
from distutils.core import setup, Extension
from distutils.util import get_platform
import shutil
import os, sys
def buil_all():
packages=['miasm2',
'miasm2/arch',
'miasm2/arch/x86',
'miasm2/arch/arm',
'miasm2/arch/aarch64',
'miasm2/arch/msp430',
'miasm2/arch/sh4',
'miasm2/arch/mips32',
'miasm2/core',
'miasm2/expression',
'miasm2/ir',
'miasm2/ir/translators',
'miasm2/analysis',
'miasm2/os_dep',
'miasm2/jitter',
'miasm2/jitter/arch',
'miasm2/jitter/loader',
]
ext_modules_no_tcc = [
Extension("miasm2.jitter.VmMngr",
["miasm2/jitter/vm_mngr.c",
"miasm2/jitter/vm_mngr_py.c"]),
Extension("miasm2.jitter.arch.JitCore_x86",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_x86.c"]),
Extension("miasm2.jitter.arch.JitCore_arm",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_arm.c"]),
Extension("miasm2.jitter.arch.JitCore_aarch64",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_aarch64.c"]),
Extension("miasm2.jitter.arch.JitCore_msp430",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_msp430.c"]),
Extension("miasm2.jitter.arch.JitCore_mips32",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_mips32.c"]),
Extension("miasm2.jitter.Jitgcc",
["miasm2/jitter/Jitgcc.c"]),
Extension("miasm2.jitter.Jitllvm",
["miasm2/jitter/Jitllvm.c"]),
]
ext_modules_all = [
Extension("miasm2.jitter.VmMngr",
["miasm2/jitter/vm_mngr.c",
"miasm2/jitter/vm_mngr_py.c"]),
Extension("miasm2.jitter.arch.JitCore_x86",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_x86.c"]),
Extension("miasm2.jitter.arch.JitCore_arm",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_arm.c"]),
Extension("miasm2.jitter.arch.JitCore_aarch64",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_aarch64.c"]),
Extension("miasm2.jitter.arch.JitCore_msp430",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_msp430.c"]),
Extension("miasm2.jitter.arch.JitCore_mips32",
["miasm2/jitter/JitCore.c",
"miasm2/jitter/vm_mngr.c",
"miasm2/jitter/arch/JitCore_mips32.c"]),
Extension("miasm2.jitter.Jitllvm",
["miasm2/jitter/Jitllvm.c"]),
Extension("miasm2.jitter.Jitgcc",
["miasm2/jitter/Jitgcc.c"]),
Extension("miasm2.jitter.Jittcc",
["miasm2/jitter/Jittcc.c"],
libraries=["tcc"])
]
print 'building'
build_ok = False
for name, ext_modules in [('all', ext_modules_all),
('notcc', ext_modules_no_tcc)]:
print 'build with', repr(name)
try:
s = setup(
name = 'Miasm',
version = '2.0',
packages = packages,
package_data = {'miasm2':['jitter/*.h',
'jitter/arch/*.h',]},
ext_modules = ext_modules,
# Metadata
author = 'Fabrice Desclaux',
author_email = 'serpilliere@droid-corp.org',
description = 'Machine code manipulation library',
license = 'GPLv2',
# keywords = '',
# url = '',
)
except SystemExit, e:
print repr(e)
continue
build_ok = True
break
if not build_ok:
raise ValueError('Unable to build Miasm!')
print 'build', name
if name == 'notcc':
print
print "*"*80
print "Warning: TCC is not properly installed,"
print "Miasm will be installed without TCC Jitter"
print "Etheir install TCC or use LLVM jitter"
print "*"*80
print
# we copy libraries from build dir to current miasm directory
build_base = None
if 'build' in s.command_options:
if 'build_base' in s.command_options['build']:
build_base = s.command_options['build']['build_base']
if build_base is None:
build_base = "build"
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
build_base = os.path.join('build','lib' + plat_specifier)
print build_base
def buil_no_tcc():
setup(
name = 'Miasm',
version = '2.0',
packages=['miasm2', 'miasm2/tools',
'miasm2/expression', 'miasm2/graph', 'miasm2/arch',
'miasm2/core', 'miasm2/tools/emul_lib' ],
package_data = {'miasm2':['tools/emul_lib/*.h']},
# data_files = [('toto', ['miasm2/tools/emul_lib/queue.h'])],
# Metadata
author = 'Fabrice Desclaux',
author_email = 'serpilliere@droid-corp.org',
description = 'Machine code manipulation library',
license = 'GPLv2',
# keywords = '',
# url = '',
)
def try_build():
buil_all()
"""
try:
buil_all()
return
except:
print "WARNING cannot build with libtcc!, trying without it"
print "Miasm will not be able to emulate code"
buil_no_tcc()
"""
try_build()
|
chubbymaggie/miasm
|
setup.py
|
Python
|
gpl-2.0
| 6,252
|
# -*- coding: UTF-8 -*-
from __future__ import with_statement
from __future__ import division
import pyejdb
import os.path
from datetime import datetime
from datetime import timedelta
from datetime import date as pydate
import json
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
# ==================== basic tools and settings ===================
def note_date_str():
d = timedelta(hours = 7)
td = (datetime.now() - d).date()
rst = "%d-%d-%d" % (td.year, td.month, td.day)
return rst
# ======================= template ========================
class NTemplatePool():
def __init__(self, db):
self.db = db
def list_all(self):
names = []
_ids = []
with self.db.find("templates", {}) as cur:
for p in cur:
names.append(p["title"])
_ids.append(p["_id"])
return (names, _ids)
def update(self, template):
template["_update_time"] = datetime.utcnow()
with self.db.find("templates", {"title" : template["title"]}) as cur:
if len(cur):
template["_id"] = cur[0]["_id"]
template["_create_time"] = cur[0]["_create_time"]
template["_instance_count"] = cur[0]["_instance_count"]
self.db.save("templates", template)
return template
def remove(self, title = "", _id = ""):
if len(title) == 0 and len(_id) == 0:
return
if _id:
try:
self.db.remove("templates", _id)
except Exception, e:
return
try:
_id = self.select_by_title(title)["_id"]
self.db.remove("templates", _id)
except Exception, e:
return
def select_by_title(self, title):
rst = self.db.find("templates", {"title" : title})
if len(rst):
return rst[0]
else:
return None
def exist_title(self, title):
rst = self.db.find("templates", {"title" : title})
if len(rst):
return True
else:
return False
class NTemplateKit():
def create(self, title, content):
rst = {}
rst['title'] = title
rst['content'] = content
rst['_update_time'] = rst['_create_time'] = datetime.utcnow()
rst['_instance_count'] = 0
return rst
def derivate(self, title, template):
return self.create(title, template['content'])
def to_text(self, content):
rst = unicode(json.dumps(content, ensure_ascii = False, indent = 4, sort_keys = True))
return rst
def check_instance(self, template, text):
text = unicode(text)
try:
dct = json.loads(text, "utf-8")
except Exception, e:
return unicode(e)
else:
for key in template["content"].keys():
if dct.has_key(key) == 0:
return "lack of tag " + key + "!"
for key in dct.keys():
if template["content"].has_key(key) == 0:
return "unexpected tag " + key + "!"
return ""
def make_content(self, text):
rst = {}
text = unicode(text)
try:
rst = json.loads(text, "utf-8")
except Exception, e:
rst["_ERROR"] = unicode(e)
finally:
return rst
# ======================= instance =========================
class NInstancePool():
def __init__(self, db):
self.db = db
def update(self, instance):
instance["_update_time"] = datetime.utcnow()
self.db.save("instances", instance)
return instance
def list_by_date(self, date):
names = []
_ids = []
with self.db.find("instances", {"date" : date}) as cur:
for p in cur:
names.append(p["temp"])
_ids.append(p["_id"])
return (names, _ids)
def get_by_id(self, _id):
with self.db.find("instances", {"_id" : _id}) as cur:
if len(cur) <= 0:
return {"content" : ""}
else:
return cur[0]
def remove(self, _id = ""):
try:
self.db.remove("instances", _id)
except Exception, e:
return
class NInstanceKit():
def create(self, temp, date, content):
rst = {}
rst["temp"] = temp["title"]
rst["date"] = date
rst["content"] = content
rst["_update_time"] = rst["_create_time"] = datetime.utcnow()
return rst
def derivate(self, date, instance):
return self.create(instance["temp"], date, instance["content"])
def to_text(self, content):
rst = unicode(json.dumps(content, ensure_ascii = False, indent = 4, sort_keys = True))
return rst
def to_dict(self, text):
try:
dct = json.loads(text)
except Exception, e:
return 0
else:
return dct
# ============================ ui ================================
from PyQt5 import QtGui, QtCore, QtWidgets
from PyQt5.QtWidgets import QMessageBox, QWidget
from PyQt5.QtCore import QDate
from ui import mainwidget
from ui import templatedialog
from ui import datedialog
import sys
# -------------------------- Date Dialog -------------------------
class DateDialog(QtWidgets.QDialog):
def __init__(self, parent = None, current = None):
QtWidgets.QDialog.__init__(self, parent)
self.parent = parent
self.ui = datedialog.Ui_Dialog()
self.ui.setupUi(self)
if current != None:
self.today = self.to_date(current)
self.set_today()
self.ui.but_today.clicked.connect(self.set_today)
self.ui.buttonBox.accepted.connect(self.change_date)
def to_date(self, date_str):
a = date_str.split("-")
return QDate(int(a[0]), int(a[1]), int(a[2]))
def to_str(self, date_obj):
return str(date_obj.year()) + "-" + str(date_obj.month()) + "-" + str(date_obj.day())
def set_today(self):
self.ui.date_edit.setDate(self.today)
def change_date(self):
date = self.ui.date_edit.date()
self.parent.date = self.to_str(date)[:]
# -------------------------- Temp Dialog -------------------------
class TempDialog(QtWidgets.QDialog):
def __init__(self, parent = None, title = "", text = "{\n\n}"):
QtWidgets.QDialog.__init__(self, parent)
self.ui = templatedialog.Ui_Dialog()
self.ui.setupUi(self)
self.ui.textEdit.setText(unicode(text))
self.ui.lineEdit.setText(unicode(title))
self.ui.buttonBox.disconnect()
self.ui.buttonBox.accepted.connect(self.try_save)
self.ui.buttonBox.rejected.connect(self.finish)
self.template_pool = parent.template_pool
self.template_kit = parent.template_kit
def finish(self):
self.done(0)
def try_save(self):
title = unicode(self.ui.lineEdit.text())
text = unicode(self.ui.textEdit.toPlainText())
if len(title) == 0:
box = QMessageBox(self)
box.setText(u"无效的模板名")
box.exec_()
elif self.template_pool.exist_title(title):
box = QMessageBox(self)
box.setText(u"模板名重复")
box.exec_()
else:
content = self.template_kit.make_content(text)
if not content.has_key("_ERROR"):
self.template_pool.update(self.template_kit.create(title, content))
box = QMessageBox(self)
box.setText(u"成功添加模板")
box.exec_()
self.done(1)
return
else:
box = QMessageBox(self)
box.setText(u"模板书写错误")
box.setDetailedText(content["_ERROR"])
box.exec_()
# ------------------------ Main GUI --------------------------
class MyGui(QtWidgets.QWidget):
def __init__(self, parent = None):
QtWidgets.QWidget.__init__(self, parent)
self.db = pyejdb.EJDB("data/db", pyejdb.DEFAULT_OPEN_MODE)
self.ui = mainwidget.Ui_Form()
self.ui.setupUi(self)
self.date = note_date_str()
self.today = note_date_str()
self.template_kit = NTemplateKit()
self.template_pool = NTemplatePool(self.db)
self.template_cur = None
self.instance_pool = NInstancePool(self.db)
self.instance_kit = NInstanceKit()
self.template_list = []
self.instance_list = []
self.ui.but_instantiate.clicked.connect(self.template_instantiate)
self.ui.butbox.accepted.connect(self.instance_submit)
self.ui.butbox.rejected.connect(self.instance_abandon)
self.ui.but_temp_new.clicked.connect(self.template_new)
self.ui.but_temp_der.clicked.connect(self.template_derivate)
self.ui.but_temp_rm.clicked.connect(self.template_remove)
self.ui.but_inst_rm.clicked.connect(self.instance_remove)
self.ui.but_date_sel.clicked.connect(self.date_select)
self.date_refresh()
self.template_list_all()
print "ok"
# =================== functions ===================
def template_list_all(self):
(lst, self.template_list) = self.template_pool.list_all()
self.ui.templates_view.clear()
if lst == None:
return 0
for each in lst:
self.ui.templates_view.addItem(each)
self.ui.templates_view.sortItems()
return 1
def template_instantiate(self):
if self.ui.templates_view.currentItem() == None:
self.template_cur = None
return
else:
self.template_cur = self.template_pool.select_by_title(
self.ui.templates_view.currentItem().text())
self.ui.instance_edit.setText(unicode(self.template_kit.to_text(self.template_cur["content"])))
def instance_submit(self):
text = unicode(self.ui.instance_edit.toPlainText())
if len(text.strip()) == 0:
return
msg = self.template_kit.check_instance(self.template_cur, text)
if len(msg):
box = QMessageBox()
box.setText(u"记录书写错误")
box.setDetailedText(msg)
box.exec_()
else:
box = QMessageBox()
box.setDetailedText(text)
box.setText(u"确定要将记录添加入日记中吗?")
box.setStandardButtons(QMessageBox.Save | QMessageBox.Discard | QMessageBox.Cancel);
ret = box.exec_()
if ret == QMessageBox.Discard:
self.instance_abandon()
return
elif ret == QMessageBox.Cancel:
return
else:
content = self.instance_kit.to_dict(text)
self.instance_pool.update(self.instance_kit.create(self.template_cur, self.date, content))
self.instance_list_by_date(self.date)
def instance_abandon(self):
self.template_cur = None
self.ui.instance_edit.setText("")
def instance_list_by_date(self, date):
self.ui.instances_view.clear()
(lst, self.instance_list) = self.instance_pool.list_by_date(date)
if lst == None:
return 0
for each in lst:
self.ui.instances_view.addItem(each)
return 1
def instance_remove(self):
if self.ui.instances_view.currentItem() == None:
return
else:
instance_cur = self.instance_list[self.ui.instances_view.currentRow()]
box = QMessageBox()
box.setText(u"确定要移除记录?")
box.setDetailedText(self.instance_kit.to_text(self.instance_pool.get_by_id(instance_cur)["content"]))
box.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)
ret = box.exec_()
if ret == QMessageBox.Ok:
self.instance_pool.remove(_id = instance_cur)
self.instance_list_by_date(self.date)
def template_new(self):
dialog = TempDialog(self)
dialog.exec_()
self.template_list_all()
def template_derivate(self):
if self.ui.templates_view.currentItem() == None:
template_cur = None
return
else:
template_cur = self.template_pool.select_by_title(
self.ui.templates_view.currentItem().text())
title = template_cur["title"]
text = self.template_kit.to_text(template_cur["content"])
dialog = TempDialog(self, title, text)
dialog.exec_()
self.template_list_all()
def template_remove(self):
if self.ui.templates_view.currentItem() == None:
template_cur = None
return
else:
template_cur = self.template_pool.select_by_title(
self.ui.templates_view.currentItem().text())
box = QMessageBox()
box.setText(u"确定要移除模板?")
box.setDetailedText(self.template_kit.to_text(template_cur["content"]))
box.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)
ret = box.exec_()
if ret == QMessageBox.Ok:
self.template_pool.remove(_id = template_cur["_id"])
self.template_list_all()
def date_refresh(self):
self.ui.date_label.setText(u"日期 : " + self.date)
self.instance_list_by_date(self.date)
def date_select(self):
dialog = DateDialog(self, self.today)
dialog.exec_()
self.date_refresh()
# ======================== test and main =========================
def test1():
db = pyejdb.EJDB("data/db", pyejdb.DEFAULT_OPEN_MODE | pyejdb.JBOTRUNC)
template_kit = NTemplateKit()
template_pool = NTemplatePool(db)
sleep_content = {
"Y/M/D" : "",
"sleep_time" : "",
"awake_time" : "",
"place" : ""
}
sleep_template = template_kit.create(u"sleep", sleep_content)
template_pool.update(sleep_template)
meal_content = {
"dishes" : [],
"cost" : "",
"place" : "",
"with" : [],
}
template_pool.update(template_kit.create(u"dinner", meal_content))
template_pool.update(template_kit.create(u"lunch", meal_content))
print template_pool.list_all()
template_cur = template_pool.select_by_title(u"lunch")
print type(template_cur)
print type(template_cur["content"])
print template_cur["content"]
def test_gui():
app = QtWidgets.QApplication(sys.argv)
myapp = MyGui()
myapp.show()
sys.exit(app.exec_())
if __name__ == "__main__":
test_gui()
test1()
|
xunkai55/lifenote
|
lifenote.py
|
Python
|
gpl-2.0
| 14,796
|
# -*- coding: utf-8 -*-
import logging as _logging
import sys
__author__ = 'luckydonald'
__all__ = ["logging", "ColoredFormatter", "ColoredStreamHandler", "LevelByNameFilter"]
DEFAULT_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
class ColoredFormatter(_logging.Formatter):
class Color(object):
"""
utility to return ansi colored text.
just to store the colors next to the function.
"""
# Color codes: http://misc.flogisoft.com/bash/tip_colors_and_formatting
def __init__(self, formatter):
self.formatter = formatter
# end def
colors = {
'default': 39,
'black': 30,
'red': 31,
'green': 32,
'yellow': 33,
'blue': 34,
'magenta': 35,
'cyan': 36,
'white': 37,
'grey': 90,
'bgred': 41,
'bggrey': 100
}
mapping = {
'INFO': 'default',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'magenta',
'DEBUG': 'grey',
'SUCCESS': 'green'
}
color_prefix = '\033['
def prepare_color(self, color_number):
return ('%s%dm') % (self.color_prefix, color_number)
# end def
def colored(self, record):
"""
adsasd
"""
color = self.mapping.get(record.levelname, 'default')
clr = self.colors[color]
formatter = dict(
all_off=self.prepare_color(0), # Reset all attributes
color_on=self.prepare_color(clr), # Color as given/from lookup
color_off=self.prepare_color(39), # Default foreground color
inverse_on=self.prepare_color(7), # Reverse (invert the foreground and background colors)
inverse_off=self.prepare_color(27), # Reset reverse
background_off=self.prepare_color(49), # Default background color
file_color_on=self.prepare_color(94), # Light blue
)
lines = []
# log level
level = "{level:8}".format(level=record.levelname)
level_filler = "{:{}}".format("", len(level))
# file/function name
filepart = record.name if record.name else ""
filepart += "." + record.funcName if record.funcName != "<module>" else ""
# date
timestamp = " " + record.asctime if record.asctime else ""
timestamp_filler = " " * len(timestamp)
# Process / Thread names
process_thread_part = process_thread_part_filler = ""
has_process = hasattr(record, "processName") and record.processName != "MainProcess"
has_thread = hasattr(record, "threadName") and record.threadName != "MainThread"
if has_process:
process_thread_part += "{inverse_on}{file_color_on}{thread}{inverse_off}".format(
thread=record.processName, **formatter)
# end if
if has_process and has_thread:
process_thread_part += " ".format(**formatter)
# end if
if has_thread:
process_thread_part += "{inverse_on}{file_color_on}{process}{inverse_off}".format(
process=record.threadName, **formatter)
# end if
if has_process or has_thread and len(timestamp) > 1:
# inject the formatting here, as empty formatting without text would break
process_thread_part_filler = " " * len(process_thread_part)
process_thread_part = "{file_color_on}{inverse_on}{process_thread_part}{inverse_off}".format(
process_thread_part=process_thread_part, **formatter
)
# abuse {date} to contain a space for us. Because a blue colored space is still a space.
timestamp += " " # so the file don't immediatly follows after the date.
timestamp_filler += " "
# end if
# original message
lines_ = record.message.splitlines()
first_line = True if len(lines_) > 1 else None
for line in lines_:
if first_line is None: # single line
lines.append(
"{color_on}{inverse_on}{level}{inverse_off}{color_on}{date}{color_off}{file_color_on}{process_thread_part} {file_color_on}{filepart}:{color_off} {color_on}{message}{color_off}{background_off}{all_off}".format(
filepart=filepart, level=level, message=line, date=timestamp,
process_thread_part=process_thread_part, **formatter))
break
elif first_line: # first line
lines.append(
"{color_on}{inverse_on}{level}{inverse_off}{color_on}{date}{color_off}{file_color_on}{process_thread_part} {file_color_on}{filepart}:{color_off} {all_off}".format(
filepart=filepart, level=level, message=line, date=timestamp,
process_thread_part=process_thread_part, **formatter))
lines.append(
"{color_on}{inverse_on}{level_filler}{inverse_off}{color_off} {color_on}{message}{color_off}{background_off}{all_off}".format(
level_filler=level_filler, message=line, date=timestamp, date_filler=timestamp_filler,
process_thread_part=process_thread_part, process_thread_part_filler=process_thread_part_filler,
**formatter))
first_line = False
# end for
return "\n".join(lines)
# end def
def __init__(self, date_formatter=None):
super(ColoredFormatter, self).__init__(datefmt=date_formatter)
self.color_instance = self.Color(self)
def colored(self, record):
return self.color_instance.colored(record)
# end def
def format(self, record):
super(ColoredFormatter, self).format(record)
# if record.threadName == "MainThread":
# pass
# part1 = self.firstpart.format(record)
if self.usesTime():
record.asctime = self.formatTime(record, self.datefmt)
else:
record.asctime = ""
s = self._fmt % record.__dict__ # py3: s = self.formatMessage(record)
if record.exc_text:
if s[-1:] != "\n":
s += "\n"
try:
s = s + record.exc_text
except UnicodeError: # PYTHON 2, LOL!
# Sometimes filenames have non-ASCII chars, which can lead
# to errors when s is Unicode and record.exc_text is str
# See issue 8924.
# We also use replace for when there are multiple
# encodings, e.g. UTF-8 for the filesystem and latin-1
# for a script. See issue 13232.
s = s + record.exc_text.decode(sys.getfilesystemencoding(), 'replace')
if hasattr(record, "stack_info") and record.stack_info: # py2 doesn't have .stack_info
if s[-1:] != "\n":
s += "\n"
s = s + record.stack_info # py3: self.formatStack()
record.message = s
return self.colored(record)
# end def
def usesTime(self):
return bool(self.datefmt)
# end def
# end class
class ColoredStreamHandler(_logging.StreamHandler):
DEFAULT_DATE_FORMAT = DEFAULT_DATE_FORMAT
"""
Like the normal StreamHandler,
but it automatically sets
`self.formatter = ColoredFormatter()`
"""
def __init__(self, stream=None, date_formatter=DEFAULT_DATE_FORMAT):
super(ColoredStreamHandler, self).__init__(stream)
self.formatter = ColoredFormatter(date_formatter=date_formatter)
# noinspection PyProtectedMember,PyProtectedMember
class _LoggingWrapper(object):
SUCCESS = 25 # between WARNING and INFO
def __init__(self):
_logging.addLevelName(self.SUCCESS, 'SUCCESS')
def getLoglevelInt(self, level_string):
"""
You provide a String, and get a level int
:param level_string: The level.
:type level_string: str
:return: level
:rtype : int
:raises KeyError: if the level does not exists.
"""
if isinstance(level_string, int):
return level_string
# end if
try:
return {
# as names:
"NOTSET": _logging.NOTSET,
"DEBUG": _logging.DEBUG,
"INFO": _logging.INFO,
"SUCCESS": self.SUCCESS,
"WARNING": _logging.WARNING,
"WARN": _logging.WARN, # = WARNING
"ERROR": _logging.ERROR,
"FATAL": _logging.FATAL, # = CRITICAL
"CRITICAL": _logging.CRITICAL,
}[level_string]
except KeyError:
try:
return int(level_string)
except ValueError:
pass
# end try
raise # key not known, and is no integer either.
# end try
# end def
def __call__(self, logger_name):
"""
alias to logger.getLogger(logger_name)
:param logger_name:
:return: self.getLogger(logger_name)
"""
return self.getLogger(logger_name)
# end def
def add_colored_handler(
self, logger_name=None, stream=None, level=None, date_formatter=DEFAULT_DATE_FORMAT, filter=None,
):
"""
Register a logger handler to colorfull print the messages.
If stream is specified, the instance will use it for logging output; otherwise, sys.stdout will be used.
If you supply a date_formatter, there will also be printed a date/time for the logged messages.
Uses python `time.strftime` time formating, see https://docs.python.org/library/time.html#time.strftime
:keyword logger_name: the name of the logger you want to register the printing to.
Probably you should use __name__ , to use your package's logger,
"root" will force all loggers to output.
:type logger_name: str
:keyword stream: An output stream. Default: sys.stdout
:keyword date_formatter: Apply a format for time output. If `None` is given, no time gets printed.
Something like "%Y-%m-%d %H:%M:%S". Uses python `time.strftime` time formating,
see https://docs.python.org/library/time.html#time.strftime
:type date_formatter: str
:keyword filter: A filter applied to the handler.
:return: None
"""
logger = self.getLogger(logger_name) # __name__
if stream is None:
import sys
stream = sys.stdout
# end if
handler = ColoredStreamHandler(stream=stream, date_formatter=date_formatter)
if filter:
handler.addFilter(filter)
# end if
logger.addHandler(handler)
if level:
logger.setLevel(level)
# end if
return logger
# end def
def test_logger_levels(self, name=__name__, force_all_levels=True):
logger = self.getLogger(name)
logger_level = logger.getEffectiveLevel()
if force_all_levels:
logger.setLevel(logging.DEBUG)
logger.debug('level debug')
logger.info('level info')
logger.success('level success')
logger.warning('level warning')
logger.error('level error')
logger.critical('level critical')
if force_all_levels:
logger.setLevel(logger_level)
# end if
# end def
def getLogger(self, name=None):
"""
Adds the .success() function to the logger, else it is same as logger.getLogger()
:param logger: a logging.getLogger() logger.
:return:
"""
logger = _logging.getLogger(name)
logger.SUCCESS = self.SUCCESS
setattr(logger, "success", lambda message, *args: logger._log(self.SUCCESS, message, args))
return logger
if sys.version < "3":
def success(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'SUCCESS'.
To pass exception information, use the keyword argument exc_info with
a true value.
logger.debug("Houston, we landed in the %s", "moon", exc_info=False)
"""
self._success(msg, *args, **kwargs)
else:
from .py3 import success
def _success(self, msg, *args, **kwargs):
if len(self.root.handlers) == 0:
self.basicConfig()
self.root._log(self.SUCCESS, msg, args, **kwargs)
def __getattr__(self, item):
if item != "__getattr__":
if item in self.__dict__:
return self.__dict__[item]
if item == "getLogger":
return self.getLogger
elif item == "success":
return self.success
elif item == "SUCCESS":
return self.SUCCESS
# end if
pass
else:
return getattr(_logging, item)
# end def
# end class
logging = _LoggingWrapper()
class LevelByNameFilter(object):
def __init__(self, root=logging.WARNING, debug=None, info=None, success=None, warning=None, error=None,
critical=None, by_level=None):
"""
A filter where you specify logging levels bound to names (package names, as known from importing)
:param root: level the root should have to be logged. None to disable.
:param debug: all loggers which should log debug and above.
:param info: all loggers which should log info and above.
:param success: all loggers which should log success and above.
:param warning: all loggers which should log warning and above.
:param error: all loggers which should log error and above.
:param critical: all loggers which should log critical and above.
:param by_level: a dict with levels as a key, and names to log as value.
Example: {10: "__main__", 20: "a.b.c", 30: ["a.b.d", "a.b.e"], logging.WARNING: "a"}
"""
self.mapping = dict()
if root:
if isinstance(root, str):
root = logging.getLoglevelInt(root)
assert isinstance(root, int)
self.mapping[""] = root
# end
level = logging.DEBUG
self.parse_argument(debug, logging.DEBUG)
self.parse_argument(info, logging.INFO)
self.parse_argument(success, logging.SUCCESS)
self.parse_argument(warning, logging.WARNING)
self.parse_argument(error, logging.ERROR)
self.parse_argument(critical, logging.CRITICAL)
if by_level:
assert isinstance(by_level, dict)
for level, files in by_level.items():
self.parse_argument(files, level)
# end for
# end if
# end def
def parse_argument(self, argument, level):
if argument:
if isinstance(argument, tuple):
argument = list(argument)
if not isinstance(argument, list):
argument = [argument]
# end if
assert isinstance(argument, list)
for part in argument:
if isinstance(part, (list, tuple)):
argument.extend(part)
elif not isinstance(part, str):
raise TypeError("argument {val!r} is type {type}, should be str.".format(val=part, type=type(part)))
elif "," in part:
argument.append(part.split(","))
else:
self.mapping[part.strip() + "."] = level
# end if
# end for
# end if
# end def
def filter(self, record):
if not self.mapping:
return False # allow
# end if
name = record.name + "."
mapping_path = "" # default is "" = root
for k in self.mapping:
if name.startswith(k):
if len(mapping_path) < len(k): # we got a longer path. longer = more specific.
mapping_path = k
# end if
# end if
# end for
if mapping_path in self.mapping: # e.g. root "" is not specified.
level = self.mapping[mapping_path]
return record.levelno >= level
# end if
return False
# end def
# end class
# # Test code to get a threaded logger:
# from luckydonaldUtils.logger import logging;import threading; from time import sleep;
# def lel():
# logger.debug(threading.current_thread().name)
# logging.test_logger_levels(),logger.critical("littlepip is\nBEST\npony!")
# # end def
# logger = logging.add_colored_handler(level=logging.DEBUG, date_formatter="%Y-%m-%d %H:%M:%S");logging.add_colored_handler(level=logging.DEBUG); lel();sleep(1);thread=threading.Thread(target=lel);thread.start();thread.join()
|
luckydonald/python-utils
|
luckydonaldUtils/logger/__init__.py
|
Python
|
gpl-2.0
| 17,417
|
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import gettext_lazy as _
from django.utils.translation import get_language
from partners.models import Partner
class PartnersPlugin(CMSPluginBase):
name = _("Partners")
model = CMSPlugin
render_template = "partners/partners_plugin.html"
text_enabled = False
allow_children = False
def render(self, context, instance, placeholder):
language = get_language()
if language is None:
language = 'en'
partners = Partner.objects.filter(active=True).translated(language).order_by('translations__name').all()
context.update({
'partners': partners,
})
return context
plugin_pool.register_plugin(PartnersPlugin)
|
gitsimon/tq_website
|
partners/cms_plugins/partners_plugin.py
|
Python
|
gpl-2.0
| 859
|
# -*- Mode: Python; test-case-name: flumotion.test.test_dialogs -*-
# -*- coding: UTF-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
"""generic dialogs such as progress, error and about"""
import gettext
import os
import gobject
import gtk
from flumotion.configure import configure
from flumotion.common.errors import AlreadyConnectedError, \
AlreadyConnectingError, ConnectionFailedError, \
ConnectionRefusedError
__version__ = "$Rev: 7833 $"
_ = gettext.gettext
def exceptionHandler(exctype, value, tb):
"""
Opens a dialog showing an exception in a nice dialog allowing
the users to report it directly to trac.
@param exctype : The class of the catched exception.
@type exctype : type
@param value : The exception itself.
@type value : exctype
@param tb : Contains the full traceback information.
@type tb : traceback
"""
if exctype is KeyboardInterrupt:
return
from flumotion.extern.exceptiondialog import ExceptionDialog
dialog = ExceptionDialog((exctype, value, tb))
response = dialog.run()
if response != ExceptionDialog.RESPONSE_BUG:
dialog.destroy()
return
from flumotion.common.bugreporter import BugReporter
br = BugReporter()
br.submit(dialog.getFilenames(),
dialog.getDescription(),
dialog.getSummary())
dialog.destroy()
class ProgressDialog(gtk.Dialog):
def __init__(self, title, message, parent = None):
gtk.Dialog.__init__(self, title, parent,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
self.label = gtk.Label(message)
self.vbox.pack_start(self.label, True, True, 6)
self.label.show()
self.bar = gtk.ProgressBar()
self.bar.show()
self.vbox.pack_end(self.bar, True, True, 6)
self.active = False
self._timeout_id = None
self.connect('destroy', self._destroy_cb)
def start(self):
"Show the dialog and start pulsating."
self.active = True
self.show()
self.bar.pulse()
self._timeout_id = gobject.timeout_add(200, self._pulse)
def stop(self):
"Remove the dialog and stop pulsating."
self.active = False
if self._timeout_id:
gobject.source_remove(self._timeout_id)
self._timeout_id = None
def message(self, message):
"Set the message on the dialog."
self.label.set_text(message)
def _pulse(self):
if not self.active:
# we were disabled, so stop pulsating
return False
self.bar.pulse()
return True
def _destroy_cb(self, widget):
self.stop()
class ErrorDialog(gtk.MessageDialog):
def __init__(self, message, parent=None, close_on_response=True,
secondary_text=None):
gtk.MessageDialog.__init__(self, parent, gtk.DIALOG_MODAL,
gtk.MESSAGE_ERROR, gtk.BUTTONS_OK, message)
b = self.action_area.get_children()[0]
b.set_name('ok_button')
self.message = message
if close_on_response:
self.connect("response", lambda self, response: self.hide())
# GTK 2.4 does not have format_secondary_text
if not hasattr(self, 'format_secondary_text'):
self.format_secondary_text = self._format_secondary_text_backport
if secondary_text:
self.format_secondary_text(secondary_text)
def _format_secondary_text_backport(self, secondary_text):
self.set_markup('<span weight="bold" size="larger">%s</span>'
'\n\n%s' % (self.message, secondary_text))
def run(self):
# can't run a recursive mainloop, because that mucks with
# twisted's reactor.
from twisted.internet import defer
deferred = defer.Deferred()
def callback(_, response, deferred):
self.destroy()
deferred.callback(None)
self.connect('response', callback, deferred)
self.show()
return deferred
class AboutDialog(gtk.Dialog):
def __init__(self, parent=None):
gtk.Dialog.__init__(self, _('About Flumotion'), parent,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE))
self.set_has_separator(False)
self.set_resizable(False)
self.set_border_width(12)
self.vbox.set_spacing(6)
image = gtk.Image()
self.vbox.pack_start(image)
image.set_from_file(os.path.join(configure.imagedir, 'flumotion.png'))
image.show()
version = gtk.Label(
'<span size="xx-large"><b>Flumotion %s</b></span>' %
configure.version)
version.set_selectable(True)
self.vbox.pack_start(version)
version.set_use_markup(True)
version.show()
text = _('Flumotion is a streaming media server.\n\n'
'© 2004, 2005, 2006, 2007, 2008 Fluendo S.L.')
authors = (
'Johan Dahlin',
'Pedro Gracia Fajardo',
'Arek Korbik',
'Julien Le Goff',
'Xavier Martinez',
'Jordi Massaguer Pla',
'Zaheer Abbas Merali',
'Sébastien Merle',
'Xavier Queralt Mateu',
'Mike Smith',
'Wim Taymans',
'Jan Urbański',
'Thomas Vander Stichele',
'Andy Wingo',
)
text += '\n\n<small>' + _('Authors') + ':\n'
for author in authors:
text += ' %s\n' % author
text += '</small>'
info = gtk.Label(text)
self.vbox.pack_start(info)
info.set_use_markup(True)
info.set_selectable(True)
info.set_justify(gtk.JUSTIFY_FILL)
info.set_line_wrap(True)
info.show()
def showConnectionErrorDialog(failure, info, parent=None):
if failure.check(ConnectionRefusedError):
title = _('Connection Refused')
message = (
_('"%s" refused your connection.\n'
'Check your user name and password and try again.')
% (info.host, ))
elif failure.check(ConnectionFailedError):
title = _('Connection Failed')
message = (_("Connection to manager on %s failed (%s).")
% (str(info), str(failure.getErrorMessage())))
elif failure.check(AlreadyConnectedError,
AlreadyConnectingError):
title =_('Already Connected to %s') % (info, )
message = _("You cannot connect twice to the same manager. Try "
"disconnecting first.")
else:
raise AssertionError(failure)
dialog = ErrorDialog(title, parent, True, message)
return dialog.run()
|
flyapen/UgFlu
|
flumotion/admin/gtk/dialogs.py
|
Python
|
gpl-2.0
| 7,734
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from yali.storage import StorageError
class LibraryError(StorageError):
pass
|
akuster/yali
|
yali/storage/library/__init__.py
|
Python
|
gpl-2.0
| 125
|
# framework/core.py
#
# Copyright 2011 Spencer J. McIntyre <SMcIntyre [at] SecureState [dot] net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import logging
import logging.handlers
import os
import re
import serial
import sys
from c1218.connection import Connection
from c1218.errors import C1218IOError, C1218ReadTableError
from framework.errors import FrameworkConfigurationError, FrameworkRuntimeError
from framework.options import AdvancedOptions, Options
from framework.templates import TermineterModule, TermineterModuleOptical
from framework.utilities import FileWalker, Namespace, get_default_serial_settings
from serial.serialutil import SerialException
class Framework(object):
"""
This is the main instance of the framework. It contains and
manages the serial connection as well as all of the loaded
modules.
"""
def __init__(self, stdout=None):
self.modules = {}
self.__package__ = '.'.join(self.__module__.split('.')[:-1])
package_path = __import__(self.__package__, None, None, ['__path__']).__path__[0] # that's some python black magic trickery for you
if stdout == None:
stdout = sys.stdout
self.stdout = stdout
self.directories = Namespace()
self.directories.user_data = os.path.expanduser('~') + os.sep + '.termineter' + os.sep
self.directories.modules_path = package_path + os.sep + 'modules' + os.sep
self.directories.data_path = package_path + os.sep + 'data' + os.sep
if not os.path.isdir(self.directories.data_path):
self.logger.critical('path to data not found')
raise FrameworkConfigurationError('path to data not found')
if not os.path.isdir(self.directories.user_data):
os.mkdir(self.directories.user_data)
self.serial_connection = None
self.__serial_connected__ = False
# setup logging stuff
self.logger = logging.getLogger(self.__package__ + '.' + self.__class__.__name__.lower())
main_file_handler = logging.handlers.RotatingFileHandler(self.directories.user_data + self.__package__ + '.log', maxBytes=262144, backupCount=5)
main_file_handler.setLevel(logging.DEBUG)
main_file_handler.setFormatter(logging.Formatter("%(asctime)s %(name)-50s %(levelname)-10s %(message)s"))
logging.getLogger('').addHandler(main_file_handler)
# setup and configure options
# Whether or not these are 'required' is really enforced by the individual
# modules get_missing_options method and by which options they require based
# on their respective types. See framework/templates.py for more info.
self.options = Options(self.directories)
self.options.add_boolean('USECOLOR', 'enable color on the console interface', default=False)
self.options.add_string('CONNECTION', 'serial connection string')
self.options.add_string('USERNAME', 'serial username', default='0000')
self.options.add_integer('USERID', 'serial userid', default=0)
self.options.add_string('PASSWORD', 'serial c12.18 password', default='00000000000000000000')
self.options.add_boolean('PASSWORDHEX', 'if the password is in hex', default=True)
self.advanced_options = AdvancedOptions(self.directories)
self.advanced_options.add_integer('BAUDRATE', 'serial connection baud rate', default=9600)
self.advanced_options.add_integer('BYTESIZE', 'serial connection byte size', default=serial.EIGHTBITS)
self.advanced_options.add_boolean('CACHETBLS', 'cache certain read-only tables', default=True)
self.advanced_options.set_callback('CACHETBLS', self.__opt_callback_set_table_cache_policy)
self.advanced_options.add_integer('STOPBITS', 'serial connection stop bits', default=serial.STOPBITS_ONE)
self.advanced_options.add_integer('NBRPKTS', 'c12.18 maximum packets for reassembly', default=2)
self.advanced_options.add_integer('PKTSIZE', 'c12.18 maximum packet size', default=512)
if sys.platform.startswith('linux'):
self.options.set_option('USECOLOR', 'True')
# check and configure rfcat stuff
self.rfcat_available = False
try:
import rflib
self.logger.info('the rfcat library is available')
self.rfcat_available = True
except ImportError:
self.logger.info('the rfcat library is not available, it can be found at https://code.google.com/p/rfcat/')
pass
if self.rfcat_available:
# init the values to be used
self.rfcat_connection = None
self.__rfcat_connected__ = False
self.is_rfcat_connected = lambda: self.__rfcat_connected__
# self.options.add_integer('RFCATIDX', 'the rfcat device to use', default = 0)
# start loading modules
modules_path = self.directories.modules_path
self.logger.debug('searching for modules in: ' + modules_path)
self.current_module = None
if not os.path.isdir(modules_path):
self.logger.critical('path to modules not found')
raise FrameworkConfigurationError('path to modules not found')
for module_path in FileWalker(modules_path, absolute_path=True, skip_dirs=True):
module_path = module_path.replace(os.path.sep, '/')
if not module_path.endswith('.py'):
continue
module_path = module_path[len(modules_path):-3]
module_name = module_path.split(os.path.sep)[-1]
if module_name.startswith('__'):
continue
if module_name.lower() != module_name:
continue
if module_path.startswith('rfcat') and not self.rfcat_available:
self.logger.debug('skipping module: ' + module_path + ' because rfcat is not available')
continue
# looks good, proceed to load
self.logger.debug('loading module: ' + module_path)
try:
module_instance = self.import_module(module_path)
except FrameworkRuntimeError:
continue
if not isinstance(module_instance, TermineterModule):
self.logger.error('module: ' + module_path + ' is not derived from the TermineterModule class')
continue
# if isinstance(module_instance, TermineterModuleRfcat) and not self.rfcat_available:
# self.logger.debug('skipping module: ' + module_path + ' because rfcat is not available')
# continue
if not hasattr(module_instance, 'run'):
self.logger.critical('module: ' + module_path + ' has no run() method')
raise FrameworkRuntimeError('module: ' + module_path + ' has no run() method')
if not isinstance(module_instance.options, Options) or not isinstance(module_instance.advanced_options, Options):
self.logger.critical('module: ' + module_path + ' options and advanced_options must be Options instances')
raise FrameworkRuntimeError('options and advanced_options must be Options instances')
module_instance.name = module_name
module_instance.path = module_path
self.modules[module_path] = module_instance
self.logger.info('successfully loaded ' + str(len(self.modules)) + ' modules into the framework')
return
def __repr__(self):
return '<' + self.__class__.__name__ + ' Loaded Modules: ' + str(len(self.modules)) + ', Serial Connected: ' + str(self.is_serial_connected()) + ' >'
def reload_module(self, module_path=None):
"""
Reloads a module into the framework. If module_path is not
specified, then the curent_module variable is used. Returns True
on success, False on error.
@type module_path: String
@param module_path: The name of the module to reload
"""
if module_path == None:
if self.current_module != None:
module_path = self.current_module.path
else:
self.logger.warning('must specify module if not module is currently being used')
return False
if not module_path in self.modules.keys():
self.logger.error('invalid module requested for reload')
raise FrameworkRuntimeError('invalid module requested for reload')
self.logger.info('reloading module: ' + module_path)
module_instance = self.import_module(module_path, reload_module=True)
if not isinstance(module_instance, TermineterModule):
self.logger.error('module: ' + module_path + ' is not derived from the TermineterModule class')
raise FrameworkRuntimeError('module: ' + module_path + ' is not derived from the TermineterModule class')
if not hasattr(module_instance, 'run'):
self.logger.error('module: ' + module_path + ' has no run() method')
raise FrameworkRuntimeError('module: ' + module_path + ' has no run() method')
if not isinstance(module_instance.options, Options) or not isinstance(module_instance.advanced_options, Options):
self.logger.error('module: ' + module_path + ' options and advanced_options must be Options instances')
raise FrameworkRuntimeError('options and advanced_options must be Options instances')
module_instance.name = module_path.split('/')[-1]
module_instance.path = module_path
self.modules[module_path] = module_instance
if self.current_module != None:
if self.current_module.path == module_instance.path:
self.current_module = module_instance
return True
def run(self, module=None):
if not isinstance(module, TermineterModule) and not isinstance(self.current_module, TermineterModule):
raise FrameworkRuntimeError('either the module or the current_module must be sent')
if module == None:
module = self.current_module
if isinstance(module, TermineterModuleOptical):
if not self.is_serial_connected:
raise FrameworkRuntimeError('the serial interface is disconnected')
# if isinstance(module, TermineterModuleRfcat):
# self.rfcat_connect()
result = None
self.logger.info('running module: ' + module.path)
try:
result = module.run()
except KeyboardInterrupt as error:
if isinstance(module, TermineterModuleOptical):
self.serial_connection.stop()
# if isinstance(module, TermineterModuleRfcat):
# self.rfcat_disconnect()
raise error
# if isinstance(module, TermineterModuleRfcat):
# self.rfcat_disconnect()
return result
@property
def use_colors(self):
return self.options['USECOLOR']
@use_colors.setter
def use_colors(self, value):
self.options.set_option('USECOLOR', str(value))
def get_module_logger(self, name):
"""
This returns a logger for individual modules to allow them to be
inherited from the framework and thus be named appropriately.
@type name: String
@param name: The name of the module requesting the logger
"""
return logging.getLogger(self.__package__ + '.modules.' + name)
def import_module(self, module_path, reload_module=False):
try:
module = __import__(self.__package__ + '.modules.' + module_path.replace('/', '.'), None, None, ['Module'])
if reload_module:
reload(module)
module_instance = module.Module(self)
except Exception as err:
message = 'failed to load module: ' + module_path
if isinstance(err, SyntaxError):
message += ', ' + err.msg + ' line number: ' + str(err.lineno)
self.logger.error(message)
raise FrameworkRuntimeError(message)
return module_instance
def print_error(self, message):
if self.options['USECOLOR']:
self.stdout.write('\033[1;31m[-] \033[1;m' + (os.linesep + '\033[1;31m[-] \033[1;m').join(message.split(os.linesep)) + os.linesep)
else:
self.stdout.write('[-] ' + (os.linesep + '[-] ').join(message.split(os.linesep)) + os.linesep)
self.stdout.flush()
def print_good(self, message):
if self.options['USECOLOR']:
self.stdout.write('\033[1;32m[+] \033[1;m' + (os.linesep + '\033[1;32m[+] \033[1;m').join(message.split(os.linesep)) + os.linesep)
else:
self.stdout.write('[+] ' + (os.linesep + '[+] ').join(message.split(os.linesep)) + os.linesep)
self.stdout.flush()
def print_line(self, message):
self.stdout.write(message + os.linesep)
self.stdout.flush()
def print_status(self, message):
if self.options['USECOLOR']:
self.stdout.write('\033[1;34m[*] \033[1;m' + (os.linesep + '\033[1;34m[*] \033[1;m').join(message.split(os.linesep)) + os.linesep)
else:
self.stdout.write('[*] ' + (os.linesep + '[*] ').join(message.split(os.linesep)) + os.linesep)
self.stdout.flush()
def print_hexdump(self, data):
x = str(data)
l = len(x)
i = 0
while i < l:
self.stdout.write("%04x " % i)
for j in range(16):
if i + j < l:
self.stdout.write("%02X " % ord(x[i + j]))
else:
self.stdout.write(" ")
if j % 16 == 7:
self.stdout.write(" ")
self.stdout.write(" ")
r = ""
for j in x[i:i + 16]:
j = ord(j)
if (j < 32) or (j >= 127):
r = r + "."
else:
r = r + chr(j)
self.stdout.write(r + os.linesep)
i += 16
self.stdout.flush()
def is_serial_connected(self):
"""
Returns True if the serial interface is connected.
"""
return self.__serial_connected__
def serial_disconnect(self):
"""
Closes the serial connection to the meter and disconnects from the
device.
"""
if self.__serial_connected__:
try:
self.serial_connection.close()
except C1218IOError as error:
self.logger.error('caught C1218IOError: ' + str(error))
except SerialException as error:
self.logger.error('caught SerialException: ' + str(error))
self.__serial_connected__ = False
self.logger.warning('the serial interface has been disconnected')
return True
def serial_get(self):
"""
Create the serial connection from the framework settings and return
it, setting the framework instance in the process.
"""
frmwk_c1218_settings = {
'nbrpkts': self.advanced_options['NBRPKTS'],
'pktsize': self.advanced_options['PKTSIZE']
}
frmwk_serial_settings = get_default_serial_settings()
frmwk_serial_settings['baudrate'] = self.advanced_options['BAUDRATE']
frmwk_serial_settings['bytesize'] = self.advanced_options['BYTESIZE']
frmwk_serial_settings['stopbits'] = self.advanced_options['STOPBITS']
self.logger.info('opening serial device: ' + self.options['CONNECTION'])
try:
self.serial_connection = Connection(self.options['CONNECTION'], c1218_settings=frmwk_c1218_settings, serial_settings=frmwk_serial_settings, enable_cache=self.advanced_options['CACHETBLS'])
except Exception as error:
self.logger.error('could not open the serial device')
raise error
return self.serial_connection
def serial_connect(self):
"""
Connect to the serial device and then verifies that the meter is
responding. Once the serial device is opened, this function attempts
to retreive the contents of table #0 (GEN_CONFIG_TBL) to configure
the endianess it will use. Returns True on success.
"""
username = self.options['USERNAME']
userid = self.options['USERID']
if len(username) > 10:
self.logger.error('username cannot be longer than 10 characters')
raise FrameworkConfigurationError('username cannot be longer than 10 characters')
if not (0 <= userid <= 0xffff):
self.logger.error('user id must be between 0 and 0xffff')
raise FrameworkConfigurationError('user id must be between 0 and 0xffff')
self.serial_get()
try:
self.serial_connection.start()
if not self.serial_connection.login(username, userid):
self.logger.error('the meter has rejected the username and userid')
raise FrameworkConfigurationError('the meter has rejected the username and userid')
except C1218IOError as error:
self.logger.error('serial connection has been opened but the meter is unresponsive')
raise error
try:
general_config_table = self.serial_connection.get_table_data(0)
except C1218ReadTableError as error:
self.logger.error('serial connection as been opened but the general configuration table (table #0) could not be read')
raise error
if (ord(general_config_table[0]) & 1):
self.logger.info('setting the connection to use big-endian for C1219 data')
self.serial_connection.c1219_endian = '>'
else:
self.logger.info('setting the connection to use little-endian for C1219 data')
self.serial_connection.c1219_endian = '<'
try:
self.serial_connection.stop()
except C1218IOError as error:
self.logger.error('serial connection has been opened but the meter is unresponsive')
raise error
self.__serial_connected__ = True
self.logger.warning('the serial interface has been connected')
return True
def serial_login(self):
"""
Attempt to log into the meter over the C12.18 protocol. Returns
True on success, False on a failure. This can be called by modules
in order to login with a username and password configured within
the framework instance.
"""
username = self.options['USERNAME']
userid = self.options['USERID']
password = self.options['PASSWORD']
if self.options['PASSWORDHEX']:
hex_regex = re.compile('^([0-9a-fA-F]{2})+$')
if hex_regex.match(password) == None:
self.print_error('Invalid characters in password')
raise FrameworkConfigurationError('invalid characters in password')
password = password.decode('hex')
if len(username) > 10:
self.print_error('Username cannot be longer than 10 characters')
raise FrameworkConfigurationError('username cannot be longer than 10 characters')
if not (0 <= userid <= 0xffff):
self.print_error('User id must be between 0 and 0xffff')
raise FrameworkConfigurationError('user id must be between 0 and 0xffff')
if len(password) > 20:
self.print_error('Password cannot be longer than 20 characters')
raise FrameworkConfigurationError('password cannot be longer than 20 characters')
if not self.serial_connection.start():
return False
if not self.serial_connection.login(username, userid, password):
return False
return True
def __opt_callback_set_table_cache_policy(self, policy):
if self.is_serial_connected():
self.serial_connection.set_table_cache_policy(policy)
return True
|
firebitsbr/termineter
|
framework/core.py
|
Python
|
gpl-3.0
| 18,028
|
#!/usr/bin/env python
# -*- coding: utf-8
import sys
import argparse
import anvio.db as db
import anvio.utils as utils
import anvio.terminal as terminal
from anvio.errors import ConfigError
run = terminal.Run()
progress = terminal.Progress()
current_version, next_version = [x[1:] for x in __name__.split('_to_')]
def migrate(db_path):
if db_path is None:
raise ConfigError("No database path is given.")
# make sure someone is not being funny
utils.is_profile_db(db_path)
# make sure the version is accurate
profile_db = db.DB(db_path, None, ignore_version = True)
if str(profile_db.get_version()) != current_version:
raise ConfigError("Version of this profile database is not %s (hence, this script cannot really do anything)." % current_version)
profile_db._exec('ALTER TABLE "item_orders" ADD COLUMN "additional" text')
profile_db._exec('UPDATE "item_orders" SET "additional" = "{}"')
# set the version
profile_db.remove_meta_key_value_pair('version')
profile_db.set_version(next_version)
# bye
profile_db.disconnect()
progress.end()
run.info_single('Your profile db is now %s, and you know this deserves a celebration.' % next_version, nl_after=1, nl_before=1, mc='green')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='A simple script to upgrade profile database from version %s to version %s' % (current_version, next_version))
parser.add_argument('profile_db', metavar = 'PROFILE_DB', help = "An anvi'o profile database of version %s" % current_version)
args, unknown = parser.parse_known_args()
try:
migrate(args.profile_db)
except ConfigError as e:
print(e)
sys.exit(-1)
|
meren/anvio
|
anvio/migrations/profile/v28_to_v29.py
|
Python
|
gpl-3.0
| 1,741
|
# urllib3/util.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from base64 import b64encode
from collections import namedtuple
from socket import error as SocketError
from hashlib import md5, sha1
from binascii import hexlify, unhexlify
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
try: # Test for SSL features
SSLContext = None
HAS_SNI = False
import ssl
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
from ssl import SSLContext # Modern SSL?
from ssl import HAS_SNI # Has SNI?
except ImportError:
pass
from .packages import six
from .exceptions import LocationParseError, SSLError
class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])):
"""
Datastructure for representing an HTTP URL. Used as a return value for
:func:`parse_url`.
"""
slots = ()
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None):
return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment)
@property
def hostname(self):
"""For backwards-compatibility with urlparse. We're nice like that."""
return self.host
@property
def request_uri(self):
"""Absolute path including the query string."""
uri = self.path or '/'
if self.query is not None:
uri += '?' + self.query
return uri
def split_first(s, delims):
"""
Given a string and an iterable of delimiters, split on the first found
delimiter. Return two split parts and the matched delimiter.
If not found, then the first part is the full input string.
Example: ::
>>> split_first('foo/bar?baz', '?/=')
('foo', 'bar?baz', '/')
>>> split_first('foo/bar?baz', '123')
('foo/bar?baz', '', None)
Scales linearly with number of delims. Not ideal for large number of delims.
"""
min_idx = None
min_delim = None
for d in delims:
idx = s.find(d)
if idx < 0:
continue
if min_idx is None or idx < min_idx:
min_idx = idx
min_delim = d
if min_idx is None or min_idx < 0:
return s, '', None
return s[:min_idx], s[min_idx+1:], min_delim
def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
Partly backwards-compatible with :mod:`urlparse`.
Example: ::
>>> parse_url('http://google.com/mail/')
Url(scheme='http', host='google.com', port=None, path='/', ...)
>>> parse_url('google.com:80')
Url(scheme=None, host='google.com', port=80, path=None, ...)
>>> parse_url('/foo?bar')
Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
"""
# While this code has overlap with stdlib's urlparse, it is much
# simplified for our needs and less annoying.
# Additionally, this imeplementations does silly things to be optimal
# on CPython.
scheme = None
auth = None
host = None
port = None
path = None
fragment = None
query = None
# Scheme
if '://' in url:
scheme, url = url.split('://', 1)
# Find the earliest Authority Terminator
# (http://tools.ietf.org/html/rfc3986#section-3.2)
url, path_, delim = split_first(url, ['/', '?', '#'])
if delim:
# Reassemble the path
path = delim + path_
# Auth
if '@' in url:
auth, url = url.split('@', 1)
# IPv6
if url and url[0] == '[':
host, url = url[1:].split(']', 1)
# Port
if ':' in url:
_host, port = url.split(':', 1)
if not host:
host = _host
if not port.isdigit():
raise LocationParseError("Failed to parse: %s" % url)
port = int(port)
elif not host and url:
host = url
if not path:
return Url(scheme, auth, host, port, path, query, fragment)
# Fragment
if '#' in path:
path, fragment = path.split('#', 1)
# Query
if '?' in path:
path, query = path.split('?', 1)
return Url(scheme, auth, host, port, path, query, fragment)
def get_host(url):
"""
Deprecated. Use :func:`.parse_url` instead.
"""
p = parse_url(url)
return p.scheme or 'http', p.hostname, p.port
def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
basic_auth=None):
"""
Shortcuts for generating request headers.
:param keep_alive:
If ``True``, adds 'connection: keep-alive' header.
:param accept_encoding:
Can be a boolean, list, or string.
``True`` translates to 'gzip,deflate'.
List will get joined by comma.
String will be used as provided.
:param user_agent:
String representing the user-agent you want, such as
"python-urllib3/0.6"
:param basic_auth:
Colon-separated username:password string for 'authorization: basic ...'
auth header.
Example: ::
>>> make_headers(keep_alive=True, user_agent="Batman/1.0")
{'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
>>> make_headers(accept_encoding=True)
{'accept-encoding': 'gzip,deflate'}
"""
headers = {}
if accept_encoding:
if isinstance(accept_encoding, str):
pass
elif isinstance(accept_encoding, list):
accept_encoding = ','.join(accept_encoding)
else:
accept_encoding = 'gzip,deflate'
headers['accept-encoding'] = accept_encoding
if user_agent:
headers['user-agent'] = user_agent
if keep_alive:
headers['connection'] = 'keep-alive'
if basic_auth:
headers['authorization'] = 'Basic ' + \
b64encode(six.b(basic_auth)).decode('utf-8')
return headers
def is_connection_dropped(conn): # Platform-specific
"""
Returns True if the connection is dropped and should be closed.
:param conn:
:class:`httplib.HTTPConnection` object.
Note: For platforms like AppEngine, this will always return ``False`` to
let the platform handle connection recycling transparently for us.
"""
sock = getattr(conn, 'sock', False)
if not sock: # Platform-specific: AppEngine
return False
if not poll:
if not select: # Platform-specific: AppEngine
return False
try:
return select([sock], [], [], 0.0)[0]
except SocketError:
return True
# This version is better on platforms that support it.
p = poll()
p.register(sock, POLLIN)
for (fno, ev) in p.poll(0.0):
if fno == sock.fileno():
# Either data is buffered (bad), or the connection is dropped.
return True
def resolve_cert_reqs(candidate):
"""
Resolves the argument to a numeric constant, which can be passed to
the wrap_socket function/method from the ssl module.
Defaults to :data:`ssl.CERT_NONE`.
If given a string it is assumed to be the name of the constant in the
:mod:`ssl` module or its abbrevation.
(So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
If it's neither `None` nor a string we assume it is already the numeric
constant which can directly be passed to wrap_socket.
"""
if candidate is None:
return CERT_NONE
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, 'CERT_' + candidate)
return res
return candidate
def resolve_ssl_version(candidate):
"""
like resolve_cert_reqs
"""
if candidate is None:
return PROTOCOL_SSLv23
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
res = getattr(ssl, 'PROTOCOL_' + candidate)
return res
return candidate
def assert_fingerprint(cert, fingerprint):
"""
Checks if given fingerprint matches the supplied certificate.
:param cert:
Certificate as bytes object.
:param fingerprint:
Fingerprint as string of hexdigits, can be interspersed by colons.
"""
# Maps the length of a digest to a possible hash function producing
# this digest.
hashfunc_map = {
16: md5,
20: sha1
}
fingerprint = fingerprint.replace(':', '').lower()
digest_length, rest = divmod(len(fingerprint), 2)
if rest or digest_length not in hashfunc_map:
raise SSLError('Fingerprint is of invalid length.')
# We need encode() here for py32; works on py2 and p33.
fingerprint_bytes = unhexlify(fingerprint.encode())
hashfunc = hashfunc_map[digest_length]
cert_digest = hashfunc(cert).digest()
if not cert_digest == fingerprint_bytes:
raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
.format(hexlify(fingerprint_bytes),
hexlify(cert_digest)))
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
if hasattr(obj, 'fp'):
# Object is a container for another file-like object that gets released
# on exhaustion (e.g. HTTPResponse)
return obj.fp is None
return obj.closed
if SSLContext is not None: # Python 3.2+
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
ssl_version=None):
"""
All arguments except `server_hostname` have the same meaning as for
:func:`ssl.wrap_socket`
:param server_hostname:
Hostname of the expected certificate
"""
context = SSLContext(ssl_version)
context.verify_mode = cert_reqs
if ca_certs:
try:
context.load_verify_locations(ca_certs)
# Py32 raises IOError
# Py33 raises FileNotFoundError
except Exception as e: # Reraise as SSLError
raise SSLError(e)
if certfile:
# FIXME: This block needs a test.
context.load_cert_chain(certfile, keyfile)
if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
return context.wrap_socket(sock, server_hostname=server_hostname)
return context.wrap_socket(sock)
else: # Python 3.1 and earlier
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
ssl_version=None):
return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
ca_certs=ca_certs, cert_reqs=cert_reqs,
ssl_version=ssl_version)
|
mattesno1/Sick-Beard
|
lib/requests/packages/urllib3/util.py
|
Python
|
gpl-3.0
| 11,326
|
#
# Copyright 2012 Markus Pielmeier
#
# This file is part of tagfs.
#
# tagfs is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tagfs is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tagfs. If not, see <http://www.gnu.org/licenses/>.
import unittest
import tagfs.freebase_support as freebase_support
class WhenQueryWithOneFilerAndOneSelector(unittest.TestCase):
def setUp(self):
super(WhenQueryWithOneFilerAndOneSelector, self).setUp()
self.query = freebase_support.Query({'filter': 'filterValue', 'selector': None, })
def testThenSelectedKeysIsSelector(self):
self.assertEqual(list(self.query.selectedKeys), ['selector',])
def testThenQueryStringIs(self):
self.assertEqual(self.query.queryString, '{"filter":"filterValue","selector":[]}')
|
rowhit/tagfs
|
src/test/tagfs_test_small/test_freebase_support_query.py
|
Python
|
gpl-3.0
| 1,239
|
# -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
# Copyright 2003-2006 Dirk Meyer <dischi@freevo.org>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
import re
from six import string_types
__all__ = ['resolve']
def resolve(code):
"""
Transform the given (2- or 3-letter) language code to a human readable
language name. The return value is a 2-tuple containing the given
language code and the language name. If the language code cannot be
resolved, name will be 'Unknown (<code>)'.
"""
if not code:
return None, None
if not isinstance(code, string_types):
raise ValueError('Invalid language code specified by parser')
# Take up to 3 letters from the code.
code = re.split(r'[^a-z]', code.lower())[0][:3]
for spec in codes:
if code in spec[:-1]:
return code, spec[-1]
return code, u'Unknown (%r)' % code
# Parsed from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
codes = (
('aar', 'aa', u'Afar'),
('abk', 'ab', u'Abkhazian'),
('ace', u'Achinese'),
('ach', u'Acoli'),
('ada', u'Adangme'),
('ady', u'Adyghe'),
('afa', u'Afro-Asiatic '),
('afh', u'Afrihili'),
('afr', 'af', u'Afrikaans'),
('ain', u'Ainu'),
('aka', 'ak', u'Akan'),
('akk', u'Akkadian'),
('alb', 'sq', u'Albanian'),
('ale', u'Aleut'),
('alg', u'Algonquian languages'),
('alt', u'Southern Altai'),
('amh', 'am', u'Amharic'),
('ang', u'English, Old '),
('anp', u'Angika'),
('apa', u'Apache languages'),
('ara', 'ar', u'Arabic'),
('arc', u'Official Aramaic '),
('arg', 'an', u'Aragonese'),
('arm', 'hy', u'Armenian'),
('arn', u'Mapudungun'),
('arp', u'Arapaho'),
('art', u'Artificial '),
('arw', u'Arawak'),
('asm', 'as', u'Assamese'),
('ast', u'Asturian'),
('ath', u'Athapascan languages'),
('aus', u'Australian languages'),
('ava', 'av', u'Avaric'),
('ave', 'ae', u'Avestan'),
('awa', u'Awadhi'),
('aym', 'ay', u'Aymara'),
('aze', 'az', u'Azerbaijani'),
('bad', u'Banda languages'),
('bai', u'Bamileke languages'),
('bak', 'ba', u'Bashkir'),
('bal', u'Baluchi'),
('bam', 'bm', u'Bambara'),
('ban', u'Balinese'),
('baq', 'eu', u'Basque'),
('bas', u'Basa'),
('bat', u'Baltic '),
('bej', u'Beja'),
('bel', 'be', u'Belarusian'),
('bem', u'Bemba'),
('ben', 'bn', u'Bengali'),
('ber', u'Berber '),
('bho', u'Bhojpuri'),
('bih', 'bh', u'Bihari'),
('bik', u'Bikol'),
('bin', u'Bini'),
('bis', 'bi', u'Bislama'),
('bla', u'Siksika'),
('bnt', u'Bantu '),
('bos', 'bs', u'Bosnian'),
('bra', u'Braj'),
('bre', 'br', u'Breton'),
('btk', u'Batak languages'),
('bua', u'Buriat'),
('bug', u'Buginese'),
('bul', 'bg', u'Bulgarian'),
('bur', 'my', u'Burmese'),
('byn', u'Blin'),
('cad', u'Caddo'),
('cai', u'Central American Indian '),
('car', u'Galibi Carib'),
('cat', 'ca', u'Catalan'),
('cau', u'Caucasian '),
('ceb', u'Cebuano'),
('cel', u'Celtic '),
('cha', 'ch', u'Chamorro'),
('chb', u'Chibcha'),
('che', 'ce', u'Chechen'),
('chg', u'Chagatai'),
('chi', 'zh', u'Chinese'),
('chk', u'Chuukese'),
('chm', u'Mari'),
('chn', u'Chinook jargon'),
('cho', u'Choctaw'),
('chp', u'Chipewyan'),
('chr', u'Cherokee'),
('chu', 'cu', u'Church Slavic'),
('chv', 'cv', u'Chuvash'),
('chy', u'Cheyenne'),
('cmc', u'Chamic languages'),
('cop', u'Coptic'),
('cor', 'kw', u'Cornish'),
('cos', 'co', u'Corsican'),
('cpe', u'Creoles and pidgins, English based '),
('cpf', u'Creoles and pidgins, French-based '),
('cpp', u'Creoles and pidgins, Portuguese-based '),
('cre', 'cr', u'Cree'),
('crh', u'Crimean Tatar'),
('crp', u'Creoles and pidgins '),
('csb', u'Kashubian'),
('cus', u'Cushitic '),
('cze', 'cs', u'Czech'),
('dak', u'Dakota'),
('dan', 'da', u'Danish'),
('dar', u'Dargwa'),
('day', u'Land Dayak languages'),
('del', u'Delaware'),
('den', u'Slave '),
('dgr', u'Dogrib'),
('din', u'Dinka'),
('div', 'dv', u'Divehi'),
('doi', u'Dogri'),
('dra', u'Dravidian '),
('dsb', u'Lower Sorbian'),
('dua', u'Duala'),
('dum', u'Dutch, Middle '),
('dut', 'nl', u'Dutch'),
('dyu', u'Dyula'),
('dzo', 'dz', u'Dzongkha'),
('efi', u'Efik'),
('egy', u'Egyptian '),
('eka', u'Ekajuk'),
('elx', u'Elamite'),
('eng', 'en', u'English'),
('enm', u'English, Middle '),
('epo', 'eo', u'Esperanto'),
('est', 'et', u'Estonian'),
('ewe', 'ee', u'Ewe'),
('ewo', u'Ewondo'),
('fan', u'Fang'),
('fao', 'fo', u'Faroese'),
('fat', u'Fanti'),
('fij', 'fj', u'Fijian'),
('fil', u'Filipino'),
('fin', 'fi', u'Finnish'),
('fiu', u'Finno-Ugrian '),
('fon', u'Fon'),
('fre', 'fr', u'French'),
('frm', u'French, Middle '),
('fro', u'French, Old '),
('frr', u'Northern Frisian'),
('frs', u'Eastern Frisian'),
('fry', 'fy', u'Western Frisian'),
('ful', 'ff', u'Fulah'),
('fur', u'Friulian'),
('gaa', u'Ga'),
('gay', u'Gayo'),
('gba', u'Gbaya'),
('gem', u'Germanic '),
('geo', 'ka', u'Georgian'),
('ger', 'de', u'German'),
('gez', u'Geez'),
('gil', u'Gilbertese'),
('gla', 'gd', u'Gaelic'),
('gle', 'ga', u'Irish'),
('glg', 'gl', u'Galician'),
('glv', 'gv', u'Manx'),
('gmh', u'German, Middle High '),
('goh', u'German, Old High '),
('gon', u'Gondi'),
('gor', u'Gorontalo'),
('got', u'Gothic'),
('grb', u'Grebo'),
('grc', u'Greek, Ancient '),
('gre', 'el', u'Greek, Modern '),
('grn', 'gn', u'Guarani'),
('gsw', u'Swiss German'),
('guj', 'gu', u'Gujarati'),
('gwi', u"Gwich'in"),
('hai', u'Haida'),
('hat', 'ht', u'Haitian'),
('hau', 'ha', u'Hausa'),
('haw', u'Hawaiian'),
('heb', 'he', u'Hebrew'),
('her', 'hz', u'Herero'),
('hil', u'Hiligaynon'),
('him', u'Himachali'),
('hin', 'hi', u'Hindi'),
('hit', u'Hittite'),
('hmn', u'Hmong'),
('hmo', 'ho', u'Hiri Motu'),
('hsb', u'Upper Sorbian'),
('hun', 'hu', u'Hungarian'),
('hup', u'Hupa'),
('iba', u'Iban'),
('ibo', 'ig', u'Igbo'),
('ice', 'is', u'Icelandic'),
('ido', 'io', u'Ido'),
('iii', 'ii', u'Sichuan Yi'),
('ijo', u'Ijo languages'),
('iku', 'iu', u'Inuktitut'),
('ile', 'ie', u'Interlingue'),
('ilo', u'Iloko'),
('ina', 'ia', u'Interlingua '),
('inc', u'Indic '),
('ind', 'id', u'Indonesian'),
('ine', u'Indo-European '),
('inh', u'Ingush'),
('ipk', 'ik', u'Inupiaq'),
('ira', u'Iranian '),
('iro', u'Iroquoian languages'),
('ita', 'it', u'Italian'),
('jav', 'jv', u'Javanese'),
('jbo', u'Lojban'),
('jpn', 'ja', u'Japanese'),
('jpr', u'Judeo-Persian'),
('jrb', u'Judeo-Arabic'),
('kaa', u'Kara-Kalpak'),
('kab', u'Kabyle'),
('kac', u'Kachin'),
('kal', 'kl', u'Kalaallisut'),
('kam', u'Kamba'),
('kan', 'kn', u'Kannada'),
('kar', u'Karen languages'),
('kas', 'ks', u'Kashmiri'),
('kau', 'kr', u'Kanuri'),
('kaw', u'Kawi'),
('kaz', 'kk', u'Kazakh'),
('kbd', u'Kabardian'),
('kha', u'Khasi'),
('khi', u'Khoisan '),
('khm', 'km', u'Central Khmer'),
('kho', u'Khotanese'),
('kik', 'ki', u'Kikuyu'),
('kin', 'rw', u'Kinyarwanda'),
('kir', 'ky', u'Kirghiz'),
('kmb', u'Kimbundu'),
('kok', u'Konkani'),
('kom', 'kv', u'Komi'),
('kon', 'kg', u'Kongo'),
('kor', 'ko', u'Korean'),
('kos', u'Kosraean'),
('kpe', u'Kpelle'),
('krc', u'Karachay-Balkar'),
('krl', u'Karelian'),
('kro', u'Kru languages'),
('kru', u'Kurukh'),
('kua', 'kj', u'Kuanyama'),
('kum', u'Kumyk'),
('kur', 'ku', u'Kurdish'),
('kut', u'Kutenai'),
('lad', u'Ladino'),
('lah', u'Lahnda'),
('lam', u'Lamba'),
('lao', 'lo', u'Lao'),
('lat', 'la', u'Latin'),
('lav', 'lv', u'Latvian'),
('lez', u'Lezghian'),
('lim', 'li', u'Limburgan'),
('lin', 'ln', u'Lingala'),
('lit', 'lt', u'Lithuanian'),
('lol', u'Mongo'),
('loz', u'Lozi'),
('ltz', 'lb', u'Luxembourgish'),
('lua', u'Luba-Lulua'),
('lub', 'lu', u'Luba-Katanga'),
('lug', 'lg', u'Ganda'),
('lui', u'Luiseno'),
('lun', u'Lunda'),
('luo', u'Luo '),
('lus', u'Lushai'),
('mac', 'mk', u'Macedonian'),
('mad', u'Madurese'),
('mag', u'Magahi'),
('mah', 'mh', u'Marshallese'),
('mai', u'Maithili'),
('mak', u'Makasar'),
('mal', 'ml', u'Malayalam'),
('man', u'Mandingo'),
('mao', 'mi', u'Maori'),
('map', u'Austronesian '),
('mar', 'mr', u'Marathi'),
('mas', u'Masai'),
('may', 'ms', u'Malay'),
('mdf', u'Moksha'),
('mdr', u'Mandar'),
('men', u'Mende'),
('mga', u'Irish, Middle '),
('mic', u"Mi'kmaq"),
('min', u'Minangkabau'),
('mis', u'Uncoded languages'),
('mkh', u'Mon-Khmer '),
('mlg', 'mg', u'Malagasy'),
('mlt', 'mt', u'Maltese'),
('mnc', u'Manchu'),
('mni', u'Manipuri'),
('mno', u'Manobo languages'),
('moh', u'Mohawk'),
('mol', 'mo', u'Moldavian'),
('mon', 'mn', u'Mongolian'),
('mos', u'Mossi'),
('mul', u'Multiple languages'),
('mun', u'Munda languages'),
('mus', u'Creek'),
('mwl', u'Mirandese'),
('mwr', u'Marwari'),
('myn', u'Mayan languages'),
('myv', u'Erzya'),
('nah', u'Nahuatl languages'),
('nai', u'North American Indian'),
('nap', u'Neapolitan'),
('nau', 'na', u'Nauru'),
('nav', 'nv', u'Navajo'),
('nbl', 'nr', u'Ndebele, South'),
('nde', 'nd', u'Ndebele, North'),
('ndo', 'ng', u'Ndonga'),
('nds', u'Low German'),
('nep', 'ne', u'Nepali'),
('new', u'Nepal Bhasa'),
('nia', u'Nias'),
('nic', u'Niger-Kordofanian '),
('niu', u'Niuean'),
('nno', 'nn', u'Norwegian Nynorsk'),
('nob', 'nb', u'Bokm\xe5l, Norwegian'),
('nog', u'Nogai'),
('non', u'Norse, Old'),
('nor', 'no', u'Norwegian'),
('nqo', u"N'Ko"),
('nso', u'Pedi'),
('nub', u'Nubian languages'),
('nwc', u'Classical Newari'),
('nya', 'ny', u'Chichewa'),
('nym', u'Nyamwezi'),
('nyn', u'Nyankole'),
('nyo', u'Nyoro'),
('nzi', u'Nzima'),
('oci', 'oc', u'Occitan '),
('oji', 'oj', u'Ojibwa'),
('ori', 'or', u'Oriya'),
('orm', 'om', u'Oromo'),
('osa', u'Osage'),
('oss', 'os', u'Ossetian'),
('ota', u'Turkish, Ottoman '),
('oto', u'Otomian languages'),
('paa', u'Papuan '),
('pag', u'Pangasinan'),
('pal', u'Pahlavi'),
('pam', u'Pampanga'),
('pan', 'pa', u'Panjabi'),
('pap', u'Papiamento'),
('pau', u'Palauan'),
('peo', u'Persian, Old '),
('per', 'fa', u'Persian'),
('phi', u'Philippine '),
('phn', u'Phoenician'),
('pli', 'pi', u'Pali'),
('pol', 'pl', u'Polish'),
('pon', u'Pohnpeian'),
('por', 'pt', u'Portuguese'),
('pra', u'Prakrit languages'),
('pro', u'Proven\xe7al, Old '),
('pus', 'ps', u'Pushto'),
('qaa-qtz', u'Reserved for local use'),
('que', 'qu', u'Quechua'),
('raj', u'Rajasthani'),
('rap', u'Rapanui'),
('rar', u'Rarotongan'),
('roa', u'Romance '),
('roh', 'rm', u'Romansh'),
('rom', u'Romany'),
('rum', 'ro', u'Romanian'),
('run', 'rn', u'Rundi'),
('rup', u'Aromanian'),
('rus', 'ru', u'Russian'),
('sad', u'Sandawe'),
('sag', 'sg', u'Sango'),
('sah', u'Yakut'),
('sai', u'South American Indian '),
('sal', u'Salishan languages'),
('sam', u'Samaritan Aramaic'),
('san', 'sa', u'Sanskrit'),
('sas', u'Sasak'),
('sat', u'Santali'),
('scc', 'sr', u'Serbian'),
('scn', u'Sicilian'),
('sco', u'Scots'),
('scr', 'hr', u'Croatian'),
('sel', u'Selkup'),
('sem', u'Semitic '),
('sga', u'Irish, Old '),
('sgn', u'Sign Languages'),
('shn', u'Shan'),
('sid', u'Sidamo'),
('sin', 'si', u'Sinhala'),
('sio', u'Siouan languages'),
('sit', u'Sino-Tibetan '),
('sla', u'Slavic '),
('slo', 'sk', u'Slovak'),
('slv', 'sl', u'Slovenian'),
('sma', u'Southern Sami'),
('sme', 'se', u'Northern Sami'),
('smi', u'Sami languages '),
('smj', u'Lule Sami'),
('smn', u'Inari Sami'),
('smo', 'sm', u'Samoan'),
('sms', u'Skolt Sami'),
('sna', 'sn', u'Shona'),
('snd', 'sd', u'Sindhi'),
('snk', u'Soninke'),
('sog', u'Sogdian'),
('som', 'so', u'Somali'),
('son', u'Songhai languages'),
('sot', 'st', u'Sotho, Southern'),
('spa', 'es', u'Spanish'),
('srd', 'sc', u'Sardinian'),
('srn', u'Sranan Tongo'),
('srr', u'Serer'),
('ssa', u'Nilo-Saharan '),
('ssw', 'ss', u'Swati'),
('suk', u'Sukuma'),
('sun', 'su', u'Sundanese'),
('sus', u'Susu'),
('sux', u'Sumerian'),
('swa', 'sw', u'Swahili'),
('swe', 'sv', u'Swedish'),
('syc', u'Classical Syriac'),
('syr', u'Syriac'),
('tah', 'ty', u'Tahitian'),
('tai', u'Tai '),
('tam', 'ta', u'Tamil'),
('tat', 'tt', u'Tatar'),
('tel', 'te', u'Telugu'),
('tem', u'Timne'),
('ter', u'Tereno'),
('tet', u'Tetum'),
('tgk', 'tg', u'Tajik'),
('tgl', 'tl', u'Tagalog'),
('tha', 'th', u'Thai'),
('tib', 'bo', u'Tibetan'),
('tig', u'Tigre'),
('tir', 'ti', u'Tigrinya'),
('tiv', u'Tiv'),
('tkl', u'Tokelau'),
('tlh', u'Klingon'),
('tli', u'Tlingit'),
('tmh', u'Tamashek'),
('tog', u'Tonga '),
('ton', 'to', u'Tonga '),
('tpi', u'Tok Pisin'),
('tsi', u'Tsimshian'),
('tsn', 'tn', u'Tswana'),
('tso', 'ts', u'Tsonga'),
('tuk', 'tk', u'Turkmen'),
('tum', u'Tumbuka'),
('tup', u'Tupi languages'),
('tur', 'tr', u'Turkish'),
('tut', u'Altaic '),
('tvl', u'Tuvalu'),
('twi', 'tw', u'Twi'),
('tyv', u'Tuvinian'),
('udm', u'Udmurt'),
('uga', u'Ugaritic'),
('uig', 'ug', u'Uighur'),
('ukr', 'uk', u'Ukrainian'),
('umb', u'Umbundu'),
('und', u'Undetermined'),
('urd', 'ur', u'Urdu'),
('uzb', 'uz', u'Uzbek'),
('vai', u'Vai'),
('ven', 've', u'Venda'),
('vie', 'vi', u'Vietnamese'),
('vol', 'vo', u'Volap\xfck'),
('vot', u'Votic'),
('wak', u'Wakashan languages'),
('wal', u'Walamo'),
('war', u'Waray'),
('was', u'Washo'),
('wel', 'cy', u'Welsh'),
('wen', u'Sorbian languages'),
('wln', 'wa', u'Walloon'),
('wol', 'wo', u'Wolof'),
('xal', u'Kalmyk'),
('xho', 'xh', u'Xhosa'),
('yao', u'Yao'),
('yap', u'Yapese'),
('yid', 'yi', u'Yiddish'),
('yor', 'yo', u'Yoruba'),
('ypk', u'Yupik languages'),
('zap', u'Zapotec'),
('zbl', u'Blissymbols'),
('zen', u'Zenaga'),
('zha', 'za', u'Zhuang'),
('znd', u'Zande languages'),
('zul', 'zu', u'Zulu'),
('zun', u'Zuni'),
('zxx', u'No linguistic content'),
('zza', u'Zaza'),
)
|
SickGear/SickGear
|
lib/enzyme/language.py
|
Python
|
gpl-3.0
| 15,177
|
#
# Copyright (C) 2012-2014, Quarkslab.
#
# This file is part of qb-sync.
#
# qb-sync is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import socket
import select
import base64
import binascii
import re
import ConfigParser
import traceback
HOST = 'localhost'
PORT = 9100
try:
import json
except:
print "[-] failed to import json\n%s" % repr(sys.exc_info())
sys.exit(0)
class Client():
def __init__(self, s_client, s_srv, name):
self.client_sock = s_client
self.srv_sock = s_srv
self.name = name
self.enabled = False
self.buffer = ''
def close(self):
self.enabled = False
if self.client_sock:
self.client_sock.close()
if self.srv_sock:
self.srv_sock.close()
def feed(self, data):
batch = []
self.buffer = ''.join([self.buffer, data])
if self.buffer.endswith("\n"):
batch = [req for req in self.buffer.strip().split('\n') if req != '']
self.buffer = ''
return batch
class DispatcherSrv():
def __init__(self):
self.idb_clients = []
self.dbg_client = None
self.srv_socks = []
self.opened_socks = []
self.current_dbg = None
self.current_dialect = 'unknown'
self.current_idb = None
self.current_module = None
self.sync_mode_auto = True
self.pat = re.compile('dbg disconnected')
self.req_handlers = {
'new_client': self.req_new_client,
'new_dbg': self.req_new_dbg,
'dbg_quit': self.req_dbg_quit,
'idb_n': self.req_idb_n,
'idb_list': self.req_idb_list,
'module': self.req_module,
'sync_mode': self.req_sync_mode,
'cmd': self.req_cmd,
'bc': self.req_bc,
'kill': self.req_kill
}
def bind(self, host, port):
self.dbg_srv_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.dbg_srv_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.dbg_srv_sock.bind((host, port))
self.srv_socks.append(self.dbg_srv_sock)
if not (socket.gethostbyname(host) == '127.0.0.1'):
self.localhost_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.localhost_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.localhost_sock.bind(('localhost', port))
self.srv_socks.append(self.localhost_sock)
def accept(self, s):
new_socket, addr = s.accept()
self.opened_socks.append(new_socket)
def listen(self):
for s in self.srv_socks:
s.listen(5)
def close(self, s):
s.close()
self.opened_socks.remove(s)
def loop(self):
self.listen()
self.announcement("dispatcher listening")
while True:
rlist, wlist, xlist = select.select(self.srv_socks + self.opened_socks, [], [])
if not rlist:
self.announcement("socket error: select")
raise Exception("rabbit eating the cable")
for s in rlist:
if s in self.srv_socks:
self.accept(s)
else:
self.handle(s)
def handle(self, s):
client = self.sock_to_client(s)
for req in self.recvall(client):
self.parse_exec(s, req)
# find client object for its srv socket
def sock_to_client(self, s):
if self.current_dbg and (s == self.current_dbg.srv_sock):
client = self.current_dbg
else:
clist = [client for client in self.idb_clients if (client.srv_sock == s)]
if not clist:
client = Client(None, s, None)
self.idb_clients.append(client)
else:
client = clist[0]
return client
# buffered readline like function
def recvall(self, client):
try:
data = client.srv_sock.recv(4096)
if data == '':
raise
except:
if client == self.current_dbg:
self.broadcast("debugger closed the connection")
self.dbg_quit()
else:
self.client_quit(client.srv_sock)
self.broadcast("a client quit, nb client(s) left: %d" % len(self.idb_clients))
return []
return client.feed(data)
# parse and execute requests from clients (idbs or dbg)
def parse_exec(self, s, req):
if not (req[0:8] == '[notice]'):
# this is a normal [sync] request from debugger, forward it
self.forward(req)
# receive 'dbg disconnected', socket can be closed
if re.search(self.pat, req):
self.close(s)
return
req = self.normalize(req, 8)
try:
hash = json.loads(req)
except:
print "[-] dispatcher failed to parse json\n %s\n" % req
return
type = hash['type']
if not type in self.req_handlers:
print ("[*] dispatcher unknown request: %s" % type)
return
req_handler = self.req_handlers[type]
req_handler(s, hash)
def normalize(self, req, taglen):
req = req[taglen:]
req = req.replace("\\", "\\\\")
req = req.replace("\n", "")
return req
def puts(self, msg, s):
s.sendall(msg)
# dispatcher announcements are forwarded to the idb
def announcement(self, msg, s=None):
if not s:
if not self.current_idb:
return
s = self.current_idb.client_sock
try:
s.sendall("[notice]{\"type\":\"dispatcher\",\"subtype\":\"msg\",\"msg\":\"%s\"}\n" % msg)
except:
return
# send message to all connected idb clients
def broadcast(self, msg):
for idbc in self.idb_clients:
self.announcement(msg, idbc.client_sock)
# send dbg message to currently active idb client
def forward(self, msg, s=None):
if not s:
if not self.current_idb:
return
s = self.current_idb.client_sock
if s:
s.sendall(msg + "\n")
# send dbg message to all idb clients
def forward_all(self, msg, s=None):
for idbc in self.idb_clients:
self.forward(msg, idbc.client_sock)
# disable current idb and enable new idb matched from current module name
def switch_idb(self, new_idb):
msg = "[sync]{\"type\":\"broker\",\"subtype\":\"%s\"}\n"
if (not self.current_idb == new_idb) & (self.current_idb.enabled):
self.current_idb.client_sock.sendall(msg % "disable_idb")
self.current_idb.enabled = False
if new_idb:
new_idb.client_sock.sendall(msg % "enable_idb")
self.current_idb = new_idb
new_idb.enabled = True
# a new idb client connects to the dispatcher via its broker
def req_new_client(self, srv_sock, hash):
port, name = hash['port'], hash['idb']
try:
client_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_sock.connect(('localhost', port))
self.opened_socks.append(client_sock)
except:
self.opened_socks.remove(srv_sock)
srv_sock.close()
return
# check if an idb client is already registered with the same name
conflicting = [client for client in self.idb_clients if (client.name == name)]
# promote to idb client
new_client = self.sock_to_client(srv_sock)
new_client.client_sock = client_sock
new_client.name = name
self.broadcast("add new client (listening on port %d), nb client(s): %d" % (port, len(self.idb_clients)))
if conflicting:
self.broadcast("conflicting name: %s !" % new_client.name)
if not self.current_idb:
self.current_idb = new_client
# if new client match current module name, then enable it
if self.current_module == name:
self.switch_idb(new_client)
# inform new client about debugger's dialect
self.dbg_dialect(new_client)
# clean state when a client is quiting
def client_quit(self, s):
self.opened_socks.remove(s)
# remove exiting client from the list of active clients
for idbc in [idbc for idbc in self.idb_clients if (idbc.srv_sock == s)]:
self.idb_clients.remove(idbc)
self.opened_socks.remove(idbc.client_sock)
idbc.close()
# no more clients, let's kill ourself
if not self.idb_clients:
for s in self.srv_socks:
s.close()
sys.exit()
# a new debugger client connects to the dispatcher
def req_new_dbg(self, s, hash):
msg = hash['msg']
if self.current_dbg:
self.dbg_quit()
# promote to dbg client
self.current_dbg = self.sock_to_client(s)
self.current_dbg.client_sock = s
self.idb_clients.remove(self.current_dbg)
self.broadcast("new debugger client: %s" % msg)
# store dbb's dialect
if 'dialect' in hash:
self.current_dialect = hash['dialect']
self.dbg_dialect()
# inform client about debugger's dialect
def dbg_dialect(self, client=None):
msg = "[sync]{\"type\":\"dialect\",\"dialect\":\"%s\"}\n" % self.current_dialect
if client:
client.client_sock.sendall(msg)
else:
for idbc in self.idb_clients:
idbc.client_sock.sendall(msg)
# debugger client disconnect from the dispatcher
def req_dbg_quit(self, s, hash):
msg = hash['msg']
self.broadcast("debugger quit: %s" % msg)
self.dbg_quit()
# clean state when debugger is quiting
def dbg_quit(self):
self.opened_socks.remove(self.current_dbg.srv_sock)
self.current_dbg.close()
self.current_dbg = None
self.current_module = None
self.switch_idb(None)
self.current_dialect = 'unknown'
# handle kill notice from a client, exit properly if no more client
def req_kill(self, s, hash):
self.client_quit(s)
self.broadcast("received a kill notice from client, %d client(s) left" % len(self.idb_clients))
# send list of currently connected idb clients
def req_idb_list(self, s, hash):
clist = "> currently connected idb(s):\n"
if not self.idb_clients:
clist += " no idb client yet\n"
else:
for i in range(len(self.idb_clients)):
clist += (" [%d] %s\n" % (i, self.idb_clients[i].name))
s.sendall(clist)
# manually set current active idb to idb n from idb list
def req_idb_n(self, s, hash):
idb = hash['idb']
try:
idbn = int(idb)
except:
s.sendall("> n should be a decimal value")
return
try:
idbc = self.idb_clients[idbn]
except:
s.sendall("> %d is invalid (see idblist)" % idbn)
return
self.switch_idb(idbc)
s.sendall("> current idb set to %d" % idbn)
# dbg notice that its current module has changed
def req_module(self, s, hash):
modpath = hash['path']
self.current_module = modname = os.path.basename(modpath)
matching = [idbc for idbc in self.idb_clients if (idbc.name.lower() == modname.lower())]
if not self.sync_mode_auto:
self.broadcast("sync_mode_auto off")
return
if len(matching) == 1:
# matched is set as active
self.switch_idb(matching[0])
else:
if not len(matching):
msg = "mod request has no match for %s"
else:
msg = "ambiguous mod request, too many matches for %s"
self.broadcast(msg % modname)
# no match current idb (if existing) is disabled
if self.current_idb.enabled:
self.switch_idb(None)
# sync mode tells if idb switch is automatic or manual
def req_sync_mode(self, s, hash):
mode = hash['auto']
self.broadcast("sync mode auto set to %s" % mode)
self.sync_mode_auto = (mode == "on")
# bc request should be forwarded to all idbs
def req_bc(self, s, hash):
msg = "[sync]%s" % json.dumps(hash)
self.forward_all(msg)
def req_cmd(self, s, hash):
cmd = hash['cmd']
self.current_dbg.client_sock.sendall("%s\n" % cmd)
def err_log(msg):
fd = open("%s.err" % __file__, 'w')
fd.write(msg)
fd.close()
if __name__ == "__main__":
server = DispatcherSrv()
for loc in ['IDB_PATH', 'USERPROFILE', 'HOME']:
if loc in os.environ:
confpath = os.path.join(os.path.realpath(os.environ[loc]), '.sync')
if os.path.exists(confpath):
config = ConfigParser.SafeConfigParser({'host': HOST, 'port': PORT})
config.read(confpath)
HOST = config.get("INTERFACE", 'host')
PORT = config.getint("INTERFACE", 'port')
server.announcement("configuration file loaded")
break
try:
server.bind(HOST, PORT)
except Exception as e:
err_log("dispatcher failed to bind on %s:%s\n-> %s" % (HOST, PORT, repr(e)))
sys.exit()
try:
server.loop()
except Exception as e:
err_log("dispatcher failed\n-> %s" % repr(e))
server.announcement("dispatcher stop")
|
quarkslab/qb-sync
|
ext_ida/dispatcher.py
|
Python
|
gpl-3.0
| 14,364
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
import django.utils.timezone
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0002_remove_content_type_name"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Action",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("actor_object_id", models.CharField(max_length=255)),
("verb", models.CharField(max_length=255)),
("description", models.TextField(null=True, blank=True)),
(
"target_object_id",
models.CharField(max_length=255, null=True, blank=True),
),
(
"action_object_object_id",
models.CharField(max_length=255, null=True, blank=True),
),
("timestamp", models.DateTimeField(default=django.utils.timezone.now)),
("public", models.BooleanField(default=True)),
("data", jsonfield.fields.JSONField(null=True, blank=True)),
(
"action_object_content_type",
models.ForeignKey(
related_name="action_object",
blank=True,
to="contenttypes.ContentType",
null=True,
),
),
(
"actor_content_type",
models.ForeignKey(
related_name="actor", to="contenttypes.ContentType"
),
),
(
"target_content_type",
models.ForeignKey(
related_name="target",
blank=True,
to="contenttypes.ContentType",
null=True,
),
),
],
options={"ordering": ("-timestamp",)},
),
migrations.CreateModel(
name="Follow",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
("object_id", models.CharField(max_length=255)),
(
"actor_only",
models.BooleanField(
default=True,
verbose_name=b"Only follow actions where the object is the target.",
),
),
("started", models.DateTimeField(default=django.utils.timezone.now)),
("content_type", models.ForeignKey(to="contenttypes.ContentType")),
("user", models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterUniqueTogether(
name="follow", unique_together=set([("user", "content_type", "object_id")])
),
]
|
digris/openbroadcast.org
|
website/apps/actstream/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 3,585
|
# Copyright 2007, 2008, 2009 VIFF Development Team.
#
# This file is part of VIFF, the Virtual Ideal Functionality Framework.
#
# VIFF is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License (LGPL) as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# VIFF is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General
# Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with VIFF. If not, see <http://www.gnu.org/licenses/>.
|
kljensen/viff
|
viff/test/__init__.py
|
Python
|
gpl-3.0
| 764
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf.urls.defaults import patterns, url
js_info_dict = {
'packages': ('geonode.maps',),
}
urlpatterns = patterns('geonode.maps.views',
url(r'^$', 'map_list', name='maps_browse'),
url(r'^tag/(?P<slug>[-\w]+?)/$', 'maps_tag', name='maps_browse_tag'),
url(r'^new$', 'new_map', name="new_map"),
url(r'^new/data$', 'new_map_json', name='new_map_json'),
url(r'^(?P<mapid>\d+)$', 'map_detail', name='map_detail'),
url(r'^(?P<mapid>\d+)/view$', 'map_view', name='map_view'),
url(r'^(?P<mapid>\d+)/data$', 'map_json', name='map_json'),
url(r'^(?P<mapid>\d+)/download$', 'map_download', name='map_download'),
url(r'^(?P<mapid>\d+)/wmc$', 'map_wmc', name='map_wmc'),
url(r'^(?P<mapid>\d+)/remove$', 'map_remove', name='map_remove'),
url(r'^(?P<mapid>\d+)/metadata$', 'map_metadata', name='map_metadata'),
url(r'^(?P<mapid>\d+)/embed$', 'map_embed', name='map_embed'),
url(r'^(?P<mapid>\d+)/permissions$', 'map_permissions', name='map_permissions'),
url(r'^(?P<mapid>\d+)/thumbnail$', 'map_thumbnail', name='map_thumbnail'),
url(r'^check/$', 'map_download_check', name='map_download_check'),
url(r'^embed/$', 'map_embed', name='map_embed'),
url(r'^(?P<layername>[^/]*)/attributes', 'maplayer_attributes', name='maplayer_attributes'),
#url(r'^change-poc/(?P<ids>\w+)$', 'change_poc', name='maps_change_poc'),
)
|
frippe12573/geonode
|
geonode/maps/urls.py
|
Python
|
gpl-3.0
| 2,230
|
#!/usr/lib/python2.7
##########################################################################
#
# Copyright (C) 2015-2016 Sam Westreich
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation;
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##########################################################################
#
# DIAMOND_results_filter.py
# Created 1/30/17, this version updated 5/22/17
# Sam Westreich, stwestreich@ucdavis.edu, github.com/transcript
#
# Purpose: This takes a DIAMOND outfile and the RefSeq database and pulls
# out hits to any specific organism, identifying the raw input reads that
# were mapped to that organism.
# Usage:
#
# -I infile specifies the infile (a DIAMOND results file
# in m8 format)
# -SO specific target the organism search term, either genus,
# species, or function.
# -D database file specifies a reference database to search
# against for results
# -O outfile name optional; changes the default outfile name
#
##########################################################################
# imports
import operator, sys, time, gzip, re
# String searching function:
def string_find(usage_term):
for idx, elem in enumerate(sys.argv):
this_elem = elem
next_elem = sys.argv[(idx + 1) % len(sys.argv)]
if elem == usage_term:
return next_elem
# loading starting file
if "-I" in sys.argv:
infile_name = string_find("-I")
else:
sys.exit ("WARNING: infile must be specified using '-I' flag.")
# optional outfile of specific organism results
if "-SO" in sys.argv:
target_org = string_find("-SO")
if '"' in target_org:
for idx, elem in enumerate(sys.argv):
this_elem = elem
next_elem = sys.argv[(idx + 1) % len(sys.argv)]
second_elem = sys.argv[(idx + 2) % len(sys.argv)]
if elem == "-SO":
target_org = next_elem + " " + second_elem
if "-O" in sys.argv:
target_org_outfile = open(string_find("-O"), "w")
else:
target_org_outfile = open(infile_name[:-4] + "_" + target_org + ".tsv", "w")
else:
sys.exit("Need to specify target organism or function to filter by, using -SO flag.")
# loading database file
if "-D" in sys.argv:
db = open(string_find("-D"), "r")
else:
sys.exit("WARNING: database must be specified using '-D' flag.")
# Getting the database assembled
db_org_dictionary = {}
db_id_dictionary = {}
db_line_counter = 0
db_error_counter = 0
t0 = time.time()
for line in db:
if line.startswith(">") == True:
db_line_counter += 1
# line counter to show progress
if db_line_counter % 1000000 == 0: # each million
t95 = time.time()
print (str(db_line_counter) + " lines processed so far in " + str(t95-t0) + " seconds.")
if target_org in line:
splitline = line.split(" ")
# ID, the hit returned in DIAMOND results
db_id = str(splitline[0])[1:].split(" ")[0]
# name and functional description
db_entry = line.split("[", 1)
db_entry = db_entry[0].split(" ", 1)
db_entry = db_entry[1][:-1]
# organism name
if line.count("[") != 1:
splitline = line.split("[")
db_org = splitline[line.count("[")].strip()[:-1]
if db_org[0].isdigit():
split_db_org = db_org.split()
try:
db_org = split_db_org[1] + " " + split_db_org[2]
except IndexError:
try:
db_org = split_db_org[1]
except IndexError:
db_org = splitline[line.count("[")-1]
if db_org[0].isdigit():
split_db_org = db_org.split()
db_org = split_db_org[1] + " " + split_db_org[2]
else:
db_org = line.split("[", 1)
db_org = db_org[1].split()
try:
db_org = str(db_org[0]) + " " + str(db_org[1])
except IndexError:
db_org = line.strip().split("[", 1)
db_org = db_org[1][:-1]
db_error_counter += 1
db_org = re.sub('[^a-zA-Z0-9-_*. ]', '', db_org)
# add to dictionaries
db_org_dictionary[db_id] = db_org
db_id_dictionary[db_id] = db_entry
db.close()
print ("Database is read and set up, moving on to the infile...")
infile = open (infile_name, "r")
# setting up databases
RefSeq_hit_count_db = {}
unique_seq_db = {}
line_counter = 0
hit_counter = 0
t1 = time.time()
# reading through the infile
for line in infile:
line_counter += 1
splitline = line.split("\t")
try:
target_org_outfile.write(splitline[0] + "\t" + splitline[1] + "\t" + db_org_dictionary[splitline[1]] + "\t" + db_id_dictionary[splitline[1]] + "\n")
hit_counter += 1
except KeyError:
continue
if line_counter % 1000000 == 0:
t99 = time.time()
print (str(line_counter)[:-6] + "M lines processed so far in " + str(t99-t1) + " seconds.")
# results stats
t100 = time.time()
print ("Run complete!")
print ("Number of sequences found matching target query, " + target_org + ":\t" + str(hit_counter))
print ("Time elapsed: " + str(t100-t0) + " seconds.")
infile.close()
target_org_outfile.close()
|
transcript/samsa_v2
|
python_scripts/DIAMOND_results_filter.py
|
Python
|
gpl-3.0
| 5,358
|
# Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014, 2015 Rickard Lindberg, Roger Lindberg
#
# This file is part of Timeline.
#
# Timeline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Timeline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Timeline. If not, see <http://www.gnu.org/licenses/>.
import wx
from timelinelib.db.utils import safe_locking
from timelinelib.repositories.dbwrapper import DbWrapperEventRepository
from timelinelib.wxgui.dialogs.editcontainer.view import EditContainerDialog
from timelinelib.wxgui.dialogs.editevent.controller import EditEventDialogController
from timelinelib.wxgui.framework import Dialog
from timelinelib.wxgui.utils import _set_focus_and_select
import timelinelib.wxgui.utils as gui_utils
class EditEventDialog(Dialog):
"""
<BoxSizerVertical>
<StaticBoxSizerVertical label="$(properties_label)" border="ALL" proportion="1">
<FlexGridSizer name="grid_sizer" columns="2" growableColumns="1" border="ALL" proportion="1">
%s
</FlexGridSizer>
</StaticBoxSizerVertical>
<CheckBox
name="add_more_checkbox"
label="$(add_more_label)"
border="LEFT|RIGHT|BOTTOM"
/>
<BoxSizerHorizontal border="LEFT|RIGHT|BOTTOM">
<TwoStateButton
initial_state_label="$(enlarge)"
second_state_label="$(reduce)"
event_EVT_INITIAL_STATE_CLICKED="on_enlarge_click"
event_EVT_SECOND_STATE_CLICKED="on_reduce_click"
/>
<StretchSpacer />
<DialogButtonsOkCancelSizer
event_EVT_BUTTON__ID_OK="on_ok_clicked"
/>
</BoxSizerHorizontal>
</BoxSizerVertical>
"""
TIME_DETAILS_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(when_label)" />
<BoxSizerHorizontal>
<TimePicker
name="start_time"
time_type="$(time_type)"
config="$(config)"
/>
<Spacer />
<StaticText
label="$(to_label)"
name="to_label"
align="ALIGN_CENTER_VERTICAL"
/>
<Spacer />
<TimePicker
name="end_time"
time_type="$(time_type)"
config="$(config)"
/>
</BoxSizerHorizontal>
"""
CHECKBOX_ROW = """
<Spacer />
<FlexGridSizer rows="1">
<CheckBox
name="period_checkbox"
event_EVT_CHECKBOX="on_period_checkbox_changed"
label="$(period_checkbox_text)" />
<CheckBox
name="show_time_checkbox"
event_EVT_CHECKBOX="on_show_time_checkbox_changed"
label="$(show_time_checkbox_text)"
/>
<CheckBox
name="fuzzy_checkbox"
label="$(fuzzy_checkbox_text)"
/>
<CheckBox
name="locked_checkbox"
event_EVT_CHECKBOX="on_locked_checkbox_changed"
label="$(locked_checkbox_text)"
/>
<CheckBox
name="ends_today_checkbox"
label="$(ends_today_checkbox_text)"
/>
</FlexGridSizer>
"""
TEXT_FIELD_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(text_label)" />
<TextCtrl name="name" />
"""
CATEGORY_LISTBOX_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(category_label)" />
<CategoryChoice
name="category_choice"
allow_add="True"
allow_edit="True"
timeline="$(db)"
align="ALIGN_LEFT"
/>
"""
CONTAINER_LISTBOX_ROW = """
<StaticText align="ALIGN_CENTER_VERTICAL" label="$(container_label)" />
<ContainerChoice
name="container_choice"
event_EVT_CONTAINER_CHANGED="on_container_changed"
db="$(db)"
align="ALIGN_LEFT"
/>
"""
NOTEBOOK_ROW = """
<Spacer />
<Notebook name="notebook" style="BK_DEFAULT">
<DescriptionEditor
name="description"
notebookLabel="$(page_description)"
editor="$(self)"
proportion="1"
/>
<IconEditor
name="icon"
notebookLabel="$(page_icon)"
editor="$(self)"
proportion="1"
/>
<AlertEditor
name="alert"
notebookLabel="$(page_alert)"
editor="$(self)"
proportion="1"
/>
<HyperlinkEditor
name="hyperlink"
notebookLabel="$(page_hyperlink)"
editor="$(self)"
proportion="1"
/>
<ProgressEditor
name="progress"
notebookLabel="$(page_progress)"
editor="$(self)"
proportion="1"
/>
</Notebook>
"""
def __init__(self, parent, config, title, db, start=None, end=None, event=None):
self.timeline = db
self.config = config
self.start = start
self.event = event
self._insert_rows_in_correct_order_in_xml()
Dialog.__init__(self, EditEventDialogController, parent, {
"self": self,
"db": db,
"time_type": db.get_time_type(),
"config": config,
"properties_label": _("Event Properties"),
"when_label": _("When:"),
"period_checkbox_text": _("Period"),
"show_time_checkbox_text": _("Show time"),
"fuzzy_checkbox_text": _("Fuzzy"),
"locked_checkbox_text": _("Locked"),
"ends_today_checkbox_text": _("Ends today"),
"to_label": _("to"),
"text_label": _("Text:"),
"category_label": _("Category:"),
"container_label": _("Container:"),
"page_description": _("Description"),
"page_icon": _("Icon"),
"page_alert": _("Alert"),
"page_hyperlink": _("Hyperlink"),
"page_progress": _("Progress"),
"add_more_label": _("Add more events after this one"),
"enlarge": _("&Enlarge"),
"reduce": _("&Reduce"),
}, title=title, style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER)
self.controller.on_init(
config,
db.get_time_type(),
DbWrapperEventRepository(db),
db,
start,
end,
event)
self._make_row_with_notebook_growable()
self.SetMinSize((800, -1))
self.Fit()
self.SetMinSize(self.GetSize())
def GetStart(self):
return self.start_time.get_value()
def SetStart(self, value):
self.start_time.set_value(value)
def GetEnd(self):
return self.end_time.get_value()
def SetEnd(self, value):
self.end_time.set_value(value)
def GetShowPeriod(self):
return self.period_checkbox.GetValue()
def SetShowPeriod(self, value):
self.period_checkbox.SetValue(value)
self.ShowToTime(value)
def ShowToTime(self, show):
self.to_label.Show(show)
self.end_time.Show(show)
def GetShowTime(self):
return self.show_time_checkbox.GetValue()
def SetShowTime(self, value):
if self.timeline.get_time_type().is_date_time_type():
self.show_time_checkbox.SetValue(value)
self.start_time.show_time(value)
self.end_time.show_time(value)
else:
self.show_time_checkbox.Hide()
def GetFuzzy(self):
return self.fuzzy_checkbox.GetValue()
def SetFuzzy(self, value):
self.fuzzy_checkbox.SetValue(value)
def GetLocked(self):
return self.locked_checkbox.GetValue()
def SetLocked(self, value):
self.locked_checkbox.SetValue(value)
def EnableLocked(self, value):
self.locked_checkbox.Enable(value)
def GetEndsToday(self):
return self.ends_today_checkbox.GetValue()
def SetEndsToday(self, value):
self.ends_today_checkbox.SetValue(value)
def EnableEndsToday(self, value):
self.ends_today_checkbox.Enable(value)
def GetName(self):
return self.name.GetValue().strip()
def SetName(self, value):
self.name.SetValue(value)
def GetCategory(self):
return self.category_choice.GetSelectedCategory()
def SetCategory(self, value):
self.category_choice.Populate(select=value)
def GetContainer(self):
return self.container_choice.GetSelectedContainer()
def SetContainer(self, value):
self.container_choice.Fill(value)
def GetEventData(self):
event_data = {}
for data_id, editor in self._get_event_data():
data = editor.get_data()
if data is not None:
event_data[data_id] = editor.get_data()
return event_data
def SetEventData(self, event_data):
for data_id, editor in self._get_event_data():
if data_id in event_data:
data = event_data[data_id]
if data is not None:
editor.set_data(data)
def ClearEventData(self):
for _, editor in self._get_event_data():
editor.clear_data()
def IsAddMoreChecked(self):
return self.add_more_checkbox.GetValue()
def SetShowAddMoreCheckbox(self, value):
self.add_more_checkbox.Show(value)
self.add_more_checkbox.SetValue(False)
self.SetSizerAndFit(self.GetSizer())
def SetFocusOnFirstControl(self):
control = {
"0": self.start_time,
"1": self.period_checkbox,
"2": self.name,
"3": self.category_choice,
"4": self.container_choice,
":": self.notebook,
}[self.config.event_editor_tab_order[0]]
_set_focus_and_select(control)
def DisplayInvalidStart(self, message):
self._display_invalid_input(message, self.start_time)
def DisplayInvalidEnd(self, message):
self._display_invalid_input(message, self.end_time)
def _display_invalid_input(self, message, control):
self.DisplayErrorMessage(message)
_set_focus_and_select(control)
def _get_event_data(self):
return [
("description", self.description),
("alert", self.alert),
("icon", self.icon),
("hyperlink", self.hyperlink),
("progress", self.progress),
]
def _insert_rows_in_correct_order_in_xml(self):
rows_by_key = {
"0": self.TIME_DETAILS_ROW,
"1": self.CHECKBOX_ROW,
"2": self.TEXT_FIELD_ROW,
"3": self.CATEGORY_LISTBOX_ROW,
"4": self.CONTAINER_LISTBOX_ROW,
":": self.NOTEBOOK_ROW,
}
placeholder_content = "".join(rows_by_key[key] for key in self.config.event_editor_tab_order)
self.__doc__ = self.__doc__ % placeholder_content
def _make_row_with_notebook_growable(self):
self.grid_sizer.AddGrowableRow(self.config.event_editor_tab_order.index(":"))
def open_event_editor_for(parent, config, db, handle_db_error, event):
def create_event_editor():
if event.is_container():
title = _("Edit Container")
return EditContainerDialog(parent, title, db, event)
else:
return EditEventDialog(
parent, config, _("Edit Event"), db, event=event)
def edit_function():
gui_utils.show_modal(create_event_editor, handle_db_error)
safe_locking(parent, edit_function)
def open_create_event_editor(parent, config, db, handle_db_error, start=None, end=None):
def create_event_editor():
label = _("Create Event")
return EditEventDialog(parent, config, label, db, start, end)
def edit_function():
gui_utils.show_modal(create_event_editor, handle_db_error)
safe_locking(parent, edit_function)
|
ezequielpereira/Time-Line
|
timelinelib/wxgui/dialogs/editevent/view.py
|
Python
|
gpl-3.0
| 12,695
|
"""
:synopsis: user-centric views for askbot
This module includes all views that are specific to a given user - his or her profile,
and other views showing profile-related information.
Also this module includes the view listing all forum users.
"""
import calendar
import collections
import functools
import datetime
import logging
import operator
import urllib
from django.db.models import Count
from django.db.models import Q
from django.conf import settings as django_settings
from django.contrib.auth.decorators import login_required
from django.core import exceptions as django_exceptions
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseForbidden
from django.http import HttpResponseRedirect, Http404
from django.utils.translation import get_language
from django.utils.translation import string_concat
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils import simplejson
from django.utils.html import strip_tags as strip_all_tags
from django.views.decorators import csrf
from askbot.utils.slug import slugify
from askbot.utils.html import sanitize_html
from askbot.mail import send_mail
from askbot.utils.http import get_request_info
from askbot.utils import decorators
from askbot.utils import functions
from askbot import forms
from askbot import const
from askbot.views import context as view_context
from askbot.conf import settings as askbot_settings
from askbot import models
from askbot import exceptions
from askbot.models.badges import award_badges_signal
from askbot.models.tag import format_personal_group_name
from askbot.search.state_manager import SearchState
from askbot.utils import url_utils
from askbot.utils.loading import load_module
def owner_or_moderator_required(f):
@functools.wraps(f)
def wrapped_func(request, profile_owner, context):
if profile_owner == request.user:
pass
elif request.user.is_authenticated():
if request.user.can_moderate_user(profile_owner):
pass
else:
#redirect to the user profile homepage
#as this one should be accessible to all
return HttpResponseRedirect(request.path)
else:
next_url = request.path + '?' + urllib.urlencode(request.REQUEST)
params = '?next=%s' % urllib.quote(next_url)
return HttpResponseRedirect(url_utils.get_login_url() + params)
return f(request, profile_owner, context)
return wrapped_func
@decorators.ajax_only
def clear_new_notifications(request):
"""clears all new notifications for logged in user"""
user = request.user
if user.is_anonymous():
raise django_exceptions.PermissionDenied
activity_types = const.RESPONSE_ACTIVITY_TYPES_FOR_DISPLAY
activity_types += (
const.TYPE_ACTIVITY_MENTION,
)
post_data = simplejson.loads(request.raw_post_data)
memo_set = models.ActivityAuditStatus.objects.filter(
id__in=post_data['memo_ids'],
activity__activity_type__in=activity_types,
user=user,
)
memo_set.update(status = models.ActivityAuditStatus.STATUS_SEEN)
user.update_response_counts()
@decorators.ajax_only
def delete_notifications(request):
post_data = simplejson.loads(request.raw_post_data)
memo_set = models.ActivityAuditStatus.objects.filter(
id__in=post_data['memo_ids'],
user=request.user
)
memo_set.delete()
request.user.update_response_counts()
def show_users(request, by_group=False, group_id=None, group_slug=None):
"""Users view, including listing of users by group"""
if askbot_settings.GROUPS_ENABLED and not by_group:
default_group = models.Group.objects.get_global_group()
group_slug = slugify(default_group.name)
new_url = reverse('users_by_group',
kwargs={'group_id': default_group.id,
'group_slug': group_slug})
return HttpResponseRedirect(new_url)
users = models.User.objects.exclude(
status='b'
).exclude(
is_active=False
)
group = None
group_email_moderation_enabled = False
user_acceptance_level = 'closed'
user_membership_level = 'none'
if by_group == True:
if askbot_settings.GROUPS_ENABLED == False:
raise Http404
if group_id:
if all((group_id, group_slug)) == False:
return HttpResponseRedirect('groups')
else:
try:
group = models.Group.objects.get(id = group_id)
group_email_moderation_enabled = (
askbot_settings.GROUP_EMAIL_ADDRESSES_ENABLED \
and askbot_settings.CONTENT_MODERATION_MODE == 'premoderation'
)
user_acceptance_level = group.get_openness_level_for_user(
request.user
)
except models.Group.DoesNotExist:
raise Http404
if group_slug == slugify(group.name):
#filter users by full group memberships
#todo: refactor as Group.get_full_members()
full_level = models.GroupMembership.FULL
memberships = models.GroupMembership.objects.filter(
group=group, level=full_level
)
user_ids = memberships.values_list('user__id', flat=True)
users = users.filter(id__in=user_ids)
if request.user.is_authenticated():
membership = request.user.get_group_membership(group)
if membership:
user_membership_level = membership.get_level_display()
else:
group_page_url = reverse(
'users_by_group',
kwargs = {
'group_id': group.id,
'group_slug': slugify(group.name)
}
)
return HttpResponseRedirect(group_page_url)
is_paginated = True
form = forms.ShowUsersForm(request.REQUEST)
form.full_clean()#always valid
sort_method = form.cleaned_data['sort']
page = form.cleaned_data['page']
search_query = form.cleaned_data['query']
if search_query == '':
if sort_method == 'newest':
order_by_parameter = '-date_joined'
elif sort_method == 'last':
order_by_parameter = 'date_joined'
elif sort_method == 'name':
order_by_parameter = 'username'
else:
# default
order_by_parameter = '-reputation'
objects_list = Paginator(
users.order_by(order_by_parameter),
const.USERS_PAGE_SIZE
)
base_url = request.path + '?sort=%s&' % sort_method
else:
sort_method = 'reputation'
matching_users = models.get_users_by_text_query(search_query, users)
objects_list = Paginator(
matching_users.order_by('-reputation'),
const.USERS_PAGE_SIZE
)
base_url = request.path + '?name=%s&sort=%s&' % (search_query, sort_method)
try:
users_page = objects_list.page(page)
except (EmptyPage, InvalidPage):
users_page = objects_list.page(objects_list.num_pages)
paginator_data = {
'is_paginated' : is_paginated,
'pages': objects_list.num_pages,
'current_page_number': page,
'page_object': users_page,
'base_url' : base_url
}
paginator_context = functions.setup_paginator(paginator_data) #
#todo: move to contexts
#extra context for the groups
if askbot_settings.GROUPS_ENABLED:
#todo: cleanup this branched code after groups are migrated to auth_group
user_groups = models.Group.objects.exclude_personal()
if len(user_groups) <= 1:
assert(user_groups[0].name == askbot_settings.GLOBAL_GROUP_NAME)
user_groups = None
group_openness_choices = models.Group().get_openness_choices()
else:
user_groups = None
group_openness_choices = None
data = {
'active_tab': 'users',
'group': group,
'group_email_moderation_enabled': group_email_moderation_enabled,
'group_openness_choices': group_openness_choices,
'page_class': 'users-page',
'paginator_context' : paginator_context,
'search_query' : search_query,
'tab_id' : sort_method,
'user_acceptance_level': user_acceptance_level,
'user_count': users.count(),
'user_groups': user_groups,
'user_membership_level': user_membership_level,
'users' : users_page,
}
return render(request, 'users.html', data)
@csrf.csrf_protect
def user_moderate(request, subject, context):
"""user subview for moderation
"""
moderator = request.user
if moderator.is_authenticated() and not moderator.can_moderate_user(subject):
raise Http404
user_rep_changed = False
user_status_changed = False
user_status_changed_message = _('User status changed')
message_sent = False
email_error_message = None
user_rep_form = forms.ChangeUserReputationForm()
send_message_form = forms.SendMessageForm()
if request.method == 'POST':
if 'change_status' in request.POST or 'hard_block' in request.POST:
user_status_form = forms.ChangeUserStatusForm(
request.POST,
moderator = moderator,
subject = subject
)
if user_status_form.is_valid():
subject.set_status( user_status_form.cleaned_data['user_status'] )
if user_status_form.cleaned_data['delete_content'] == True:
num_deleted = request.user.delete_all_content_authored_by_user(subject)
if num_deleted:
num_deleted_message = ungettext('%d post deleted', '%d posts deleted', num_deleted) % num_deleted
user_status_changed_message = string_concat(user_status_changed_message, ', ', num_deleted_message)
user_status_changed = True
elif 'send_message' in request.POST:
send_message_form = forms.SendMessageForm(request.POST)
if send_message_form.is_valid():
subject_line = send_message_form.cleaned_data['subject_line']
body_text = send_message_form.cleaned_data['body_text']
try:
send_mail(
subject_line = subject_line,
body_text = body_text,
recipient_list = [subject.email],
headers={'Reply-to':moderator.email},
raise_on_failure = True
)
message_sent = True
except exceptions.EmailNotSent, e:
email_error_message = unicode(e)
send_message_form = forms.SendMessageForm()
else:
reputation_change_type = None
if 'subtract_reputation' in request.POST:
rep_change_type = 'subtract'
elif 'add_reputation' in request.POST:
rep_change_type = 'add'
else:
raise Http404
user_rep_form = forms.ChangeUserReputationForm(request.POST)
if user_rep_form.is_valid():
rep_delta = user_rep_form.cleaned_data['user_reputation_delta']
comment = user_rep_form.cleaned_data['comment']
if rep_change_type == 'subtract':
rep_delta = -1 * rep_delta
moderator.moderate_user_reputation(
user = subject,
reputation_change = rep_delta,
comment = comment,
timestamp = datetime.datetime.now(),
)
#reset form to preclude accidentally repeating submission
user_rep_form = forms.ChangeUserReputationForm()
user_rep_changed = True
#need to re-initialize the form even if it was posted, because
#initial values will most likely be different from the previous
user_status_form = forms.ChangeUserStatusForm(
moderator = moderator,
subject = subject
)
data = {
'active_tab': 'users',
'page_class': 'user-profile-page',
'tab_name': 'moderation',
'page_title': _('moderate user'),
'change_user_status_form': user_status_form,
'change_user_reputation_form': user_rep_form,
'send_message_form': send_message_form,
'message_sent': message_sent,
'email_error_message': email_error_message,
'user_rep_changed': user_rep_changed,
'user_status_changed': user_status_changed,
'user_status_changed_message': user_status_changed_message
}
context.update(data)
return render(request, 'user_profile/user_moderate.html', context)
#non-view function
def set_new_email(user, new_email, nomessage=False):
if new_email != user.email:
user.email = new_email
user.email_isvalid = False
user.save()
#if askbot_settings.EMAIL_VALIDATION == True:
# send_new_email_key(user,nomessage=nomessage)
def need_to_invalidate_post_caches(user, form):
"""a utility function for the edit user profile view"""
new_country = (form.cleaned_data.get('country') != user.country)
new_show_country = (form.cleaned_data.get('show_country') != user.show_country)
new_username = (form.cleaned_data.get('username') != user.username)
return (new_country or new_show_country or new_username)
@login_required
@csrf.csrf_protect
def edit_user(request, id):
"""View that allows to edit user profile.
This view is accessible to profile owners or site administrators
"""
user = get_object_or_404(models.User, id=id)
if not(request.user.pk == user.pk or request.user.is_superuser):
raise Http404
if request.method == "POST":
form = forms.EditUserForm(user, request.POST)
if form.is_valid():
if 'email' in form.cleaned_data and askbot_settings.EDITABLE_EMAIL:
new_email = sanitize_html(form.cleaned_data['email'])
set_new_email(user, new_email)
prev_username = user.username
if askbot_settings.EDITABLE_SCREEN_NAME:
new_username = strip_all_tags(form.cleaned_data['username'])
if user.username != new_username:
group = user.get_personal_group()
user.username = new_username
group.name = format_personal_group_name(user)
group.save()
#Maybe we need to clear post caches, b/c
#author info may need to be updated on posts and thread summaries
if need_to_invalidate_post_caches(user, form):
#get threads where users participated
thread_ids = models.Post.objects.filter(
Q(author=user) | Q(last_edited_by=user)
).values_list(
'thread__id', flat=True
).distinct()
threads = models.Thread.objects.filter(id__in=thread_ids)
for thread in threads:
#for each thread invalidate cache keys for posts, etc
thread.invalidate_cached_data(lazy=True)
user.real_name = strip_all_tags(form.cleaned_data['realname'])
user.website = sanitize_html(form.cleaned_data['website'])
user.location = sanitize_html(form.cleaned_data['city'])
user.date_of_birth = form.cleaned_data.get('birthday', None)
user.about = sanitize_html(form.cleaned_data['about'])
user.country = form.cleaned_data['country']
user.show_country = form.cleaned_data['show_country']
user.show_marked_tags = form.cleaned_data['show_marked_tags']
user.save()
# send user updated signal if full fields have been updated
award_badges_signal.send(None,
event = 'update_user_profile',
actor = user,
context_object = user
)
return HttpResponseRedirect(user.get_profile_url())
else:
form = forms.EditUserForm(user)
data = {
'active_tab': 'users',
'page_class': 'user-profile-edit-page',
'form' : form,
'marked_tags_setting': askbot_settings.MARKED_TAGS_ARE_PUBLIC_WHEN,
'support_custom_avatars': ('avatar' in django_settings.INSTALLED_APPS),
'view_user': user,
}
return render(request, 'user_profile/user_edit.html', data)
def user_stats(request, user, context):
question_filter = {}
if request.user != user:
question_filter['is_anonymous'] = False
if askbot_settings.CONTENT_MODERATION_MODE == 'premoderation':
question_filter['approved'] = True
#
# Questions
#
questions_qs = user.posts.get_questions(
user=request.user
).filter(
**question_filter
).order_by(
'-points', '-thread__last_activity_at'
).select_related(
'thread', 'thread__last_activity_by'
)
q_paginator = Paginator(questions_qs, const.USER_POSTS_PAGE_SIZE)
questions = q_paginator.page(1).object_list
question_count = q_paginator.count
q_paginator_context = functions.setup_paginator({
'is_paginated' : (question_count > const.USER_POSTS_PAGE_SIZE),
'pages': q_paginator.num_pages,
'current_page_number': 1,
'page_object': q_paginator.page(1),
'base_url' : '?' #this paginator will be ajax
})
#
# Top answers
#
a_paginator = user.get_top_answers_paginator(request.user)
top_answers = a_paginator.page(1).object_list
top_answer_count = a_paginator.count
a_paginator_context = functions.setup_paginator({
'is_paginated' : (top_answer_count > const.USER_POSTS_PAGE_SIZE),
'pages': a_paginator.num_pages,
'current_page_number': 1,
'page_object': a_paginator.page(1),
'base_url' : '?' #this paginator will be ajax
})
#
# Votes
#
up_votes = models.Vote.objects.get_up_vote_count_from_user(user)
down_votes = models.Vote.objects.get_down_vote_count_from_user(user)
votes_today = models.Vote.objects.get_votes_count_today_from_user(user)
votes_total = askbot_settings.MAX_VOTES_PER_USER_PER_DAY
#
# Tags
#
# INFO: There's bug in Django that makes the following query kind of broken (GROUP BY clause is problematic):
# http://stackoverflow.com/questions/7973461/django-aggregation-does-excessive-group-by-clauses
# Fortunately it looks like it returns correct results for the test data
user_tags = models.Tag.objects.filter(
threads__posts__author=user,
language_code=get_language()
).distinct().\
annotate(user_tag_usage_count=Count('threads')).\
order_by('-user_tag_usage_count')[:const.USER_VIEW_DATA_SIZE]
user_tags = list(user_tags) # evaluate
when = askbot_settings.MARKED_TAGS_ARE_PUBLIC_WHEN
if when == 'always' or \
(when == 'when-user-wants' and user.show_marked_tags == True):
#refactor into: user.get_marked_tag_names('good'/'bad'/'subscribed')
interesting_tag_names = user.get_marked_tag_names('good')
ignored_tag_names = user.get_marked_tag_names('bad')
subscribed_tag_names = user.get_marked_tag_names('subscribed')
else:
interesting_tag_names = None
ignored_tag_names = None
subscribed_tag_names = None
# tags = models.Post.objects.filter(author=user).values('id', 'thread', 'thread__tags')
# post_ids = set()
# thread_ids = set()
# tag_ids = set()
# for t in tags:
# post_ids.add(t['id'])
# thread_ids.add(t['thread'])
# tag_ids.add(t['thread__tags'])
# if t['thread__tags'] == 11:
# print t['thread'], t['id']
# import ipdb; ipdb.set_trace()
#
# Badges/Awards (TODO: refactor into Managers/QuerySets when a pattern emerges; Simplify when we get rid of Question&Answer models)
#
post_type = ContentType.objects.get_for_model(models.Post)
user_awards = models.Award.objects.filter(user=user).select_related('badge')
awarded_post_ids = []
for award in user_awards:
if award.content_type_id == post_type.id:
awarded_post_ids.append(award.object_id)
awarded_posts = models.Post.objects.filter(id__in=awarded_post_ids)\
.select_related('thread') # select related to avoid additional queries in Post.get_absolute_url()
awarded_posts_map = {}
for post in awarded_posts:
awarded_posts_map[post.id] = post
badges_dict = collections.defaultdict(list)
for award in user_awards:
if award.badge.is_enabled() == False:
continue
# Fetch content object
if award.content_type_id == post_type.id:
#here we go around a possibility of awards
#losing the content objects when the content
#objects are deleted for some reason
awarded_post = awarded_posts_map.get(award.object_id, None)
if awarded_post is not None:
#protect from awards that are associated with deleted posts
award.content_object = awarded_post
award.content_object_is_post = True
else:
award.content_object_is_post = False
else:
award.content_object_is_post = False
# "Assign" to its Badge
badges_dict[award.badge].append(award)
badges = badges_dict.items()
badges.sort(key=operator.itemgetter(1), reverse=True)
user_groups = models.Group.objects.get_for_user(user = user)
user_groups = user_groups.exclude_personal()
global_group = models.Group.objects.get_global_group()
user_groups = user_groups.exclude(name=global_group.name)
if request.user.pk == user.pk:
groups_membership_info = user.get_groups_membership_info(user_groups)
else:
groups_membership_info = collections.defaultdict()
data = {
'active_tab':'users',
'page_class': 'user-profile-page',
'support_custom_avatars': ('avatar' in django_settings.INSTALLED_APPS),
'tab_name' : 'stats',
'page_title' : _('user profile overview'),
'questions' : questions,
'question_count': question_count,
'q_paginator_context': q_paginator_context,
'top_answers': top_answers,
'top_answer_count': top_answer_count,
'a_paginator_context': a_paginator_context,
'page_size': const.USER_POSTS_PAGE_SIZE,
'up_votes' : up_votes,
'down_votes' : down_votes,
'total_votes': up_votes + down_votes,
'votes_today_left': votes_total - votes_today,
'votes_total_per_day': votes_total,
'user_tags' : user_tags,
'user_groups': user_groups,
'groups_membership_info': groups_membership_info,
'interesting_tag_names': interesting_tag_names,
'ignored_tag_names': ignored_tag_names,
'subscribed_tag_names': subscribed_tag_names,
'badges': badges,
'total_badges' : len(badges),
}
context.update(data)
extra_context = view_context.get_extra(
'ASKBOT_USER_PROFILE_PAGE_EXTRA_CONTEXT',
request,
context
)
context.update(extra_context)
return render(request, 'user_profile/user_stats.html', context)
def user_recent(request, user, context):
def get_type_name(type_id):
for item in const.TYPE_ACTIVITY:
if type_id in item:
return item[1]
class Event(object):
is_badge = False
def __init__(self, time, type, title, summary, url):
self.time = time
self.type = get_type_name(type)
self.type_id = type
self.title = title
self.summary = summary
slug_title = slugify(title)
self.title_link = url
class AwardEvent(object):
is_badge = True
def __init__(self, time, type, content_object, badge):
self.time = time
self.type = get_type_name(type)
self.content_object = content_object
self.badge = badge
# TODO: Don't process all activities here for the user, only a subset ([:const.USER_VIEW_DATA_SIZE])
activity_types = (
const.TYPE_ACTIVITY_ASK_QUESTION,
const.TYPE_ACTIVITY_ANSWER,
const.TYPE_ACTIVITY_COMMENT_QUESTION,
const.TYPE_ACTIVITY_COMMENT_ANSWER,
const.TYPE_ACTIVITY_UPDATE_QUESTION,
const.TYPE_ACTIVITY_UPDATE_ANSWER,
const.TYPE_ACTIVITY_MARK_ANSWER,
const.TYPE_ACTIVITY_PRIZE
)
#1) get source of information about activities
activity_objects = models.Activity.objects.filter(
user=user,
activity_type__in=activity_types
).order_by(
'-active_at'
)[:const.USER_VIEW_DATA_SIZE]
#2) load content objects ("c.objects) for each activity
# the return value is dictionary where activity id's are keys
content_objects_by_activity = activity_objects.fetch_content_objects_dict()
#a list of digest objects, suitable for display
#the number of activities to show is not guaranteed to be
#const.USER_VIEW_DATA_TYPE, because we don't show activity
#for deleted content
activities = []
for activity in activity_objects:
content = content_objects_by_activity.get(activity.id)
if content is None:
continue
if activity.activity_type == const.TYPE_ACTIVITY_PRIZE:
event = AwardEvent(
time=content.awarded_at,
type=activity.activity_type,
content_object=content.content_object,
badge=content.badge,
)
else:
event = Event(
time=activity.active_at,
type=activity.activity_type,
title=content.thread.title,
summary=content.summary,
url=content.get_absolute_url()
)
activities.append(event)
activities.sort(key=operator.attrgetter('time'), reverse=True)
data = {
'active_tab': 'users',
'page_class': 'user-profile-page',
'tab_name' : 'recent',
'page_title' : _('profile - recent activity'),
'activities' : activities
}
context.update(data)
return render(request, 'user_profile/user_recent.html', context)
#not a view - no direct url route here, called by `user_responses`
@csrf.csrf_protect
def show_group_join_requests(request, user, context):
"""show group join requests to admins who belong to the group"""
if request.user.is_administrator_or_moderator() is False:
raise Http404
#get group to which user belongs
groups = request.user.get_groups()
#construct a dictionary group id --> group object
#to avoid loading group via activity content object
groups_dict = dict([(group.id, group) for group in groups])
#get join requests for those groups
group_content_type = ContentType.objects.get_for_model(models.Group)
join_requests = models.Activity.objects.filter(
activity_type=const.TYPE_ACTIVITY_ASK_TO_JOIN_GROUP,
content_type=group_content_type,
object_id__in=groups_dict.keys()
).order_by('-active_at')
data = {
'active_tab':'users',
'inbox_section': 'group-join-requests',
'page_class': 'user-profile-page',
'tab_name' : 'join_requests',
'page_title' : _('profile - moderation'),
'groups_dict': groups_dict,
'join_requests': join_requests
}
context.update(data)
return render(request, 'user_inbox/group_join_requests.html', context)
@owner_or_moderator_required
def user_responses(request, user, context):
"""
We list answers for question, comments, and
answer accepted by others for this user.
as well as mentions of the user
user - the profile owner
the view has two sub-views - "forum" - i.e. responses
and "flags" - moderation items for mods only
"""
#0) temporary, till urls are fixed: update context
# to contain response counts for all sub-sections
context.update(view_context.get_for_inbox(request.user))
#1) select activity types according to section
section = request.GET.get('section', 'forum')
if section == 'forum':
activity_types = const.RESPONSE_ACTIVITY_TYPES_FOR_DISPLAY
activity_types += (const.TYPE_ACTIVITY_MENTION,)
elif section == 'join_requests':
return show_group_join_requests(request, user, context)
elif section == 'messages':
if request.user != user:
if askbot_settings.ADMIN_INBOX_ACCESS_ENABLED == False:
raise Http404
elif not(request.user.is_moderator() or request.user.is_administrator()):
raise Http404
from group_messaging.views import SendersList, ThreadsList
context.update(SendersList().get_context(request))
context.update(ThreadsList().get_context(request, user))
data = {
'inbox_threads_count': context['threads_count'],#a hackfor the inbox count
'active_tab':'users',
'page_class': 'user-profile-page',
'tab_name' : 'inbox',
'inbox_section': section,
'page_title' : _('profile - messages')
}
context.update(data)
if 'thread_id' in request.GET:
from group_messaging.models import Message
from group_messaging.views import ThreadDetails
try:
thread_id = request.GET['thread_id']
context.update(ThreadDetails().get_context(request, thread_id))
context['group_messaging_template_name'] = \
'group_messaging/home_thread_details.html'
except Message.DoesNotExist:
raise Http404
else:
context['group_messaging_template_name'] = 'group_messaging/home.html'
#here we take shortcut, because we don't care about
#all the extra context loaded below
return render(request, 'user_inbox/messages.html', context)
else:
raise Http404
#2) load the activity notifications according to activity types
#todo: insert pagination code here
memo_set = request.user.get_notifications(activity_types)
memo_set = memo_set.select_related(
'activity',
'activity__content_type',
'activity__question__thread',
'activity__user',
'activity__user__gravatar',
).order_by(
'-activity__active_at'
)[:const.USER_VIEW_DATA_SIZE]
#3) "package" data for the output
response_list = list()
for memo in memo_set:
obj = memo.activity.content_object
if obj is None:
memo.activity.delete()
continue#a temp plug due to bug in the comment deletion
act = memo.activity
act_user = act.user
act_message = act.get_activity_type_display()
act_type = 'edit'
response = {
'id': memo.id,
'timestamp': act.active_at,
'user': act_user,
'is_new': memo.is_new(),
'url': act.get_absolute_url(),
'snippet': act.get_snippet(),
'title': act.question.thread.title,
'message_type': act_message,
'memo_type': act_type,
'question_id': act.question.id,
'followup_messages': list(),
'content': obj.html or obj.text,
}
response_list.append(response)
#4) sort by response id
response_list.sort(lambda x,y: cmp(y['question_id'], x['question_id']))
#5) group responses by thread (response_id is really the question post id)
last_question_id = None #flag to know if the question id is different
filtered_message_list = list()
for message in response_list:
#todo: group responses by the user as well
if message['question_id'] == last_question_id:
original_message = dict.copy(filtered_message_list[-1])
original_message['followup_messages'].append(message)
filtered_message_list[-1] = original_message
else:
filtered_message_list.append(message)
last_question_id = message['question_id']
#6) sort responses by time
filtered_message_list.sort(lambda x,y: cmp(y['timestamp'], x['timestamp']))
reject_reasons = models.PostFlagReason.objects.all().order_by('title')
data = {
'active_tab':'users',
'page_class': 'user-profile-page',
'tab_name' : 'inbox',
'inbox_section': section,
'page_title' : _('profile - responses'),
'post_reject_reasons': reject_reasons,
'messages' : filtered_message_list,
}
context.update(data)
template = 'user_inbox/responses.html'
return render(request, template, context)
def user_network(request, user, context):
if 'followit' not in django_settings.INSTALLED_APPS:
raise Http404
data = {
'followed_users': user.get_followed_users(),
'followers': user.get_followers(),
'page_title' : _('profile - network'),
'tab_name': 'network',
}
context.update(data)
return render(request, 'user_profile/user_network.html', context)
@owner_or_moderator_required
def user_votes(request, user, context):
all_votes = list(models.Vote.objects.filter(user=user))
votes = []
for vote in all_votes:
post = vote.voted_post
if post.is_question():
vote.title = post.thread.title
vote.question_id = post.id
vote.answer_id = 0
votes.append(vote)
elif post.is_answer():
vote.title = post.thread.title
vote.question_id = post.thread._question_post().id
vote.answer_id = post.id
votes.append(vote)
votes.sort(key=operator.attrgetter('id'), reverse=True)
data = {
'active_tab':'users',
'page_class': 'user-profile-page',
'tab_name' : 'votes',
'page_title' : _('profile - votes'),
'votes' : votes[:const.USER_VIEW_DATA_SIZE]
}
context.update(data)
return render(request, 'user_profile/user_votes.html', context)
def user_reputation(request, user, context):
reputes = models.Repute.objects.filter(user=user).select_related('question', 'question__thread', 'user').order_by('-reputed_at')
# prepare data for the graph - last values go in first
rep_list = ['[%s,%s]' % (calendar.timegm(datetime.datetime.now().timetuple()) * 1000, user.reputation)]
for rep in reputes:
rep_list.append('[%s,%s]' % (calendar.timegm(rep.reputed_at.timetuple()) * 1000, rep.reputation))
reps = ','.join(rep_list)
reps = '[%s]' % reps
data = {
'active_tab':'users',
'page_class': 'user-profile-page',
'tab_name': 'reputation',
'page_title': _("Profile - User's Karma"),
'reputation': reputes,
'reps': reps
}
context.update(data)
return render(request, 'user_profile/user_reputation.html', context)
def user_favorites(request, user, context):
favorite_threads = user.user_favorite_questions.values_list('thread', flat=True)
questions_qs = models.Post.objects.filter(
post_type='question',
thread__in=favorite_threads
).select_related(
'thread', 'thread__last_activity_by'
).order_by(
'-points', '-thread__last_activity_at'
)[:const.USER_VIEW_DATA_SIZE]
q_paginator = Paginator(questions_qs, const.USER_POSTS_PAGE_SIZE)
page = forms.PageField().clean(request.GET.get('page'))
questions = q_paginator.page(page).object_list
question_count = q_paginator.count
q_paginator_context = functions.setup_paginator({
'is_paginated' : (question_count > const.USER_POSTS_PAGE_SIZE),
'pages': q_paginator.num_pages,
'current_page_number': page,
'page_object': q_paginator.page(page),
'base_url' : request.path + '?sort=favorites&' #this paginator will be ajax
})
data = {
'active_tab':'users',
'page_class': 'user-profile-page',
'tab_name' : 'favorites',
'page_title' : _('profile - favorites'),
'questions' : questions,
'q_paginator_context': q_paginator_context,
'question_count': question_count,
'page_size': const.USER_POSTS_PAGE_SIZE
}
context.update(data)
return render(request, 'user_profile/user_favorites.html', context)
@csrf.csrf_protect
@decorators.ajax_only
@decorators.post_only
def user_set_primary_language(request):
if request.user.is_anonymous():
raise django_exceptions.PermissionDenied
form = forms.LanguageForm(request.POST)
if form.is_valid():
request.user.set_primary_language(form.cleaned_data['language'])
request.user.save()
@csrf.csrf_protect
def user_select_languages(request, id=None, slug=None):
if request.method != 'POST':
raise django_exceptions.PermissionDenied
user = get_object_or_404(models.User, id=id)
if not(request.user.id == user.id or request.user.is_administrator()):
raise django_exceptions.PermissionDenied
form = forms.LanguagePrefsForm(request.POST)
if form.is_valid():
user.set_languages(form.cleaned_data['languages'])
user.set_primary_language(form.cleaned_data['primary_language'])
user.save()
redirect_url = reverse(
'user_subscriptions',
kwargs={'id': user.id, 'slug': slugify(user.username)}
)
return HttpResponseRedirect(redirect_url)
@owner_or_moderator_required
@csrf.csrf_protect
def user_email_subscriptions(request, user, context):
logging.debug(get_request_info(request))
if request.method == 'POST':
email_feeds_form = forms.EditUserEmailFeedsForm(request.POST)
tag_filter_form = forms.TagFilterSelectionForm(request.POST, instance=user)
if email_feeds_form.is_valid() and tag_filter_form.is_valid():
action_status = None
tag_filter_saved = tag_filter_form.save()
if tag_filter_saved:
action_status = _('changes saved')
if 'save' in request.POST:
feeds_saved = email_feeds_form.save(user)
if feeds_saved:
action_status = _('changes saved')
elif 'stop_email' in request.POST:
email_stopped = email_feeds_form.reset().save(user)
initial_values = forms.EditUserEmailFeedsForm.NO_EMAIL_INITIAL
email_feeds_form = forms.EditUserEmailFeedsForm(initial=initial_values)
if email_stopped:
action_status = _('email updates canceled')
else:
#user may have been created by some app that does not know
#about the email subscriptions, in that case the call below
#will add any subscription settings that are missing
#using the default frequencies
user.add_missing_askbot_subscriptions()
#initialize the form
email_feeds_form = forms.EditUserEmailFeedsForm()
email_feeds_form.set_initial_values(user)
tag_filter_form = forms.TagFilterSelectionForm(instance=user)
action_status = None
data = {
'active_tab': 'users',
'subscribed_tag_names': user.get_marked_tag_names('subscribed'),
'page_class': 'user-profile-page',
'tab_name': 'email_subscriptions',
'page_title': _('profile - email subscriptions'),
'email_feeds_form': email_feeds_form,
'tag_filter_selection_form': tag_filter_form,
'action_status': action_status,
'user_languages': user.languages.split()
}
context.update(data)
#todo: really need only if subscribed tags are enabled
context.update(view_context.get_for_tag_editor())
return render(
request,
'user_profile/user_email_subscriptions.html',
context
)
@csrf.csrf_protect
def user_custom_tab(request, user, context):
"""works only if `ASKBOT_CUSTOM_USER_PROFILE_TAB`
setting in the ``settings.py`` is properly configured"""
tab_settings = django_settings.ASKBOT_CUSTOM_USER_PROFILE_TAB
module_path = tab_settings['CONTENT_GENERATOR']
content_generator = load_module(module_path)
page_title = _('profile - %(section)s') % \
{'section': tab_settings['NAME']}
context.update({
'custom_tab_content': content_generator(request, user),
'tab_name': tab_settings['SLUG'],
'page_title': page_title
})
return render(request, 'user_profile/custom_tab.html', context)
USER_VIEW_CALL_TABLE = {
'stats': user_stats,
'recent': user_recent,
'inbox': user_responses,
'network': user_network,
'reputation': user_reputation,
'favorites': user_favorites,
'votes': user_votes,
'email_subscriptions': user_email_subscriptions,
'moderation': user_moderate,
}
CUSTOM_TAB = getattr(django_settings, 'ASKBOT_CUSTOM_USER_PROFILE_TAB', None)
if CUSTOM_TAB:
CUSTOM_SLUG = CUSTOM_TAB['SLUG']
USER_VIEW_CALL_TABLE[CUSTOM_SLUG] = user_custom_tab
#todo: rename this function - variable named user is everywhere
def user(request, id, slug=None, tab_name=None):
"""Main user view function that works as a switchboard
id - id of the profile owner
todo: decide what to do with slug - it is not used
in the code in any way
"""
profile_owner = get_object_or_404(models.User, id = id)
if profile_owner.is_blocked():
if request.user.is_anonymous() \
or not request.user.is_administrator_or_moderator():
raise Http404
if slugify(profile_owner.username) != slug:
view_url = profile_owner.get_profile_url() + '?' \
+ urllib.urlencode(request.REQUEST)
return HttpResponseRedirect(view_url)
if not tab_name:
tab_name = request.GET.get('sort', 'stats')
can_show_karma = request.user.can_see_karma(profile_owner)
if can_show_karma == False and tab_name == 'reputation':
raise Http404
user_view_func = USER_VIEW_CALL_TABLE.get(tab_name, user_stats)
search_state = SearchState(
scope=None,
sort=None,
query=None,
tags=None,
author=None,
page=None,
page_size=const.USER_POSTS_PAGE_SIZE,
user_logged_in=profile_owner.is_authenticated(),
)
context = {
'view_user': profile_owner,
'can_show_karma': can_show_karma,
'search_state': search_state,
'user_follow_feature_on': ('followit' in django_settings.INSTALLED_APPS),
}
if CUSTOM_TAB:
context['custom_tab_name'] = CUSTOM_TAB['NAME']
context['custom_tab_slug'] = CUSTOM_TAB['SLUG']
return user_view_func(request, profile_owner, context)
def groups(request, id = None, slug = None):
"""output groups page
"""
if askbot_settings.GROUPS_ENABLED == False:
raise Http404
#6 lines of input cleaning code
if request.user.is_authenticated():
scope = request.GET.get('sort', 'all-groups')
if scope not in ('all-groups', 'my-groups'):
scope = 'all-groups'
else:
scope = 'all-groups'
if scope == 'all-groups':
groups = models.Group.objects.all()
else:
groups = models.Group.objects.get_for_user(
user=request.user
)
groups = groups.exclude_personal()
groups = groups.annotate(users_count=Count('user'))
user_can_add_groups = request.user.is_authenticated() and \
request.user.is_administrator_or_moderator()
groups_membership_info = collections.defaultdict()
if request.user.is_authenticated():
#collect group memberhship information
groups_membership_info = request.user.get_groups_membership_info(groups)
data = {
'groups': groups,
'groups_membership_info': groups_membership_info,
'user_can_add_groups': user_can_add_groups,
'active_tab': 'groups',#todo vars active_tab and tab_name are too similar
'tab_name': scope,
'page_class': 'groups-page'
}
return render(request, 'groups.html', data)
|
openpgh/askpgh
|
askbot/views/users.py
|
Python
|
gpl-3.0
| 47,077
|
# -*- coding: utf8 -*-
"""
File Operations
"""
from __future__ import with_statement
import os
import io
import shutil
import hashlib
import zlib
import base64
import tempfile
import time
import struct
import sympy
import mpmath
import math
from mathics.core.expression import (Expression, Real, Complex, String, Symbol,
from_python, Integer, BoxError,
valid_context_name)
from mathics.builtin.base import (Builtin, Predefined, BinaryOperator,
PrefixOperator)
from mathics.settings import ROOT_DIR
INITIAL_DIR = os.getcwd()
HOME_DIR = os.path.expanduser('~')
SYS_ROOT_DIR = '/' if os.name == 'posix' else '\\'
TMP_DIR = tempfile.gettempdir()
DIRECTORY_STACK = [INITIAL_DIR]
INPUT_VAR = ""
INPUTFILE_VAR = ""
PATH_VAR = [HOME_DIR, os.path.join(ROOT_DIR, 'data'),
os.path.join(ROOT_DIR, 'packages')]
def path_search(filename):
# For names of the form "name`", search for name.mx and name.m
if filename[-1] == '`':
filename = filename[:-1].replace('`', os.path.sep)
for ext in ['.mx', '.m']:
result = path_search(filename + ext)
if result is not None:
filename = None
break
if filename is not None:
result = None
for p in PATH_VAR + ['']:
path = os.path.join(p, filename)
if os.path.exists(path):
result = path
break
# If FindFile resolves to a dir, search within for Kernel/init.m and init.m
if result is not None and os.path.isdir(result):
for ext in [os.path.join('Kernel', 'init.m'), 'init.m']:
tmp = os.path.join(result, ext)
if os.path.isfile(tmp):
return tmp
return result
def count():
n = 0
while True:
yield n
n += 1
NSTREAMS = count() # use next(NSTREAMS)
STREAMS = []
def _channel_to_stream(channel, mode='r'):
if isinstance(channel, String):
name = channel.get_string_value()
opener = mathics_open(name, mode)
opener.__enter__()
n = opener.n
if mode in ['r', 'rb']:
head = 'InputStream'
elif mode in ['w', 'a', 'wb', 'ab']:
head = 'OutputStream'
else:
raise ValueError("Unknown format {0}".format(mode))
return Expression(head, channel, Integer(n))
elif channel.has_form('InputStream', 2):
return channel
elif channel.has_form('OutputStream', 2):
return channel
else:
return None
def _lookup_stream(n=None):
if n is None:
return None
elif n is not None:
try:
return STREAMS[n]
except IndexError:
return None
class mathics_open:
def __init__(self, name, mode='r'):
self.name = name
self.mode = mode
if mode not in ['r', 'w', 'a', 'rb', 'wb', 'ab']:
raise ValueError("Can't handle mode {0}".format(mode))
def __enter__(self):
# find path
path = path_search(self.name)
if path is None and self.mode in ['w', 'a', 'wb', 'ab']:
path = self.name
if path is None:
raise IOError
# determine encoding
encoding = 'utf-8' if 'b' not in self.mode else None
# open the stream
stream = io.open(path, self.mode, encoding=encoding)
# build the Expression
n = next(NSTREAMS)
if self.mode in ['r', 'rb']:
self.expr = Expression(
'InputStream', String(path), Integer(n))
elif self.mode in ['w', 'a', 'wb', 'ab']:
self.expr = Expression(
'OutputStream', String(path), Integer(n))
else:
raise IOError
STREAMS.append(stream)
self.n = n
return stream
def __exit__(self, type, value, traceback):
strm = STREAMS[self.n]
if strm is not None:
strm.close()
STREAMS[self.n] = None
class InitialDirectory(Predefined):
"""
<dl>
<dt>'$InitialDirectory'
<dd>returns the directory from which \Mathics was started.
</dl>
>> $InitialDirectory
= ...
"""
name = '$InitialDirectory'
def evaluate(self, evaluation):
global INITIAL_DIR
return String(INITIAL_DIR)
class InstallationDirectory(Predefined):
"""
<dl>
<dt>'$InstallationDirectory'
<dd>returns the directory in which \Mathics was installed.
</dl>
>> $InstallationDirectory
= ...
"""
name = '$InstallationDirectory'
def evaluate(self, evaluation):
global ROOT_DIR
return String(ROOT_DIR)
class HomeDirectory(Predefined):
"""
<dl>
<dt>'$HomeDirectory'
<dd>returns the users HOME directory.
</dl>
>> $HomeDirectory
= ...
"""
name = '$HomeDirectory'
attributes = ('Protected')
def evaluate(self, evaluation):
global HOME_DIR
return String(HOME_DIR)
class RootDirectory(Predefined):
"""
<dl>
<dt>'$RootDirectory'
<dd>returns the system root directory.
</dl>
>> $RootDirectory
= ...
"""
name = '$RootDirectory'
attributes = ('Protected')
def evaluate(self, evaluation):
global SYS_ROOT_DIR
return String(SYS_ROOT_DIR)
class TemporaryDirectory(Predefined):
"""
<dl>
<dt>'$TemporaryDirectory'
<dd>returns the directory used for temporary files.
</dl>
>> $TemporaryDirectory
= ...
"""
name = '$TemporaryDirectory'
def evaluate(self, evaluation):
return String(TMP_DIR)
class Input(Predefined):
"""
<dl>
<dt>'$Input'
<dd>is the name of the stream from which input is currently being read.
</dl>
>> $Input
=
"""
attributes = ('Protected', 'ReadProtected')
name = '$Input'
def evaluate(self, evaluation):
global INPUT_VAR
return String(INPUT_VAR)
class InputFileName(Predefined):
"""
<dl>
<dt>'$InputFileName'
<dd>is the name of the file from which input is currently being read.
</dl>
While in interactive mode, '$InputFileName' is "".
>> $InputFileName
=
"""
name = '$InputFileName'
def evaluate(self, evaluation):
global INPUTFILE_VAR
return String(INPUTFILE_VAR)
class PathnameSeparator(Predefined):
"""
<dl>
<dt>'$PathnameSeparator'
<dd>returns a string for the seperator in paths.
</dl>
>> $PathnameSeparator
= ...
"""
name = '$PathnameSeparator'
def evaluate(self, evaluation):
return String(os.sep)
class Path(Predefined):
"""
<dl>
<dt>'$Path'
<dd>returns the list of directories to search when looking for a file.
</dl>
>> $Path
= ...
"""
attributes = ('Protected')
name = '$Path'
def evaluate(self, evaluation):
return Expression('List', *[String(p) for p in PATH_VAR])
class OperatingSystem(Predefined):
"""
<dl>
<dt>'$OperatingSystem'
<dd>gives the type of operating system running Mathics.
</dl>
>> $OperatingSystem
= ...
"""
attributes = ('Locked', 'Protected')
name = '$OperatingSystem'
def evaluate(self, evaluation):
if os.name == 'posix':
return String('Unix')
elif os.name == 'nt':
return String('Windows')
elif os.name == 'os2':
return String('MacOSX')
else:
return String('Unknown')
class EndOfFile(Builtin):
"""
<dl>
<dt>'EndOfFile'
<dd>is returned by 'Read' when the end of an input stream is reached.
</dl>
"""
# TODO: Improve docs for these Read[] arguments.
class Byte(Builtin):
"""
<dl>
<dt>'Byte'
<dd>is a data type for 'Read'.
</dl>
"""
class Character(Builtin):
"""
<dl>
<dt>'Character'
<dd>is a data type for 'Read'.
</dl>
"""
class Expression_(Builtin):
"""
<dl>
<dt>'Expression'
<dd>is a data type for 'Read'.
</dl>
"""
name = 'Expression'
class Number_(Builtin):
"""
<dl>
<dt>'Number'
<dd>is a data type for 'Read'.
</dl>
"""
name = 'Number'
class Record(Builtin):
"""
<dl>
<dt>'Record'
<dd>is a data type for 'Read'.
</dl>
"""
class Word(Builtin):
"""
<dl>
<dt>'Word'
<dd>is a data type for 'Read'.
</dl>
"""
class Read(Builtin):
"""
<dl>
<dt>'Read[stream]'
<dd>reads the input stream and returns one expression.
<dt>'Read[stream, type]'
<dd>reads the input stream and returns an object of the given type.
</dl>
## Malformed InputString
#> Read[InputStream[String], {Word, Number}]
= Read[InputStream[String], {Word, Number}]
## Correctly formed InputString but not open
#> Read[InputStream[String, -1], {Word, Number}]
: InputStream[String, -1] is not open.
= Read[InputStream[String, -1], {Word, Number}]
## String
>> str = StringToStream["abc123"];
>> Read[str, String]
= abc123
#> Read[str, String]
= EndOfFile
#> Close[str];
## Word
>> str = StringToStream["abc 123"];
>> Read[str, Word]
= abc
>> Read[str, Word]
= 123
#> Read[str, Word]
= EndOfFile
#> Close[str];
#> str = StringToStream[""];
#> Read[str, Word]
= EndOfFile
#> Read[str, Word]
= EndOfFile
#> Close[str];
## Number
>> str = StringToStream["123, 4"];
>> Read[str, Number]
= 123
>> Read[str, Number]
= 4
#> Read[str, Number]
= EndOfFile
#> Close[str];
#> str = StringToStream["123xyz 321"];
#> Read[str, Number]
= 123
#> Quiet[Read[str, Number]]
= $Failed
## Real
#> str = StringToStream["123, 4abc"];
#> Read[str, Real]
= 123.
#> Read[str, Real]
= 4.
#> Quiet[Read[str, Number]]
= $Failed
#> Close[str];
#> str = StringToStream["1.523E-19"]; Read[str, Real]
= 1.523*^-19
#> Close[str];
#> str = StringToStream["-1.523e19"]; Read[str, Real]
= -1.523*^19
#> Close[str];
#> str = StringToStream["3*^10"]; Read[str, Real]
= 3.*^10
#> Close[str];
#> str = StringToStream["3.*^10"]; Read[str, Real]
= 3.*^10
#> Close[str];
## Expression
#> str = StringToStream["x + y Sin[z]"]; Read[str, Expression]
= x + y Sin[z]
#> Close[str];
## #> str = Quiet[StringToStream["Sin[1 123"]; Read[str, Expression]]
## = $Failed
## Multiple types
>> str = StringToStream["123 abc"];
>> Read[str, {Number, Word}]
= {123, abc}
#> Read[str, {Number, Word}]
= EndOfFile
#> Close[str];
#> str = StringToStream["123 abc"];
#> Quiet[Read[str, {Word, Number}]]
= $Failed
#> Close[str];
#> str = StringToStream["123 123"]; Read[str, {Real, Number}]
= {123., 123}
#> Close[str];
#> Quiet[Read[str, {Real}]]
= Read[InputStream[String, ...], {Real}]
"""
messages = {
'openx': '`1` is not open.',
'readf': '`1` is not a valid format specification.',
'readn': 'Invalid real number found when reading from `1`.',
'readt': 'Invalid input found when reading `1` from `2`.',
'intnm': ('Non-negative machine-sized integer expected at '
'position 3 in `1`.'),
}
rules = {
'Read[stream_]': 'Read[stream, Expression]',
}
options = {
'NullRecords': 'False',
'NullWords': 'False',
'RecordSeparators': '{"\r\n", "\n", "\r"}',
'TokenWords': '{}',
'WordSeparators': '{" ", "\t"}',
}
attributes = ('Protected')
def check_options(self, options):
## Options:
# TODO: Proper error messages
result = {}
keys = options.keys()
# AnchoredSearch
if 'System`AnchoredSearch' in keys:
anchored_search = options['System`AnchoredSearch'].to_python()
assert anchored_search in [True, False]
result['AnchoredSearch'] = anchored_search
# IgnoreCase
if 'System`IgnoreCase' in keys:
ignore_case = options['System`IgnoreCase'].to_python()
assert ignore_case in [True, False]
result['IgnoreCase'] = ignore_case
# WordSearch
if 'System`WordSearch' in keys:
word_search = options['System`WordSearch'].to_python()
assert word_search in [True, False]
result['WordSearch'] = word_search
# RecordSeparators
if 'System`RecordSeparators' in keys:
record_separators = options['System`RecordSeparators'].to_python()
assert isinstance(record_separators, list)
assert all(isinstance(s, basestring) and s[
0] == s[-1] == '"' for s in record_separators)
record_separators = [s[1:-1] for s in record_separators]
result['RecordSeparators'] = record_separators
# WordSeparators
if 'System`WordSeparators' in keys:
word_separators = options['System`WordSeparators'].to_python()
assert isinstance(word_separators, list)
assert all(isinstance(s, basestring) and s[
0] == s[-1] == '"' for s in word_separators)
word_separators = [s[1:-1] for s in word_separators]
result['WordSeparators'] = word_separators
# NullRecords
if 'System`NullRecords' in keys:
null_records = options['System`NullRecords'].to_python()
assert null_records in [True, False]
result['NullRecords'] = null_records
# NullWords
if 'System`NullWords' in keys:
null_words = options['System`NullWords'].to_python()
assert null_words in [True, False]
result['NullWords'] = null_words
# TokenWords
if 'System`TokenWords' in keys:
token_words = options['System`TokenWords'].to_python()
assert token_words == []
result['TokenWords'] = token_words
return result
def apply(self, channel, types, evaluation, options):
'Read[channel_, types_, OptionsPattern[Read]]'
if channel.has_form('OutputStream', 2):
evaluation.message('General', 'openw', channel)
return
strm = _channel_to_stream(channel, 'r')
if strm is None:
return
[name, n] = strm.get_leaves()
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('Read', 'openx', strm)
return
# Wrap types in a list (if it isn't already one)
if not types.has_form('List', None):
types = Expression('List', types)
READ_TYPES = [Symbol(k) for k in
['Byte', 'Character', 'Expression',
'Number', 'Real', 'Record', 'String', 'Word']]
for typ in types.leaves:
if typ not in READ_TYPES:
evaluation.message('Read', 'readf', typ)
return Symbol('$Failed')
## Options:
# TODO: Implement extra options
py_options = self.check_options(options)
# null_records = py_options['NullRecords']
# null_words = py_options['NullWords']
record_separators = py_options['RecordSeparators']
# token_words = py_options['TokenWords']
word_separators = py_options['WordSeparators']
name = name.to_python()
result = []
def reader(stream, word_separators, accepted=None):
while True:
word = ''
while True:
tmp = stream.read(1)
if tmp == '':
if word == '':
raise EOFError
yield word
if tmp in word_separators:
if word == '':
break
if stream.seekable():
# stream.seek(-1, 1) #Python3
stream.seek(stream.tell() - 1)
yield word
if accepted is not None and tmp not in accepted:
yield word
word += tmp
read_word = reader(stream, word_separators)
read_record = reader(stream, record_separators)
read_number = reader(stream, word_separators + record_separators,
['+', '-', '.'] + [str(i) for i in range(10)])
read_real = reader(
stream, word_separators + record_separators,
['+', '-', '.', 'e', 'E', '^', '*'] + [str(i) for i in range(10)])
for typ in types.leaves:
try:
if typ == Symbol('Byte'):
tmp = stream.read(1)
if tmp == '':
raise EOFError
result.append(ord(tmp))
elif typ == Symbol('Character'):
tmp = stream.read(1)
if tmp == '':
raise EOFError
result.append(tmp)
elif typ == Symbol('Expression'):
tmp = read_record.next()
try:
try:
expr = parse(tmp, evaluation.definitions)
except NameError:
from mathics.core.parser import parse, ParseError
expr = parse(tmp, evaluation.definitions)
except ParseError:
expr = None
if expr is None:
evaluation.message('Read', 'readt', tmp, Expression(
'InputSteam', name, n))
return Symbol('$Failed')
result.append(tmp)
elif typ == Symbol('Number'):
tmp = read_number.next()
try:
tmp = int(tmp)
except ValueError:
try:
tmp = float(tmp)
except ValueError:
evaluation.message('Read', 'readn', Expression(
'InputSteam', name, n))
return Symbol('$Failed')
result.append(tmp)
elif typ == Symbol('Real'):
tmp = read_real.next()
tmp = tmp.replace('*^', 'E')
try:
tmp = float(tmp)
except ValueError:
evaluation.message('Read', 'readn', Expression(
'InputSteam', name, n))
return Symbol('$Failed')
result.append(tmp)
elif typ == Symbol('Record'):
result.append(read_record.next())
elif typ == Symbol('String'):
tmp = stream.readline()
if len(tmp) == 0:
raise EOFError
result.append(tmp.rstrip('\n'))
elif typ == Symbol('Word'):
result.append(read_word.next())
except EOFError:
return Symbol('EndOfFile')
if len(result) == 1:
return from_python(*result)
return from_python(result)
def apply_nostream(self, arg1, arg2, evaluation):
'Read[arg1_, arg2_]'
evaluation.message('General', 'stream', arg1)
return
class Write(Builtin):
"""
<dl>
<dt>'Write[$channel$, $expr1$, $expr2$, ...]'
<dd>writes the expressions to the output channel followed by a newline.
</dl>
>> str = OpenWrite[]
= ...
>> Write[str, 10 x + 15 y ^ 2]
>> Write[str, 3 Sin[z]]
>> Close[str]
= ...
>> str = OpenRead[%];
>> ReadList[str]
= {10 x + 15 y ^ 2, 3 Sin[z]}
#> Close[str];
"""
attributes = ('Protected')
def apply(self, channel, expr, evaluation):
'Write[channel_, expr___]'
strm = _channel_to_stream(channel)
if strm is None:
return
n = strm.leaves[1].get_int_value()
stream = _lookup_stream(n)
if stream is None or stream.closed:
evaluation.message('General', 'openx', name)
return
expr = expr.get_sequence()
expr = Expression('Row', Expression('List', *expr))
evaluation.format = 'text'
text = evaluation.format_output(from_python(expr))
stream.write(unicode(text) + u'\n')
return Symbol('Null')
class _BinaryFormat(object):
"""
Container for BinaryRead readers and BinaryWrite writers
"""
@staticmethod
def _IEEE_real(real):
if math.isnan(real):
return Symbol('Indeterminate')
elif math.isinf(real):
return Expression('DirectedInfinity', Integer((-1) ** (real < 0)))
else:
return Real(real)
@staticmethod
def _IEEE_cmplx(real, imag):
if math.isnan(real) or math.isnan(imag):
return Symbol('Indeterminate')
elif math.isinf(real) or math.isinf(imag):
if math.isinf(real) and math.isinf(imag):
return Symbol('Indeterminate')
return Expression('DirectedInfinity', Expression(
'Complex',
(-1) ** (real < 0) if math.isinf(real) else 0,
(-1) ** (imag < 0) if math.isinf(imag) else 0))
else:
return Complex(real, imag)
@classmethod
def get_readers(cls):
readers = {}
for funcname in dir(cls):
if funcname.startswith('_') and funcname.endswith('_reader'):
readers[funcname[1:-7]] = getattr(cls, funcname)
return readers
@classmethod
def get_writers(cls):
writers = {}
for funcname in dir(cls):
if funcname.startswith('_') and funcname.endswith('_writer'):
writers[funcname[1:-7]] = getattr(cls, funcname)
return writers
# Reader Functions
@staticmethod
def _Byte_reader(s):
"8-bit unsigned integer"
return Integer(*struct.unpack('B', s.read(1)))
@staticmethod
def _Character8_reader(s):
"8-bit character"
return String(*struct.unpack('c', s.read(1)))
@staticmethod
def _Character16_reader(s):
"16-bit character"
return String(unichr(*struct.unpack('H', s.read(2))))
@staticmethod
def _Complex64_reader(s):
"IEEE single-precision complex number"
return _BinaryFormat._IEEE_cmplx(*struct.unpack('ff', s.read(8)))
@staticmethod
def _Complex128_reader(s):
"IEEE double-precision complex number"
return _BinaryFormat._IEEE_cmplx(*struct.unpack('dd', s.read(16)))
@staticmethod
def _Complex256_reader(s):
"IEEE quad-precision complex number"
return Complex(_Real128_reader(s), _Real128_reader(s))
@staticmethod
def _Integer8_reader(s):
"8-bit signed integer"
return Integer(*struct.unpack('b', s.read(1)))
@staticmethod
def _Integer16_reader(s):
"16-bit signed integer"
return Integer(*struct.unpack('h', s.read(2)))
@staticmethod
def _Integer24_reader(s):
"24-bit signed integer"
b = s.read(3)
return Integer(*struct.unpack(
'i', b + ('\0' if b[-1] < '\x80' else '\xff')))
@staticmethod
def _Integer32_reader(s):
"32-bit signed integer"
return Integer(*struct.unpack('i', s.read(4)))
@staticmethod
def _Integer64_reader(s):
"64-bit signed integer"
return Integer(*struct.unpack('q', s.read(8)))
@staticmethod
def _Integer128_reader(s):
"128-bit signed integer"
a, b = struct.unpack('Qq', s.read(16))
return Integer((b << 64) + a)
@staticmethod
def _Real32_reader(s):
"IEEE single-precision real number"
return _BinaryFormat._IEEE_real(*struct.unpack('f', s.read(4)))
@staticmethod
def _Real64_reader(s):
"IEEE double-precision real number"
return _BinaryFormat._IEEE_real(*struct.unpack('d', s.read(8)))
@staticmethod
def _Real128_reader(s):
"IEEE quad-precision real number"
# Workaround quad missing from struct
# correctness is not guaranteed
b = s.read(16)
sig, sexp = b[:14], b[14:]
# Sign / Exponent
sexp, = struct.unpack('H', sexp)
signbit = sexp / 0x8000
expbits = sexp % 0x8000
# Signifand
fracbits = int(sig[::-1].encode('hex'), 16)
if expbits == 0x0000 and fracbits == 0:
return Real('0.' + '0' * 4965)
elif expbits == 0x7FFF:
if fracbits == 0:
return Expression('DirectedInfinity', Integer((-1) ** signbit))
else:
return Symbol('Indeterminate')
core = mpmath.fdiv(fracbits, 2 ** 112, prec=128)
if expbits == 0x000:
assert fracbits != 0
exp = -16382
core = mpmath.fmul((-1) ** signbit, core, prec=128)
else:
assert 0x0001 <= expbits <= 0x7FFE
exp = expbits - 16383
core = mpmath.fmul(
(-1) ** signbit,
mpmath.fadd(1, core, prec=128), prec=128)
if exp >= 0:
result = mpmath.fmul(core, 2 ** exp, prec=128)
else:
result = mpmath.fdiv(core, 2 ** -exp, prec=128)
return Real(mpmath.nstr(result, n=38), p=112)
@staticmethod
def _TerminatedString_reader(s):
"null-terminated string of 8-bit characters"
b = s.read(1)
string = ''
while b != '\x00':
if b == '':
raise struct.error
string += b
b = s.read(1)
return String(string)
@staticmethod
def _UnsignedInteger8_reader(s):
"8-bit unsigned integer"
return Integer(*struct.unpack('B', s.read(1)))
@staticmethod
def _UnsignedInteger16_reader(s):
"16-bit unsigned integer"
return Integer(*struct.unpack('H', s.read(2)))
@staticmethod
def _UnsignedInteger24_reader(s):
"24-bit unsigned integer"
return Integer(*struct.unpack('I', s.read(3) + '\0'))
@staticmethod
def _UnsignedInteger32_reader(s):
"32-bit unsigned integer"
return Integer(*struct.unpack('I', s.read(4)))
@staticmethod
def _UnsignedInteger64_reader(s):
"64-bit unsigned integer"
return Integer(*struct.unpack('Q', s.read(8)))
@staticmethod
def _UnsignedInteger128_reader(s):
"128-bit unsigned integer"
a, b = struct.unpack('QQ', s.read(16))
return Integer((b << 64) + a)
# Writer Functions
@staticmethod
def _Byte_writer(s, x):
"8-bit unsigned integer"
s.write(struct.pack('B', x))
@staticmethod
def _Character8_writer(s, x):
"8-bit character"
s.write(struct.pack('c', x.encode('utf-8')))
# TODO
# @staticmethod
# def _Character16_writer(s, x):
# "16-bit character"
# pass
@staticmethod
def _Complex64_writer(s, x):
"IEEE single-precision complex number"
s.write(struct.pack('ff', x.real, x.imag))
# return _BinaryFormat._IEEE_cmplx(*struct.unpack('ff', s.read(8)))
@staticmethod
def _Complex128_writer(s, x):
"IEEE double-precision complex number"
s.write(struct.pack('dd', x.real, x.imag))
# TODO
# @staticmethod
# def _Complex256_writer(s, x):
# "IEEE quad-precision complex number"
# pass
@staticmethod
def _Integer8_writer(s, x):
"8-bit signed integer"
s.write(struct.pack('b', x))
@staticmethod
def _Integer16_writer(s, x):
"16-bit signed integer"
s.write(struct.pack('h', x))
@staticmethod
def _Integer24_writer(s, x):
"24-bit signed integer"
s.write(struct.pack("i", x << 8)[1:])
@staticmethod
def _Integer32_writer(s, x):
"32-bit signed integer"
s.write(struct.pack('i', x))
@staticmethod
def _Integer64_writer(s, x):
"64-bit signed integer"
s.write(struct.pack('q', x))
@staticmethod
def _Integer128_writer(s, x):
"128-bit signed integer"
a, b = x & 0xFFFFFFFFFFFFFFFF, x >> 64
s.write(struct.pack('Qq', a, b))
@staticmethod
def _Real32_writer(s, x):
"IEEE single-precision real number"
s.write(struct.pack('f', x))
@staticmethod
def _Real64_writer(s, x):
"IEEE double-precision real number"
s.write(struct.pack('d', x))
# TODO
# @staticmethod
# def _Real128_writer(s, x):
# "IEEE quad-precision real number"
# pass
@staticmethod
def _TerminatedString_writer(s, x):
"null-terminated string of 8-bit characters"
s.write(x.encode('utf-8'))
@staticmethod
def _UnsignedInteger8_writer(s, x):
"8-bit unsigned integer"
s.write(struct.pack('B', x))
@staticmethod
def _UnsignedInteger16_writer(s, x):
"16-bit unsigned integer"
s.write(struct.pack('H', x))
@staticmethod
def _UnsignedInteger24_writer(s, x):
"24-bit unsigned integer"
s.write(struct.pack("I", x << 8)[1:])
@staticmethod
def _UnsignedInteger32_writer(s, x):
"32-bit unsigned integer"
s.write(struct.pack('I', x))
@staticmethod
def _UnsignedInteger64_writer(s, x):
"64-bit unsigned integer"
s.write(struct.pack('Q', x))
@staticmethod
def _UnsignedInteger128_writer(s, x):
"128-bit unsigned integer"
a, b = x & 0xFFFFFFFFFFFFFFFF, x >> 64
s.write(struct.pack('QQ', a, b))
class BinaryWrite(Builtin):
"""
<dl>
<dt>'BinaryWrite[$channel$, $b$]'
<dd>writes a single byte given as an integer from 0 to 255.
<dt>'BinaryWrite[$channel$, {b1, b2, ...}]'
<dd>writes a sequence of byte.
<dt>'BinaryWrite[$channel$, "string"]'
<dd>writes the raw characters in a string.
<dt>'BinaryWrite[$channel$, $x$, $type$]'
<dd>writes $x$ as the specified type.
<dt>'BinaryWrite[$channel$, {$x1$, $x2$, ...}, $type$]'
<dd>writes a sequence of objects as the specified type.
<dt>'BinaryWrite[$channel$, {$x1$, $x2$, ...}, {$type1$, $type2$, ...}]'
<dd>writes a sequence of objects using a sequence of specified types.
</dl>
>> strm = OpenWrite[BinaryFormat -> True]
= OutputStream[...]
>> BinaryWrite[strm, {39, 4, 122}]
= OutputStream[...]
>> Close[strm]
= ...
>> strm = OpenRead[%, BinaryFormat -> True]
= InputStream[...]
>> BinaryRead[strm]
= 39
>> BinaryRead[strm, "Byte"]
= 4
>> BinaryRead[strm, "Character8"]
= z
>> Close[strm];
Write a String
>> strm = OpenWrite[BinaryFormat -> True]
= OutputStream[...]
>> BinaryWrite[strm, "abc123"]
= OutputStream[...]
>> Close[%]
= ...
Read as Bytes
>> strm = OpenRead[%, BinaryFormat -> True]
= InputStream[...]
>> BinaryRead[strm, {"Character8", "Character8", "Character8", "Character8", "Character8", "Character8", "Character8"}]
= {a, b, c, 1, 2, 3, EndOfFile}
>> Close[strm]
= ...
Read as Characters
>> strm = OpenRead[%, BinaryFormat -> True]
= InputStream[...]
>> BinaryRead[strm, {"Byte", "Byte", "Byte", "Byte", "Byte", "Byte", "Byte"}]
= {97, 98, 99, 49, 50, 51, EndOfFile}
>> Close[strm]
= ...
Write Type
>> strm = OpenWrite[BinaryFormat -> True]
= OutputStream[...]
>> BinaryWrite[strm, 97, "Byte"]
= OutputStream[...]
>> BinaryWrite[strm, {97, 98, 99}, {"Byte", "Byte", "Byte"}]
= OutputStream[...]
>> Close[%]
= ...
## Write then Read as Bytes
#> WRb[bytes_, form_] := Module[{str, res={}, byte}, str = OpenWrite[BinaryFormat -> True]; BinaryWrite[str, bytes, form]; str = OpenRead[Close[str], BinaryFormat -> True]; While[Not[SameQ[byte = BinaryRead[str], EndOfFile]], res = Join[res, {byte}];]; Close[str]; res]
## Byte
#> WRb[{149, 2, 177, 132}, {"Byte", "Byte", "Byte", "Byte"}]
= {149, 2, 177, 132}
#> WRb[{149, 2, 177, 132}, {"Byte", "Byte", "Byte", "Byte"}]
= {149, 2, 177, 132}
#> (# == WRb[#, Table["Byte", {50}]]) & [RandomInteger[{0, 255}, 50]]
= True
## Character8
#> WRb[{"a", "b", "c"}, {"Character8", "Character8", "Character8"}]
= {97, 98, 99}
#> WRb[{34, 60, 39}, {"Character8", "Character8", "Character8"}]
= {51, 52, 54, 48, 51, 57}
#> WRb[{"ab", "c", "d"}, {"Character8", "Character8", "Character8", "Character8"}]
= {97, 98, 99, 100}
## Character16
## TODO
## Complex64
#> WRb[-6.36877988924*^28 + 3.434203392*^9 I, "Complex64"]
= {80, 201, 77, 239, 201, 177, 76, 79}
#> WRb[-6.98948862335*^24 + 1.52209021297*^23 I, "Complex64"]
= {158, 2, 185, 232, 18, 237, 0, 102}
#> WRb[-1.41079828148*^-19 - 0.013060791418 I, "Complex64"]
= {195, 142, 38, 160, 238, 252, 85, 188}
#> WRb[{5, -2054}, "Complex64"]
= {0, 0, 160, 64, 0, 0, 0, 0, 0, 96, 0, 197, 0, 0, 0, 0}
#> WRb[Infinity, "Complex64"]
= {0, 0, 128, 127, 0, 0, 0, 0}
#> WRb[-Infinity, "Complex64"]
= {0, 0, 128, 255, 0, 0, 0, 0}
#> WRb[DirectedInfinity[1 + I], "Complex64"]
= {0, 0, 128, 127, 0, 0, 128, 127}
#> WRb[DirectedInfinity[I], "Complex64"]
= {0, 0, 0, 0, 0, 0, 128, 127}
## FIXME (different convention to MMA)
#> WRb[Indeterminate, "Complex64"]
= {0, 0, 192, 127, 0, 0, 192, 127}
## Complex128
#> WRb[1.19839770357*^-235 - 2.64656391494*^-54 I,"Complex128"]
= {102, 217, 1, 163, 234, 98, 40, 15, 243, 104, 116, 15, 48, 57, 208, 180}
#> WRb[3.22170267142*^134 - 8.98364297498*^198 I,"Complex128"]
= {219, 161, 12, 126, 47, 94, 220, 91, 189, 66, 29, 68, 147, 11, 62, 233}
#> WRb[-Infinity, "Complex128"]
= {0, 0, 0, 0, 0, 0, 240, 255, 0, 0, 0, 0, 0, 0, 0, 0}
#> WRb[DirectedInfinity[1 - I], "Complex128"]
= {0, 0, 0, 0, 0, 0, 240, 127, 0, 0, 0, 0, 0, 0, 240, 255}
#> WRb[DirectedInfinity[I], "Complex128"]
= {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 127}
## FIXME (different convention to MMA)
#> WRb[Indeterminate, "Complex128"]
= {0, 0, 0, 0, 0, 0, 248, 127, 0, 0, 0, 0, 0, 0, 248, 127}
## Complex256
## TODO
## Integer8
#> WRb[{5, 2, 11, -4}, {"Integer8", "Integer8", "Integer8", "Integer8"}]
= {5, 2, 11, 252}
#> WRb[{127, -128, 0}, {"Integer8", "Integer8", "Integer8"}]
= {127, 128, 0}
## Integer16
#> WRb[{661, -31567, 6256}, {"Integer16", "Integer16", "Integer16"}]
= {149, 2, 177, 132, 112, 24}
#> WRb[{0, 255, -1, 32640, -32640}, Table["Integer16", {5}]]
= {0, 0, 255, 0, 255, 255, 128, 127, 128, 128}
## Integer24
#> WRb[{-6247016, -6631492}, {"Integer24", "Integer24"}]
= {152, 173, 160, 188, 207, 154}
#> WRb[{-1593967, 1989169}, {"Integer24", "Integer24"}]
= {145, 173, 231, 49, 90, 30}
## Integer32
#> WRb[{-636001327, -236143729}, {"Integer32", "Integer32"}]
= {209, 99, 23, 218, 143, 187, 236, 241}
#> WRb[{2024611599, -1139645195}, {"Integer32", "Integer32"}]
= {15, 31, 173, 120, 245, 100, 18, 188}
## Integer64
#> WRb[{1176115612243989203}, "Integer64"]
= {211, 18, 152, 2, 235, 102, 82, 16}
#> WRb[{-8526737900550694619}, "Integer64"]
= {37, 217, 208, 88, 14, 241, 170, 137}
## Integer128
#> WRb[139827542997232652313568968616424513676, "Integer128"]
= {140, 32, 24, 199, 10, 169, 248, 117, 123, 184, 75, 76, 34, 206, 49, 105}
#> WRb[103439096823027953602112616165136677221, "Integer128"]
= {101, 57, 184, 108, 43, 214, 186, 120, 153, 51, 132, 225, 56, 165, 209, 77}
#> WRb[-49058912464625098822365387707690163087, "Integer128"]
= {113, 100, 125, 144, 211, 83, 140, 24, 206, 11, 198, 118, 222, 152, 23, 219}
## Real32
#> WRb[{8.398086656*^9, 1.63880017687*^16}, {"Real32", "Real32"}]
= {81, 72, 250, 79, 52, 227, 104, 90}
#> WRb[{5.6052915284*^32, 9.631141*^6}, {"Real32", "Real32"}]
= {251, 22, 221, 117, 165, 245, 18, 75}
#> WRb[Infinity, "Real32"]
= {0, 0, 128, 127}
#> WRb[-Infinity, "Real32"]
= {0, 0, 128, 255}
## FIXME (different convention to MMA)
#> WRb[Indeterminate, "Real32"]
= {0, 0, 192, 127}
## Real64
#> WRb[-5.14646619426*^227, "Real64"]
= {91, 233, 20, 87, 129, 185, 53, 239}
#> WRb[-9.69531698809*^20, "Real64"]
= {187, 67, 162, 67, 122, 71, 74, 196}
#> WRb[9.67355569764*^159, "Real64"]
= {132, 48, 80, 125, 157, 4, 38, 97}
#> WRb[Infinity, "Real64"]
= {0, 0, 0, 0, 0, 0, 240, 127}
#> WRb[-Infinity, "Real64"]
= {0, 0, 0, 0, 0, 0, 240, 255}
## FIXME (different convention to MMA)
#> WRb[Indeterminate, "Real64"]
= {0, 0, 0, 0, 0, 0, 248, 127}
## Real128
## TODO
## TerminatedString
#> WRb["abc", "TerminatedString"]
= {97, 98, 99, 0}
#> WRb[{"123", "456"}, {"TerminatedString", "TerminatedString", "TerminatedString"}]
= {49, 50, 51, 0, 52, 53, 54, 0}
#> WRb["", "TerminatedString"]
= {0}
## UnsignedInteger8
#> WRb[{96, 94, 141, 162, 141}, Table["UnsignedInteger8", {5}]]
= {96, 94, 141, 162, 141}
#> (#==WRb[#,Table["UnsignedInteger8",{50}]])&[RandomInteger[{0, 255}, 50]]
= True
## UnsignedInteger16
#> WRb[{18230, 47466, 9875, 59141}, Table["UnsignedInteger16", {4}]]
= {54, 71, 106, 185, 147, 38, 5, 231}
#> WRb[{0, 32896, 65535}, Table["UnsignedInteger16", {3}]]
= {0, 0, 128, 128, 255, 255}
## UnsignedInteger24
#> WRb[{14820174, 15488225}, Table["UnsignedInteger24", {2}]]
= {78, 35, 226, 225, 84, 236}
#> WRb[{5374629, 3889391}, Table["UnsignedInteger24", {2}]]
= {165, 2, 82, 239, 88, 59}
## UnsignedInteger32
#> WRb[{1885507541, 4157323149}, Table["UnsignedInteger32", {2}]]
= {213, 143, 98, 112, 141, 183, 203, 247}
#> WRb[{384206740, 1676316040}, Table["UnsignedInteger32", {2}]]
= {148, 135, 230, 22, 136, 141, 234, 99}
## UnsignedInteger64
#> WRb[7079445437368829279, "UnsignedInteger64"]
= {95, 5, 33, 229, 29, 62, 63, 98}
#> WRb[5381171935514265990, "UnsignedInteger64"]
= {134, 9, 161, 91, 93, 195, 173, 74}
## UnsignedInteger128
#> WRb[293382001665435747348222619884289871468, "UnsignedInteger128"]
= {108, 78, 217, 150, 88, 126, 152, 101, 231, 134, 176, 140, 118, 81, 183, 220}
#> WRb[253033302833692126095975097811212718901, "UnsignedInteger128"]
= {53, 83, 116, 79, 81, 100, 60, 126, 202, 52, 241, 48, 5, 113, 92, 190}
## Full File
>> strm = OpenWrite["/dev/full", BinaryFormat -> True]
= OutputStream[...]
>> BinaryWrite[strm, {39, 4, 122}]
: No space left on device.
= OutputStream[...]
>> Close[strm]
: No space left on device.
= ...
"""
messages = {
'writex': '`1`.',
}
writers = _BinaryFormat.get_writers()
def apply_notype(self, name, n, b, evaluation):
'BinaryWrite[OutputStream[name_, n_], b_]'
return self.apply(name, n, b, None, evaluation)
def apply(self, name, n, b, typ, evaluation):
'BinaryWrite[OutputStream[name_, n_], b_, typ_]'
channel = Expression('OutputStream', name, n)
# Check channel
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('General', 'openx', name)
return
if stream.mode not in ['wb', 'ab']:
evaluation.message('BinaryWrite', 'openr', channel)
return
# Check Empty Type
if typ is None:
expr = Expression('BinaryWrite', channel, b)
typ = Expression('List')
else:
expr = Expression('BinaryWrite', channel, b, typ)
# Check b
if b.has_form('List', None):
pyb = b.leaves
else:
pyb = [b]
# Check Type
if typ.has_form('List', None):
types = typ.get_leaves()
else:
types = [typ]
if len(types) == 0: # Default type is "Bytes"
types = [String("Byte")]
types = [t.get_string_value() for t in types]
if not all(t in self.writers for t in types):
evaluation.message('BinaryRead', 'format', typ)
return
# Write to stream
result = []
i = 0
while i < len(pyb):
x = pyb[i]
# Types are "repeated as many times as necessary"
t = types[i % len(types)]
if t in ('Real128', 'Complex256'):
evaluation.message('BinaryRead', 'warnquad', t)
# Coerce x
if t == 'TerminatedString':
x = x.get_string_value() + '\x00'
elif t.startswith('Real'):
if isinstance(x, Real):
x = x.to_python()
elif x.has_form('DirectedInfinity', 1):
if x.leaves[0].get_int_value() == 1:
x = float('+inf')
elif x.leaves[0].get_int_value() == -1:
x = float('-inf')
else:
x = None
elif (isinstance(x, Symbol)
and x.get_name() == 'System`Indeterminate'):
x = float('nan')
else:
x = None
assert x is None or isinstance(x, float)
elif t.startswith('Complex'):
if isinstance(x, (Complex, Real, Integer)):
x = x.to_python()
elif x.has_form('DirectedInfinity', 1):
x = x.leaves[0].to_python(n_evaluation=evaluation)
# x*float('+inf') creates nan if x.real or x.imag are zero
x = complex(x.real * float('+inf') if x.real != 0 else 0,
x.imag * float('+inf') if x.imag != 0 else 0)
elif (isinstance(x, Symbol)
and x.get_name() == 'System`Indeterminate'):
x = complex(float('nan'), float('nan'))
else:
x = None
elif t.startswith('Character'):
if isinstance(x, Integer):
x = [String(char) for char in str(x.get_int_value())]
pyb = pyb[:i] + x + pyb[i + 1:]
x = pyb[i]
if isinstance(x, String) and len(x.get_string_value()) > 1:
x = [String(char) for char in x.get_string_value()]
pyb = pyb[:i] + x + pyb[i + 1:]
x = pyb[i]
x = x.get_string_value()
elif t == 'Byte' and isinstance(x, String):
if len(x.get_string_value()) > 1:
x = [String(char) for char in x.get_string_value()]
pyb = pyb[:i] + x + pyb[i + 1:]
x = pyb[i]
x = ord(x.get_string_value())
else:
x = x.get_int_value()
if x is None:
return evaluation.message('BinaryWrite', 'nocoerce', b)
try:
self.writers[t](stream, x)
except struct.error:
return evaluation.message('BinaryWrite', "nocoerce", b)
i += 1
try:
stream.flush()
except IOError as err:
evaluation.message('BinaryWrite', 'writex', err.strerror)
return channel
class BinaryRead(Builtin):
"""
<dl>
<dt>'BinaryRead[$stream$]'
<dd>reads one byte from the stream as an integer from 0 to 255.
<dt>'BinaryRead[$stream$, $type$]'
<dd>reads one object of specified type from the stream.
<dt>'BinaryRead[$stream$, {$type1$, $type2$, ...}]'
<dd>reads a sequence of objects of specified types.
</dl>
>> strm = OpenWrite[BinaryFormat -> True]
= OutputStream[...]
>> BinaryWrite[strm, {97, 98, 99}]
= OutputStream[...]
>> Close[strm]
= ...
>> strm = OpenRead[%, BinaryFormat -> True]
= InputStream[...]
>> BinaryRead[strm, {"Character8", "Character8", "Character8"}]
= {a, b, c}
>> Close[strm];
## Write as Bytes then Read
#> WbR[bytes_, form_] := Module[{str, res}, str = OpenWrite[BinaryFormat -> True]; BinaryWrite[str, bytes]; str = OpenRead[Close[str], BinaryFormat -> True]; res = BinaryRead[str, form]; Close[str]; res]
## Byte
#> WbR[{149, 2, 177, 132}, {"Byte", "Byte", "Byte", "Byte"}]
= {149, 2, 177, 132}
#> (# == WbR[#, Table["Byte", {50}]]) & [RandomInteger[{0, 255}, 50]]
= True
## Character8
#> WbR[{97, 98, 99}, {"Character8", "Character8", "Character8"}]
= {a, b, c}
#> WbR[{34, 60, 39}, {"Character8", "Character8", "Character8"}]
= {", <, '}
## Character16
#> WbR[{97, 0, 98, 0, 99, 0}, {"Character16", "Character16", "Character16"}]
= {a, b, c}
#> ToCharacterCode[WbR[{50, 154, 182, 236}, {"Character16", "Character16"}]]
= {{39474}, {60598}}
## #> WbR[ {91, 146, 206, 54}, {"Character16", "Character16"}]
## = {\:925b, \:36ce}
## Complex64
#> WbR[{80, 201, 77, 239, 201, 177, 76, 79}, "Complex64"]
= -6.36877988924*^28 + 3.434203392*^9 I
#> WbR[{158, 2, 185, 232, 18, 237, 0, 102}, "Complex64"]
= -6.98948862335*^24 + 1.52209021297*^23 I
#> WbR[{195, 142, 38, 160, 238, 252, 85, 188}, "Complex64"]
= -1.41079828148*^-19 - 0.013060791418 I
## Complex128
#> WbR[{15,114,1,163,234,98,40,15,214,127,116,15,48,57,208,180},"Complex128"]
= 1.19839770357*^-235 - 2.64656391494*^-54 I
#> WbR[{148,119,12,126,47,94,220,91,42,69,29,68,147, 11,62,233},"Complex128"]
= 3.22170267142*^134 - 8.98364297498*^198 I
#> WbR[{15,42,80,125,157,4,38,97, 0,0,0,0,0,0,240,255}, "Complex128"]
= -I Infinity
#> WbR[{15,42,80,125,157,4,38,97, 0,0,0,0,0,0,240,127}, "Complex128"]
= I Infinity
#> WbR[{15,42,80,125,157,4,38,97, 1,0,0,0,0,0,240,255}, "Complex128"]
= Indeterminate
#> WbR[{0,0,0,0,0,0,240,127, 15,42,80,125,157,4,38,97}, "Complex128"]
= Infinity
#> WbR[{0,0,0,0,0,0,240,255, 15,42,80,125,157,4,38,97}, "Complex128"]
= -Infinity
#> WbR[{1,0,0,0,0,0,240,255, 15,42,80,125,157,4,38,97}, "Complex128"]
= Indeterminate
#> WbR[{0,0,0,0,0,0,240,127, 0,0,0,0,0,0,240,127}, "Complex128"]
= Indeterminate
#> WbR[{0,0,0,0,0,0,240,127, 0,0,0,0,0,0,240,255}, "Complex128"]
= Indeterminate
## Complex256
## TODO
## Integer8
#> WbR[{149, 2, 177, 132}, {"Integer8", "Integer8", "Integer8", "Integer8"}]
= {-107, 2, -79, -124}
#> WbR[{127, 128, 0, 255}, {"Integer8", "Integer8", "Integer8", "Integer8"}]
= {127, -128, 0, -1}
## Integer16
#> WbR[{149, 2, 177, 132, 112, 24}, {"Integer16", "Integer16", "Integer16"}]
= {661, -31567, 6256}
#> WbR[{0, 0, 255, 0, 255, 255, 128, 127, 128, 128}, Table["Integer16", {5}]]
= {0, 255, -1, 32640, -32640}
## Integer24
#> WbR[{152, 173, 160, 188, 207, 154}, {"Integer24", "Integer24"}]
= {-6247016, -6631492}
#> WbR[{145, 173, 231, 49, 90, 30}, {"Integer24", "Integer24"}]
= {-1593967, 1989169}
## Integer32
#> WbR[{209, 99, 23, 218, 143, 187, 236, 241}, {"Integer32", "Integer32"}]
= {-636001327, -236143729}
#> WbR[{15, 31, 173, 120, 245, 100, 18, 188}, {"Integer32", "Integer32"}]
= {2024611599, -1139645195}
## Integer64
#> WbR[{211, 18, 152, 2, 235, 102, 82, 16}, "Integer64"]
= 1176115612243989203
#> WbR[{37, 217, 208, 88, 14, 241, 170, 137}, "Integer64"]
= -8526737900550694619
## Integer128
#> WbR[{140,32,24,199,10,169,248,117,123,184,75,76,34,206,49,105}, "Integer128"]
= 139827542997232652313568968616424513676
#> WbR[{101,57,184,108,43,214,186,120,153,51,132,225,56,165,209,77}, "Integer128"]
= 103439096823027953602112616165136677221
#> WbR[{113,100,125,144,211,83,140,24,206,11,198,118,222,152,23,219}, "Integer128"]
= -49058912464625098822365387707690163087
## Real32
#> WbR[{81, 72, 250, 79, 52, 227, 104, 90}, {"Real32", "Real32"}]
= {8.398086656*^9, 1.63880017687*^16}
#> WbR[{251, 22, 221, 117, 165, 245, 18, 75}, {"Real32", "Real32"}]
= {5.6052915284*^32, 9.631141*^6}
#> WbR[{0, 0, 128, 127}, "Real32"]
= Infinity
#> WbR[{0, 0, 128, 255}, "Real32"]
= -Infinity
#> WbR[{1, 0, 128, 255}, "Real32"]
= Indeterminate
#> WbR[{1, 0, 128, 127}, "Real32"]
= Indeterminate
## Real64
#> WbR[{45, 243, 20, 87, 129, 185, 53, 239}, "Real64"]
= -5.14646619426*^227
#> WbR[{192, 60, 162, 67, 122, 71, 74, 196}, "Real64"]
= -9.69531698809*^20
#> WbR[{15, 42, 80, 125, 157, 4, 38, 97}, "Real64"]
= 9.67355569764*^159
#> WbR[{0, 0, 0, 0, 0, 0, 240, 127}, "Real64"]
= Infinity
#> WbR[{0, 0, 0, 0, 0, 0, 240, 255}, "Real64"]
= -Infinity
#> WbR[{1, 0, 0, 0, 0, 0, 240, 127}, "Real64"]
= Indeterminate
#> WbR[{1, 0, 0, 0, 0, 0, 240, 255}, "Real64"]
= Indeterminate
## Real128
## 0x0000
#> WbR[{0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,0}, "Real128"]
: Results for the format Real128 may not be correct.
= 0.*^-4965
#> WbR[{0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,0,128}, "Real128"]
: Results for the format Real128 may not be correct.
= 0.*^-4965
## 0x0001 - 0x7FFE
#> WbR[{0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,255,63}, "Real128"]
: Results for the format Real128 may not be correct.
= 1.
#> WbR[{0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,255,191}, "Real128"]
: Results for the format Real128 may not be correct.
= -1.
#> WbR[{135, 62, 233, 137, 22, 208, 233, 210, 133, 82, 251, 92, 220, 216, 255, 63}, "Real128"]
: Results for the format Real128 may not be correct.
= 1.84711247573661489653389674493896
#> WbR[{135, 62, 233, 137, 22, 208, 233, 210, 133, 82, 251, 92, 220, 216, 207, 72}, "Real128"]
: Results for the format Real128 may not be correct.
= 2.45563355727491021879689747166252*^679
#> WbR[{74, 95, 30, 234, 116, 130, 1, 84, 20, 133, 245, 221, 113, 110, 219, 212}, "Real128"]
: Results for the format Real128 may not be correct.
= -4.52840681592341879518366539335138*^1607
## 0x7FFF
#> WbR[{0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,255,127}, "Real128"]
: Results for the format Real128 may not be correct.
= Infinity
#> WbR[{0,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,255,255}, "Real128"]
: Results for the format Real128 may not be correct.
= -Infinity
#> WbR[{1,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,255,127}, "Real128"]
: Results for the format Real128 may not be correct.
= Indeterminate
#> WbR[{1,0,0,0, 0,0,0,0, 0,0,0,0, 0,0,255,255}, "Real128"]
: Results for the format Real128 may not be correct.
= Indeterminate
## TerminatedString
#> WbR[{97, 98, 99, 0}, "TerminatedString"]
= abc
#> WbR[{49, 50, 51, 0, 52, 53, 54, 0, 55, 56, 57}, Table["TerminatedString", {3}]]
= {123, 456, EndOfFile}
#> WbR[{0}, "TerminatedString"] // InputForm
= ""
## UnsignedInteger8
#> WbR[{96, 94, 141, 162, 141}, Table["UnsignedInteger8", {5}]]
= {96, 94, 141, 162, 141}
#> (#==WbR[#,Table["UnsignedInteger8",{50}]])&[RandomInteger[{0, 255}, 50]]
= True
## UnsignedInteger16
#> WbR[{54, 71, 106, 185, 147, 38, 5, 231}, Table["UnsignedInteger16", {4}]]
= {18230, 47466, 9875, 59141}
#> WbR[{0, 0, 128, 128, 255, 255}, Table["UnsignedInteger16", {3}]]
= {0, 32896, 65535}
## UnsignedInteger24
#> WbR[{78, 35, 226, 225, 84, 236}, Table["UnsignedInteger24", {2}]]
= {14820174, 15488225}
#> WbR[{165, 2, 82, 239, 88, 59}, Table["UnsignedInteger24", {2}]]
= {5374629, 3889391}
## UnsignedInteger32
#> WbR[{213,143,98,112,141,183,203,247}, Table["UnsignedInteger32", {2}]]
= {1885507541, 4157323149}
#> WbR[{148,135,230,22,136,141,234,99}, Table["UnsignedInteger32", {2}]]
= {384206740, 1676316040}
## UnsignedInteger64
#> WbR[{95, 5, 33, 229, 29, 62, 63, 98}, "UnsignedInteger64"]
= 7079445437368829279
#> WbR[{134, 9, 161, 91, 93, 195, 173, 74}, "UnsignedInteger64"]
= 5381171935514265990
## UnsignedInteger128
#> WbR[{108,78,217,150,88,126,152,101,231,134,176,140,118,81,183,220}, "UnsignedInteger128"]
= 293382001665435747348222619884289871468
#> WbR[{53,83,116,79,81,100,60,126,202,52,241,48,5,113,92,190}, "UnsignedInteger128"]
= 253033302833692126095975097811212718901
## EndOfFile
#> WbR[{148}, {"Integer32", "Integer32","Integer32"}]
= {EndOfFile, EndOfFile, EndOfFile}
"""
readers = _BinaryFormat.get_readers()
messages = {
'format': '`1` is not a recognized binary format.',
'openw': '`1` is open for output.',
'bfmt': 'The stream `1` has been opened with BinaryFormat -> False and cannot be used with binary data.',
'warnquad': 'Results for the format `1` may not be correct.', # FIXME
}
def apply_empty(self, name, n, evaluation):
'BinaryRead[InputStream[name_, n_]]'
return self.apply(name, n, None, evaluation)
def apply(self, name, n, typ, evaluation):
'BinaryRead[InputStream[name_, n_], typ_]'
channel = Expression('InputStream', name, n)
# Check channel
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('General', 'openx', name)
return
if stream.mode not in ['rb']:
evaluation.message('BinaryRead', 'bfmt', channel)
return
# Check typ
if typ is None:
expr = Expression('BinaryRead', channel)
typ = String('Byte')
else:
expr = Expression('BinaryRead', channel, typ)
if typ.has_form('List', None):
types = typ.get_leaves()
else:
types = [typ]
types = [t.get_string_value() for t in types]
if not all(t in self.readers for t in types):
evaluation.message('BinaryRead', 'format', typ)
return
# Read from stream
result = []
for t in types:
if t in ('Real128', 'Complex256'):
evaluation.message('BinaryRead', 'warnquad', t)
try:
result.append(self.readers[t](stream))
except struct.error:
result.append(Symbol('EndOfFile'))
if typ.has_form('List', None):
return Expression('List', *result)
else:
if len(result) == 1:
return result[0]
class WriteString(Builtin):
"""
<dl>
<dt>'WriteString[$stream$, $str1, $str2$, ... ]'
<dd>writes the strings to the output stream.
</dl>
>> str = OpenWrite[];
>> WriteString[str, "This is a test 1"]
>> WriteString[str, "This is also a test 2"]
>> Close[str]
= ...
>> FilePrint[%]
| This is a test 1This is also a test 2
>> str = OpenWrite[];
>> WriteString[str, "This is a test 1", "This is also a test 2"]
>> Close[str]
= ...
>> FilePrint[%]
| This is a test 1This is also a test 2
#> str = OpenWrite[];
#> WriteString[str, 100, 1 + x + y, Sin[x + y]]
#> Close[str]
= ...
#> FilePrint[%]
| 1001 + x + ySin[x + y]
#> str = OpenWrite[];
#> WriteString[str]
#> Close[str]
= ...
#> FilePrint[%]
#> WriteString[%%, abc]
#> Streams[%%%][[1]]
= ...
#> Close[%]
= ...
#> FilePrint[%]
| abc
#> WriteString[OpenWrite["/dev/zero"], "abc"] (* Null *)
#> str = OpenWrite["/dev/full"];
#> WriteString[str, "123"]
: No space left on device.
#> Close[str]
: No space left on device.
= /dev/full
"""
messages = {
'strml': ('`1` is not a string, stream, '
'or list of strings and streams.'),
'writex': '`1`.',
}
attributes = ('Protected')
def apply(self, channel, expr, evaluation):
'WriteString[channel_, expr___]'
strm = _channel_to_stream(channel, 'w')
if strm is None:
return
stream = _lookup_stream(strm.leaves[1].get_int_value())
if stream is None or stream.closed:
return None
exprs = []
for expri in expr.get_sequence():
result = expri.format(evaluation, "System`OutputForm")
try:
result = result.boxes_to_text(evaluation=evaluation)
except BoxError:
return evaluation.message(
'General', 'notboxes',
Expression('FullForm', result).evaluate(evaluation))
exprs.append(result)
stream.write(u''.join(exprs))
try:
stream.flush()
except IOError as err:
evaluation.message('WriteString', 'writex', err.strerror)
return Symbol('Null')
class _OpenAction(Builtin):
attributes = ('Protected')
# BinaryFormat: 'False',
# CharacterEncoding :> Automatic,
# DOSTextFormat :> True,
# FormatType -> InputForm,
# NumberMarks :> $NumberMarks,
# PageHeight -> 22, PageWidth -> 78,
# TotalHeight -> Infinity,
# TotalWidth -> Infinity
options = {
'BinaryFormat': 'False',
}
messages = {
'argx': 'OpenRead called with 0 arguments; 1 argument is expected.',
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
}
def apply_empty(self, evaluation, options):
'%(name)s[OptionsPattern[]]'
if isinstance(self, (OpenWrite, OpenAppend)):
tmpf = tempfile.NamedTemporaryFile(dir=TMP_DIR)
path = String(tmpf.name)
tmpf.close()
return self.apply_path(path, evaluation, options)
else:
evaluation.message('OpenRead', 'argx')
return
def apply_path(self, path, evaluation, options):
'%(name)s[path_?NotOptionQ, OptionsPattern[]]'
## Options
# BinaryFormat
mode = self.mode
if options['System`BinaryFormat'].is_true():
if not self.mode.endswith('b'):
mode += 'b'
if not (isinstance(path, String) and len(path.to_python()) > 2):
evaluation.message(self.__class__.__name__, 'fstr', path)
return
path_string = path.get_string_value()
tmp = path_search(path_string)
if tmp is None:
if mode in ['r', 'rb']:
evaluation.message('General', 'noopen', path)
return
else:
path_string = tmp
try:
opener = mathics_open(path_string, mode=mode)
stream = opener.__enter__()
n = opener.n
except IOError:
evaluation.message('General', 'noopen', path)
return
return Expression(self.stream_type, path, Integer(n))
class OpenRead(_OpenAction):
"""
<dl>
<dt>'OpenRead["file"]'
<dd>opens a file and returns an InputStream.
</dl>
>> OpenRead["ExampleData/EinsteinSzilLetter.txt"]
= InputStream[...]
#> Close[%];
#> OpenRead[]
: OpenRead called with 0 arguments; 1 argument is expected.
= OpenRead[]
#> OpenRead[y]
: File specification y is not a string of one or more characters.
= OpenRead[y]
#> OpenRead[""]
: File specification is not a string of one or more characters.
= OpenRead[]
#> OpenRead["MathicsNonExampleFile"]
: Cannot open MathicsNonExampleFile.
= OpenRead[MathicsNonExampleFile]
#> OpenRead["ExampleData/EinsteinSzilLetter.txt", BinaryFormat -> True]
= InputStream[...]
#> Close[%];
"""
mode = 'r'
stream_type = 'InputStream'
class OpenWrite(_OpenAction):
"""
<dl>
<dt>'OpenWrite["file"]'
<dd>opens a file and returns an OutputStream.
</dl>
>> OpenWrite[]
= OutputStream[...]
#> Close[%];
#> OpenWrite[BinaryFormat -> True]
= OutputStream[...]
#> Close[%];
"""
mode = 'w'
stream_type = 'OutputStream'
class OpenAppend(_OpenAction):
"""
<dl>
<dt>'OpenAppend["file"]'
<dd>opens a file and returns an OutputStream to which writes are appended.
</dl>
>> OpenAppend[]
= OutputStream[...]
#> Close[%];
#> OpenAppend["MathicsNonExampleFile"]
= OutputStream[MathicsNonExampleFile, ...]
#> DeleteFile["MathicsNonExampleFile"]
"""
mode = 'a'
stream_type = 'OutputStream'
class Get(PrefixOperator):
r"""
<dl>
<dt>'<<name'
<dd>reads a file and evaluates each expression, returning only the last one.
</dl>
>> Put[x + y, "example_file"]
>> <<"example_file"
= x + y
>> Put[x + y, 2x^2 + 4z!, Cos[x] + I Sin[x], "example_file"]
>> <<"example_file"
= Cos[x] + I Sin[x]
#> DeleteFile["example_file"]
>> 40! >> "fourtyfactorial"
>> FilePrint["fourtyfactorial"]
| 815915283247897734345611269596115894272000000000
>> <<"fourtyfactorial"
= 815915283247897734345611269596115894272000000000
#> DeleteFile["fourtyfactorial"]
## TODO: Requires EndPackage implemented
## 'Get' can also load packages:
## >> << "VectorAnalysis`"
#> Get["SomeTypoPackage`"]
: Cannot open SomeTypoPackage`.
= $Failed
## Parser Tests
#> Hold[<< ~/some_example/dir/] // FullForm
= Hold[Get["~/some_example/dir/"]]
#> Hold[<<`/.\-_:$*~?] // FullForm
= Hold[Get["`/.\\-_:$*~?"]]
"""
operator = '<<'
precedence = 720
attributes = ('Protected')
def apply(self, path, evaluation):
'Get[path_String]'
pypath = path.get_string_value()
try:
with mathics_open(pypath, 'r') as f:
result = f.readlines()
except IOError:
evaluation.message('General', 'noopen', path)
return Symbol('$Failed')
try:
parse
ParseError
except NameError:
from mathics.core.parser import parse, ParseError
from mathics.main import wait_for_line
total_input = ""
syntax_error_count = 0
expr = Symbol('Null')
for lineno, tmp in enumerate(result):
total_input += ' ' + tmp
if wait_for_line(total_input):
continue
try:
expr = parse(total_input, evaluation.definitions)
except: # FIXME: something weird is going on here
syntax_error_count += 1
if syntax_error_count <= 4:
print "Syntax Error (line {0} of {1})".format(
lineno + 1, pypath)
if syntax_error_count == 4:
print "Supressing further syntax errors in {0}".format(
pypath)
else:
if expr is not None:
expr = expr.evaluate(evaluation)
total_input = ""
if total_input != "":
# TODO:
# evaluation.message('Syntax', 'sntue', 'line {0} of
# {1}'.format(lineno, pypath))
print 'Unexpected end of file (probably unfinished expression)'
print ' (line {0} of "{1}").'.format(lineno, pypath)
return Symbol('Null')
return expr
def apply_default(self, filename, evaluation):
'Get[filename_]'
expr = Expression('Get', filename)
evaluation.message('General', 'stream', filename)
return expr
class Put(BinaryOperator):
"""
<dl>
<dt>'$expr$ >> $filename$'
<dd>write $expr$ to a file.
<dt>'Put[$expr1$, $expr2$, ..., $"filename"$]'
<dd>write a sequence of expressions to a file.
</dl>
>> 40! >> "fourtyfactorial"
>> FilePrint["fourtyfactorial"]
| 815915283247897734345611269596115894272000000000
#> 40! >> fourtyfactorial
#> FilePrint["fourtyfactorial"]
| 815915283247897734345611269596115894272000000000
#> Put[40!, fourtyfactorial]
: fourtyfactorial is not string, InputStream[], or OutputStream[]
= 815915283247897734345611269596115894272000000000 >> fourtyfactorial
## FIXME: final line should be
## = Put[815915283247897734345611269596115894272000000000, fourtyfactorial]
#> DeleteFile["fourtyfactorial"]
>> Put[50!, "fiftyfactorial"]
>> FilePrint["fiftyfactorial"]
| 30414093201713378043612608166064768844377641568960512000000000000
#> DeleteFile["fiftyfactorial"]
>> Put[10!, 20!, 30!, "factorials"]
>> FilePrint["factorials"]
| 3628800
| 2432902008176640000
| 265252859812191058636308480000000
#> DeleteFile["factorials"]
=
#> Put[x + y, 2x^2 + 4z!, Cos[x] + I Sin[x], "example_file"]
#> FilePrint["example_file"]
| x + y
| 2*x^2 + 4*z!
| Cos[x] + I*Sin[x]
#> DeleteFile["example_file"]
## writing to dir
#> x >> /var/
: Cannot open /var/.
= x >> /var/
## writing to read only file
#> x >> /proc/uptime
: Cannot open /proc/uptime.
= x >> /proc/uptime
## writing to full file
#> x >> /dev/full
: No space left on device.
"""
operator = '>>'
precedence = 30
def apply(self, exprs, filename, evaluation):
'Put[exprs___, filename_String]'
instream = Expression('OpenWrite', filename).evaluate(evaluation)
if len(instream.leaves) == 2:
name, n = instream.leaves
else:
return # opening failed
result = self.apply_input(exprs, name, n, evaluation)
Expression('Close', instream).evaluate(evaluation)
return result
def apply_input(self, exprs, name, n, evaluation):
'Put[exprs___, OutputStream[name_, n_]]'
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('Put', 'openx', Expression(
'OutputSteam', name, n))
return
text = [evaluation.format_output(Expression(
'InputForm', expr)) for expr in exprs.get_sequence()]
text = u'\n'.join(text) + u'\n'
text.encode('utf-8')
stream.write(text)
return Symbol('Null')
def apply_default(self, exprs, filename, evaluation):
'Put[exprs___, filename_]'
expr = Expression('Put', exprs, filename)
evaluation.message('General', 'stream', filename)
return expr
class PutAppend(BinaryOperator):
"""
<dl>
<dt>'$expr$ >>> $filename$'
<dd>append $expr$ to a file.
<dt>'PutAppend[$expr1$, $expr2$, ..., $"filename"$]'
<dd>write a sequence of expressions to a file.
</dl>
>> Put[50!, "factorials"]
>> FilePrint["factorials"]
| 30414093201713378043612608166064768844377641568960512000000000000
>> PutAppend[10!, 20!, 30!, "factorials"]
>> FilePrint["factorials"]
| 30414093201713378043612608166064768844377641568960512000000000000
| 3628800
| 2432902008176640000
| 265252859812191058636308480000000
>> 60! >>> "factorials"
>> FilePrint["factorials"]
| 30414093201713378043612608166064768844377641568960512000000000000
| 3628800
| 2432902008176640000
| 265252859812191058636308480000000
| 8320987112741390144276341183223364380754172606361245952449277696409600000000000000
>> "string" >>> factorials
>> FilePrint["factorials"]
| 30414093201713378043612608166064768844377641568960512000000000000
| 3628800
| 2432902008176640000
| 265252859812191058636308480000000
| 8320987112741390144276341183223364380754172606361245952449277696409600000000000000
| "string"
#> DeleteFile["factorials"];
## writing to dir
#> x >>> /var/
: Cannot open /var/.
= x >>> /var/
## writing to read only file
#> x >>> /proc/uptime
: Cannot open /proc/uptime.
= x >>> /proc/uptime
"""
operator = '>>>'
precedence = 30
attributes = ('Protected')
def apply(self, exprs, filename, evaluation):
'PutAppend[exprs___, filename_String]'
instream = Expression('OpenAppend', filename).evaluate(evaluation)
if len(instream.leaves) == 2:
name, n = instream.leaves
else:
return # opening failed
result = self.apply_input(exprs, name, n, evaluation)
Expression('Close', instream).evaluate(evaluation)
return result
def apply_input(self, exprs, name, n, evaluation):
'PutAppend[exprs___, OutputStream[name_, n_]]'
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('Put', 'openx', Expression(
'OutputSteam', name, n))
return
text = [unicode(e.do_format(evaluation, 'System`OutputForm').__str__())
for e in exprs.get_sequence()]
text = u'\n'.join(text) + u'\n'
text.encode('ascii')
stream.write(text)
return Symbol('Null')
def apply_default(self, exprs, filename, evaluation):
'PutAppend[exprs___, filename_]'
expr = Expression('PutAppend', exprs, filename)
evaluation.message('General', 'stream', filename)
return expr
class FindFile(Builtin):
"""
<dl>
<dt>'FindFile[$name$]'
<dd>searches '$Path' for the given filename.
</dl>
>> FindFile["ExampleData/sunflowers.jpg"]
= ...
>> FindFile["VectorAnalysis`"]
= ...
>> FindFile["VectorAnalysis`VectorAnalysis`"]
= ...
#> FindFile["SomeTypoPackage`"]
= $Failed
"""
attributes = ('Protected')
messages = {
'string': 'String expected at position 1 in `1`.',
}
def apply(self, name, evaluation):
'FindFile[name_]'
py_name = name.to_python()
if not (isinstance(py_name, basestring) and
py_name[0] == py_name[-1] == '"'):
evaluation.message(
'FindFile', 'string', Expression('FindFile', name))
return
py_name = py_name[1:-1]
result = path_search(py_name)
if result is None:
return Symbol('$Failed')
return String(os.path.abspath(result))
class FileNameSplit(Builtin):
"""
<dl>
<dt>'FileNameSplit["$filenams$"]'
<dd>splits a $filename$ into a list of parts.
</dl>
>> FileNameSplit["example/path/file.txt"]
= {example, path, file.txt}
#> FileNameSplit["example/path", OperatingSystem -> x]
: The value of option OperatingSystem -> x must be one of "MacOSX", "Windows", or "Unix".
= {example, path}
"""
attributes = ('Protected')
options = {
'OperatingSystem': '$OperatingSystem',
}
messages = {
'ostype': ('The value of option OperatingSystem -> `1` '
'must be one of "MacOSX", "Windows", or "Unix".'),
}
def apply(self, filename, evaluation, options):
'FileNameSplit[filename_String, OptionsPattern[FileNameSplit]]'
path = filename.to_python()[1:-1]
operating_system = options[
'System`OperatingSystem'].evaluate(evaluation).to_python()
if operating_system not in ['"MacOSX"', '"Windows"', '"Unix"']:
evaluation.message('FileNameSplit', 'ostype', options[
'System`OperatingSystem'])
if os.name == 'posix':
operating_system = 'Unix'
elif os.name == 'nt':
operating_system = 'Windows'
elif os.name == 'os2':
operating_system = 'MacOSX'
else:
return
# TODO Implement OperatingSystem Option
result = []
while path not in ['', SYS_ROOT_DIR]:
path, ext = os.path.split(path)
if ext != '':
result.insert(0, ext)
return from_python(result)
class FileNameJoin(Builtin):
"""
<dl>
<dt>'FileNameJoin[{"$dir_1$", "$dir_2$", ...}]'
<dd>joins the $dir_i$ togeather into one path.
</dl>
>> FileNameJoin[{"dir1", "dir2", "dir3"}]
= ...
>> FileNameJoin[{"dir1", "dir2", "dir3"}, OperatingSystem -> "Unix"]
= dir1/dir2/dir3
## TODO
## #> FileNameJoin[{"dir1", "dir2", "dir3"}, OperatingSystem -> "Windows"]
## = dir1\dir2\dir3
"""
attributes = ('Protected')
options = {
'OperatingSystem': '$OperatingSystem',
}
messages = {
'ostype': ('The value of option OperatingSystem -> `1` '
'must be one of "MacOSX", "Windows", or "Unix".'),
}
def apply(self, pathlist, evaluation, options):
'FileNameJoin[pathlist_?ListQ, OptionsPattern[FileNameJoin]]'
py_pathlist = pathlist.to_python()
if not all(isinstance(p, basestring) and p[0] == p[-1] == '"'
for p in py_pathlist):
return
py_pathlist = [p[1:-1] for p in py_pathlist]
operating_system = options[
'System`OperatingSystem'].evaluate(evaluation).to_python()
if operating_system not in ['"MacOSX"', '"Windows"', '"Unix"']:
evaluation.message('FileNameSplit', 'ostype', options[
'System`OperatingSystem'])
if os.name == 'posix':
operating_system = 'Unix'
elif os.name == 'nt':
operating_system = 'Windows'
elif os.name == 'os2':
operating_system = 'MacOSX'
else:
return
# TODO Implement OperatingSystem Option
result = os.path.join(*py_pathlist)
return from_python(result)
class FileExtension(Builtin):
"""
<dl>
<dt>'FileExtension["$file$"]'
<dd>gives the extension for the specified file name.
</dl>
>> FileExtension["file.txt"]
= txt
>> FileExtension["file.tar.gz"]
= gz
#> FileExtension["file."]
=
#> FileExtension["file"]
=
"""
attributes = ('Protected')
options = {
'OperatingSystem': '$OperatingSystem',
}
def apply(self, filename, evaluation, options):
'FileExtension[filename_String, OptionsPattern[FileExtension]]'
path = filename.to_python()[1:-1]
filename_base, filename_ext = os.path.splitext(path)
filename_ext = filename_ext.lstrip('.')
return from_python(filename_ext)
class FileBaseName(Builtin):
"""
<dl>
<dt>'FileBaseName["$file$"]'
<dd>gives the base name for the specified file name.
</dl>
>> FileBaseName["file.txt"]
= file
>> FileBaseName["file.tar.gz"]
= file.tar
#> FileBaseName["file."]
= file
#> FileBaseName["file"]
= file
"""
attributes = ('Protected')
options = {
'OperatingSystem': '$OperatingSystem',
}
def apply(self, filename, evaluation, options):
'FileBaseName[filename_String, OptionsPattern[FileBaseName]]'
path = filename.to_python()[1:-1]
filename_base, filename_ext = os.path.splitext(path)
return from_python(filename_base)
class DirectoryName(Builtin):
"""
<dl>
<dt>'DirectoryName["$name$"]'
<dd>extracts the directory name from a filename.
</dl>
>> DirectoryName["a/b/c"]
= a/b
>> DirectoryName["a/b/c", 2]
= a
#> DirectoryName["a/b/c", 3] // InputForm
= ""
#> DirectoryName[""] // InputForm
= ""
#> DirectoryName["a/b/c", x]
: Positive machine-sized integer expected at position 2 in DirectoryName[a/b/c, x].
= DirectoryName[a/b/c, x]
#> DirectoryName["a/b/c", -1]
: Positive machine-sized integer expected at position 2 in DirectoryName[a/b/c, -1].
= DirectoryName[a/b/c, -1]
#> DirectoryName[x]
: String expected at position 1 in DirectoryName[x].
= DirectoryName[x]
"""
attributes = ('Protected')
options = {
'OperatingSystem': '$OperatingSystem',
}
messages = {
'string': 'String expected at position 1 in `1`.',
'intpm': ('Positive machine-sized integer expected at '
'position 2 in `1`.'),
}
def apply(self, name, n, evaluation, options):
'DirectoryName[name_, n_, OptionsPattern[DirectoryName]]'
if n is None:
expr = Expression('DirectoryName', name)
py_n = 1
else:
expr = Expression('DirectoryName', name, n)
py_n = n.to_python()
if not (isinstance(py_n, (int, long)) and py_n > 0):
evaluation.message('DirectoryName', 'intpm', expr)
return
py_name = name.to_python()
if not (isinstance(py_name, basestring) and
py_name[0] == py_name[-1] == '"'):
evaluation.message('DirectoryName', 'string', expr)
return
py_name = py_name[1:-1]
result = py_name
for i in range(py_n):
(result, tmp) = os.path.split(result)
return String(result)
def apply1(self, name, evaluation, options):
'DirectoryName[name_, OptionsPattern[DirectoryName]]'
return self.apply(name, None, evaluation, options)
class FileNameDepth(Builtin):
"""
<dl>
<dt>'FileNameDepth["$name$"]'
<dd>gives the number of path parts in the given filename.
</dl>
>> FileNameDepth["a/b/c"]
= 3
>> FileNameDepth["a/b/c/"]
= 3
#> FileNameDepth[x]
= FileNameDepth[x]
#> FileNameDepth[$RootDirectory]
= 0
"""
attributes = ('Protected')
options = {
'OperatingSystem': '$OperatingSystem',
}
rules = {
'FileNameDepth[name_String]': 'Length[FileNameSplit[name]]',
}
class AbsoluteFileName(Builtin):
"""
<dl>
<dt>'AbsoluteFileName["$name$"]'
<dd>returns the absolute version of the given filename.
</dl>
>> AbsoluteFileName["ExampleData/sunflowers.jpg"]
= ...
#> AbsoluteFileName["Some/NonExistant/Path.ext"]
: File not found during AbsoluteFileName[Some/NonExistant/Path.ext].
= $Failed
"""
attributes = ('Protected')
messages = {
'fstr': (
'File specification x is not a string of one or more characters.'),
'nffil': 'File not found during `1`.',
}
def apply(self, name, evaluation):
'AbsoluteFileName[name_]'
py_name = name.to_python()
if not (isinstance(py_name, basestring) and
py_name[0] == py_name[-1] == '"'):
evaluation.message('AbsoluteFileName', 'fstr', name)
return
py_name = py_name[1:-1]
result = path_search(py_name)
if result is None:
evaluation.message('AbsoluteFileName', 'nffil',
Expression('AbsoluteFileName', name))
return Symbol('$Failed')
return String(os.path.abspath(result))
class ExpandFileName(Builtin):
"""
<dl>
<dt>'ExpandFileName["$name$"]'
<dd>expands $name$ to an absolute filename for your system.
</dl>
>> ExpandFileName["ExampleData/sunflowers.jpg"]
= ...
"""
attributes = ('Protected')
messages = {
'string': 'String expected at position 1 in `1`.',
}
def apply(self, name, evaluation):
'ExpandFileName[name_]'
py_name = name.to_python()
if not (isinstance(py_name, basestring) and
py_name[0] == py_name[-1] == '"'):
evaluation.message('ExpandFileName', 'string',
Expression('ExpandFileName', name))
return
py_name = py_name[1:-1]
return String(os.path.abspath(py_name))
class ReadList(Read):
"""
<dl>
<dt>'ReadList["$file$"]'
<dd>Reads all the expressions until the end of file.
<dt>'ReadList["$file$", $type$]'
<dd>Reads objects of a specified type until the end of file.
<dt>'ReadList["$file$", {$type1$, $type2$, ...}]'
<dd>Reads a sequence of specified types until the end of file.
</dl>
>> ReadList[StringToStream["a 1 b 2"], {Word, Number}]
= {{a, 1}, {b, 2}}
>> str = StringToStream["abc123"];
>> ReadList[str]
= {abc123}
>> InputForm[%]
= {"abc123"}
#> ReadList[str, "Invalid"]
: Invalid is not a valid format specification.
= ReadList[..., Invalid]
#> Close[str];
#> ReadList[StringToStream["a 1 b 2"], {Word, Number}, 1]
= {{a, 1}}
"""
# TODO
"""
#> ReadList[StringToStream["a 1 b 2"], {Word, Number}, -1]
: Non-negative machine-sized integer expected at position 3 in ReadList[InputStream[String, ...], {Word, Number}, -1].
= ReadList[InputStream[String, ...], {Word, Number}, -1]
"""
# TODO: Expression type
"""
#> ReadList[StringToStream["123 45 x y"], Expression]
= {5535 x y}
"""
# TODO: Accept newlines in input
"""
>> ReadList[StringToStream["123\nabc"]]
= {123, abc}
>> InputForm[%]
= {123, abc}
"""
rules = {
'ReadList[stream_]': 'ReadList[stream, Expression]',
}
attributes = ('Protected')
options = {
'NullRecords': 'False',
'NullWords': 'False',
'RecordSeparators': '{"\r\n", "\n", "\r"}',
'TokenWords': '{}',
'WordSeparators': '{" ", "\t"}',
}
def apply(self, channel, types, evaluation, options):
'ReadList[channel_, types_, OptionsPattern[ReadList]]'
# Options
# TODO: Implement extra options
# py_options = self.check_options(options)
# null_records = py_options['NullRecords']
# null_words = py_options['NullWords']
# record_separators = py_options['RecordSeparators']
# token_words = py_options['TokenWords']
# word_separators = py_options['WordSeparators']
result = []
while True:
tmp = super(ReadList, self).apply(
channel, types, evaluation, options)
if tmp == Symbol('$Failed'):
return
if tmp == Symbol('EndOfFile'):
break
result.append(tmp)
return from_python(result)
def apply_m(self, channel, types, m, evaluation, options):
'ReadList[channel_, types_, m_, OptionsPattern[ReadList]]'
# Options
# TODO: Implement extra options
# py_options = self.check_options(options)
# null_records = py_options['NullRecords']
# null_words = py_options['NullWords']
# record_separators = py_options['RecordSeparators']
# token_words = py_options['TokenWords']
# word_separators = py_options['WordSeparators']
py_m = m.get_int_value()
if py_m < 0:
evaluation.message(
'ReadList', 'intnm', Expression('ReadList', channel, types, m))
return
result = []
for i in range(py_m):
tmp = super(ReadList, self).apply(
channel, types, evaluation, options)
if tmp == Symbol('$Failed'):
return
if tmp.to_python() == 'EndOfFile':
break
result.append(tmp)
return from_python(result)
class FilePrint(Builtin):
"""
<dl>
<dt>'FilePrint[$file$]'
<dd>prints the raw contents of $file$.
</dl>
#> exp = Sin[1];
#> FilePrint[exp]
: File specification Sin[1] is not a string of one or more characters.
= FilePrint[Sin[1]]
## Return $Failed on special files
#> FilePrint["/dev/zero"]
= $Failed
#> FilePrint["/dev/random"]
= $Failed
#> FilePrint["/dev/null"]
= $Failed
#> FilePrint["somenonexistantpath_h47sdmk^&h4"]
: Cannot open somenonexistantpath_h47sdmk^&h4.
= FilePrint[somenonexistantpath_h47sdmk^&h4]
#> FilePrint[""]
: File specification is not a string of one or more characters.
= FilePrint[]
"""
messages = {
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
}
options = {
'CharacterEncoding': '$CharacterEncoding',
'RecordSeparators': '{"\r\n", "\n", "\r"}',
'WordSeparators': '{" ", "\t"}',
}
attributes = ('Protected')
def apply(self, path, evaluation, options):
'FilePrint[path_ OptionsPattern[FilePrint]]'
pypath = path.to_python()
if not (isinstance(pypath, basestring) and
pypath[0] == pypath[-1] == '"' and len(pypath) > 2):
evaluation.message('FilePrint', 'fstr', path)
return
pypath = path_search(pypath[1:-1])
# Options
record_separators = options['System`RecordSeparators'].to_python()
assert isinstance(record_separators, list)
assert all(isinstance(s, basestring) and s[
0] == s[-1] == '"' for s in record_separators)
record_separators = [s[1:-1] for s in record_separators]
if pypath is None:
evaluation.message('General', 'noopen', path)
return
if not os.path.isfile(pypath):
return Symbol("$Failed")
try:
with mathics_open(pypath, 'r') as f:
result = f.read()
except IOError:
evaluation.message('General', 'noopen', path)
return
result = [result]
for sep in record_separators:
result = [item for res in result for item in res.split(sep)]
if result[-1] == '':
result = result[:-1]
for res in result:
evaluation.print_out(from_python(res))
return Symbol('Null')
class Close(Builtin):
"""
<dl>
<dt>'Close[$stream$]'
<dd>closes an input or output stream.
</dl>
>> Close[StringToStream["123abc"]]
= String
>> Close[OpenWrite[]]
= ...
#> Streams[] == (Close[OpenWrite[]]; Streams[])
= True
#> Close["abc"]
: abc is not open.
= Close[abc]
#> strm = OpenWrite[];
#> Close[strm];
#> Quiet[Close[strm]]
= Close[OutputStream[...]]
"""
attributes = ('Protected')
messages = {
'closex': '`1`.',
}
def apply(self, channel, evaluation):
'Close[channel_]'
if (channel.has_form('InputStream', 2) or # noqa
channel.has_form('OutputStream', 2)):
[name, n] = channel.get_leaves()
stream = _lookup_stream(n.get_int_value())
else:
stream = None
if stream is None or stream.closed:
evaluation.message('General', 'openx', channel)
return
try:
stream.close()
except IOError as err:
evaluation.message('Close', 'closex', err.strerror)
return name
class StreamPosition(Builtin):
"""
<dl>
<dt>'StreamPosition[$stream$]'
<dd>returns the current position in a stream as an integer.
</dl>
>> str = StringToStream["Mathics is cool!"]
= ...
>> Read[str, Word]
= Mathics
>> StreamPosition[str]
= 7
"""
attributes = ('Protected')
def apply_input(self, name, n, evaluation):
'StreamPosition[InputStream[name_, n_]]'
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('General', 'openx', name)
return
return from_python(stream.tell())
def apply_output(self, name, n, evaluation):
'StreamPosition[OutputStream[name_, n_]]'
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('General', 'openx', name)
return
return from_python(stream.tell())
def apply_default(self, stream, evaluation):
'StreamPosition[stream_]'
evaluation.message('General', 'stream', stream)
return
class SetStreamPosition(Builtin):
"""
<dl>
<dt>'SetStreamPosition[$stream$, $n$]'
<dd>sets the current position in a stream.
</dl>
>> str = StringToStream["Mathics is cool!"]
= ...
>> SetStreamPosition[str, 8]
= 8
>> Read[str, Word]
= is
#> SetStreamPosition[str, -5]
: Python2 cannot handle negative seeks.
= 10
>> SetStreamPosition[str, Infinity]
= 16
"""
# TODO: Seeks beyond stream should return stmrng message
"""
#> SetStreamPosition[str, 40]
= ERROR_MESSAGE_HERE
"""
messages = {
'int': 'Integer expected at position 2 in `1`.',
'stmrng': (
'Cannot set the current point in stream `1` to position `2`. The '
'requested position exceeds the number of characters in the file'),
'python2': 'Python2 cannot handle negative seeks.', # FIXME: Python3?
}
attributes = ('Protected')
def apply_input(self, name, n, m, evaluation):
'SetStreamPosition[InputStream[name_, n_], m_]'
stream = _lookup_stream(n.get_int_value())
if stream is None or stream.closed:
evaluation.message('General', 'openx', name)
return
if not stream.seekable:
raise NotImplementedError
seekpos = m.to_python()
if not (isinstance(seekpos, int) or seekpos == float('inf')):
evaluation.message('SetStreamPosition', 'stmrng',
Expression('InputStream', name, n), m)
return
try:
if seekpos == float('inf'):
stream.seek(0, 2)
else:
if seekpos < 0:
stream.seek(seekpos, 2)
else:
stream.seek(seekpos)
except IOError:
evaluation.message('SetStreamPosition', 'python2')
return from_python(stream.tell())
def apply_output(self, name, n, m, evaluation):
'SetStreamPosition[OutputStream[name_, n_], m_]'
return self.apply_input(name, n, m, evaluation)
def apply_default(self, stream, evaluation):
'SetStreamPosition[stream_]'
evaluation.message('General', 'stream', stream)
return
class Skip(Read):
"""
<dl>
<dt>'Skip[$stream$, $type$]'
<dd>skips ahead in an input steream by one object of the specified $type$.
<dt>'Skip[$stream$, $type$, $n$]'
<dd>skips ahead in an input steream by $n$ objects of the specified $type$.
</dl>
>> str = StringToStream["a b c d"];
>> Read[str, Word]
= a
>> Skip[str, Word]
>> Read[str, Word]
= c
#> Close[str];
>> str = StringToStream["a b c d"];
>> Read[str, Word]
= a
>> Skip[str, Word, 2]
>> Read[str, Word]
= d
#> Skip[str, Word]
= EndOfFile
#> Close[str];
"""
rules = {
'Skip[InputStream[name_, n_], types_]':
'Skip[InputStream[name, n], types, 1]',
}
messages = {
'intm':
'Non-negative machine-sized integer expected at position 3 in `1`',
}
options = {
'AnchoredSearch': 'False',
'IgnoreCase': 'False',
'WordSearch': 'False',
'RecordSeparators': '{"\r\n", "\n", "\r"}',
'WordSeparators': '{" ", "\t"}',
}
attributes = ('Protected')
def apply(self, name, n, types, m, evaluation, options):
'Skip[InputStream[name_, n_], types_, m_, OptionsPattern[Skip]]'
channel = Expression('InputStream', name, n)
# Options
# TODO Implement extra options
# py_options = self.check_options(options)
# null_records = py_options['NullRecords']
# null_words = py_options['NullWords']
# record_separators = py_options['RecordSeparators']
# token_words = py_options['TokenWords']
# word_separators = py_options['WordSeparators']
py_m = m.to_python()
if not (isinstance(py_m, int) and py_m > 0):
evaluation.message('Skip', 'intm', Expression(
'Skip', Expression('InputStream', name, n), types, m))
return
for i in range(py_m):
result = super(Skip, self).apply(
channel, types, evaluation, options)
if result == Symbol('EndOfFile'):
return result
return Symbol('Null')
class Find(Read):
"""
<dl>
<dt>'Find[$stream$, $text$]'
<dd>find the first line in $stream$ that contains $text$.
</dl>
>> str = OpenRead["ExampleData/EinsteinSzilLetter.txt"];
>> Find[str, "uranium"]
= in manuscript, leads me to expect that the element uranium may be turned into
>> Find[str, "uranium"]
= become possible to set up a nuclear chain reaction in a large mass of uranium,
>> Close[str]
= ...
>> str = OpenRead["ExampleData/EinsteinSzilLetter.txt"];
>> Find[str, {"energy", "power"} ]
= a new and important source of energy in the immediate future. Certain aspects
>> Find[str, {"energy", "power"} ]
= by which vast amounts of power and large quantities of new radium-like
>> Close[str]
= ...
"""
attributes = ('Protected')
options = {
'AnchoredSearch': 'False',
'IgnoreCase': 'False',
'WordSearch': 'False',
'RecordSeparators': '{"\r\n", "\n", "\r"}',
'WordSeparators': '{" ", "\t"}',
}
def apply(self, name, n, text, evaluation, options):
'Find[InputStream[name_, n_], text_, OptionsPattern[Find]]'
# Options
# TODO Implement extra options
# py_options = self.check_options(options)
# anchored_search = py_options['AnchoredSearch']
# ignore_case = py_options['IgnoreCase']
# word_search = py_options['WordSearch']
# record_separators = py_options['RecordSeparators']
# word_separators = py_options['WordSeparators']
py_text = text.to_python()
channel = Expression('InputStream', name, n)
if not isinstance(py_text, list):
py_text = [py_text]
if not all(isinstance(t, basestring) and
t[0] == t[-1] == '"' for t in py_text):
evaluation.message(
'Find', 'unknown', Expression('Find', channel, text))
return
py_text = [t[1:-1] for t in py_text]
while True:
tmp = super(Find, self).apply(
channel, Symbol('Record'), evaluation, options)
py_tmp = tmp.to_python()[1:-1]
if py_tmp == 'System`EndOfFile':
evaluation.message(
'Find', 'notfound', Expression('Find', channel, text))
return Symbol("$Failed")
for t in py_text:
if py_tmp.find(t) != -1:
return from_python(py_tmp)
class FindList(Builtin):
"""
<dl>
<dt>'FindList[$file$, $text$]'
<dd>returns a list of all lines in $file$ that contain $text$.
<dt>'FindList[$file$, {$text1$, $text2$, ...}]'
<dd>returns a list of all lines in $file$ that contain any of the specified string.
<dt>'FindList[{$file1$, $file2$, ...}, ...]'
<dd>returns a list of all lines in any of the $filei$ that contain the specified strings.
</dl>
>> str = FindList["ExampleData/EinsteinSzilLetter.txt", "uranium"];
#> Length[str]
= 7
>> FindList["ExampleData/EinsteinSzilLetter.txt", "uranium", 1]
= {in manuscript, leads me to expect that the element uranium may be turned into}
#> FindList["ExampleData/EinsteinSzilLetter.txt", "project"]
= {}
#> FindList["ExampleData/EinsteinSzilLetter.txt", "uranium", 0]
= $Failed
"""
messages = {
'strs':
'String or non-empty list of strings expected at position `1` in `2`.',
'intnm':
'Non-negative machine-sized integer expected at position `1` in `2`.',
}
attributes = ('Protected')
options = {
'AnchoredSearch': 'False',
'IgnoreCase': 'False',
'RecordSeparators': '{"\r\n", "\n", "\r"}',
'WordSearch': 'False',
'WordSeparators': '{" ", "\t"}',
}
# TODO: Extra options AnchoredSearch, IgnoreCase RecordSeparators,
# WordSearch, WordSeparators this is probably best done with a regex
def apply_without_n(self, filename, text, evaluation, options):
'FindList[filename_, text_, OptionsPattern[FindList]]'
return self.apply(filename, text, None, evaluation, options)
def apply(self, filename, text, n, evaluation, options):
'FindList[filename_, text_, n_, OptionsPattern[FindList]]'
py_text = text.to_python()
py_name = filename.to_python()
if n is None:
py_n = None
expr = Expression('FindList', filename, text)
else:
py_n = n.to_python()
expr = Expression('FindList', filename, text, n)
if not isinstance(py_text, list):
py_text = [py_text]
if not isinstance(py_name, list):
py_name = [py_name]
if not all(isinstance(t, basestring) and
t[0] == t[-1] == '"' for t in py_name):
evaluation.message('FindList', 'strs', '1', expr)
return Symbol('$Failed')
if not all(isinstance(t, basestring) and
t[0] == t[-1] == '"' for t in py_text):
evaluation.message('FindList', 'strs', '2', expr)
return Symbol('$Failed')
if not ((isinstance(py_n, int) and py_n >= 0) or py_n is None):
evaluation.message('FindList', 'intnm', '3', expr)
return Symbol('$Failed')
if py_n == 0:
return Symbol('$Failed')
py_text = [t[1:-1] for t in py_text]
py_name = [t[1:-1] for t in py_name]
results = []
for path in py_name:
try:
with mathics_open(path, 'r') as f:
lines = f.readlines()
except IOError:
evaluation.message('General', 'noopen', path)
return
result = []
for line in lines:
for t in py_text:
if line.find(t) != -1:
result.append(line[:-1])
results.append(result)
results = [r for result in results for r in result]
if isinstance(py_n, int):
results = results[:min(py_n, len(results))]
return from_python(results)
class InputStream(Builtin):
"""
<dl>
<dt>'InputStream[$name$, $n$]'
<dd>represents an input stream.
</dl>
>> str = StringToStream["Mathics is cool!"]
= ...
>> Close[str]
= String
"""
attributes = ('Protected')
def apply(self, name, n, evaluation):
'InputStream[name_, n_]'
return
class OutputStream(Builtin):
"""
<dl>
<dt>'OutputStream[$name$, $n$]'
<dd>represents an output stream.
</dl>
>> OpenWrite[]
= ...
>> Close[%]
= ...
"""
attributes = ('Protected')
def apply(self, name, n, evaluation):
'OutputStream[name_, n_]'
return
class StringToStream(Builtin):
"""
<dl>
<dt>'StringToStream[$string$]'
<dd>converts a $string$ to an open input stream.
</dl>
>> strm = StringToStream["abc 123"]
= InputStream[String, ...]
#> Read[strm, Word]
= abc
#> Read[strm, Number]
= 123
#> Close[strm]
= String
"""
attributes = ('Protected')
def apply(self, string, evaluation):
'StringToStream[string_]'
pystring = string.to_python()[1:-1]
stream = io.StringIO(unicode(pystring))
name = Symbol('String')
n = next(NSTREAMS)
result = Expression('InputStream', name, Integer(n))
STREAMS.append(stream)
return result
class Streams(Builtin):
"""
<dl>
<dt>'Streams[]'
<dd>returns a list of all open streams.
</dl>
>> Streams[]
= ...
#> OpenWrite[]
= ...
#> Streams[%[[1]]]
= {OutputStream[...]}
#> Streams["some_nonexistant_name"]
= {}
"""
attributes = ('Protected')
def apply(self, evaluation):
'Streams[]'
return self.apply_name(None, evaluation)
def apply_name(self, name, evaluation):
'Streams[name_String]'
result = []
for n in xrange(len(STREAMS)):
stream = _lookup_stream(n)
if stream is None or stream.closed:
continue
if isinstance(stream, io.StringIO):
head = 'InputStream'
_name = Symbol('String')
else:
mode = stream.mode
if mode in ['r', 'rb']:
head = 'InputStream'
elif mode in ['w', 'a', 'wb', 'ab']:
head = 'OutputStream'
else:
raise ValueError("Unknown mode {0}".format(mode))
_name = String(stream.name)
expr = Expression(head, _name, Integer(n))
if name is None or _name == name:
result.append(expr)
return Expression('List', *result)
class Compress(Builtin):
"""
<dl>
<dt>'Compress[$expr$]'
<dd>gives a compressed string representation of $expr$.
</dl>
>> Compress[N[Pi, 10]]
= eJwz1jM0MTS1NDIzNQEADRsCNw==
"""
attributes = ('Protected')
options = {
'Method': '{}',
}
def apply(self, expr, evaluation, options):
'Compress[expr_, OptionsPattern[Compress]]'
string = expr.format(evaluation, 'System`FullForm')
string = string.boxes_to_text(
evaluation=evaluation, show_string_characters=True)
string = string.encode('utf-8')
# TODO Implement other Methods
result = zlib.compress(string)
result = base64.encodestring(result)
return String(result)
class Uncompress(Builtin):
"""
<dl>
<dt>'Uncompress["$string$"]'
<dd>recovers an expression from a string generated by 'Compress'.
</dl>
>> Compress["Mathics is cool"]
= eJxT8k0sychMLlbILFZIzs/PUQIANFwF1w==
>> Uncompress[%]
= Mathics is cool
>> a = x ^ 2 + y Sin[x] + 10 Log[15];
>> b = Compress[a];
>> Uncompress[b]
= x ^ 2 + y Sin[x] + 10 Log[15]
"""
attributes = ('Protected')
def apply(self, string, evaluation):
'Uncompress[string_String]'
string = string.get_string_value()
string = base64.decodestring(string)
tmp = zlib.decompress(string)
tmp = tmp.decode('utf-8')
try:
expr = parse(tmp, evaluation.definitions)
except NameError:
from mathics.core.parser import parse
expr = parse(tmp, evaluation.definitions)
return expr
class FileByteCount(Builtin):
"""
<dl>
<dt>'FileByteCount[$file$]'
<dd>returns the number of bytes in $file$.
</dl>
>> FileByteCount["ExampleData/sunflowers.jpg"]
= 142286
"""
messages = {
'fstr':
'File specification `1` is not a string of one or more characters.',
}
def apply(self, filename, evaluation):
'FileByteCount[filename_]'
py_filename = filename.to_python()
if not (isinstance(py_filename, basestring) and
py_filename[0] == py_filename[-1] == '"'):
evaluation.message('FileByteCount', 'fstr', filename)
return
py_filename = py_filename[1:-1]
try:
with mathics_open(py_filename, 'rb') as f:
count = 0
tmp = f.read(1)
while tmp != '':
count += 1
tmp = f.read(1)
except IOError:
evaluation.message('General', 'noopen', filename)
return
return from_python(count)
class FileHash(Builtin):
"""
<dl>
<dt>'FileHash[$file$]'
<dd>returns an integer hash for the given $file$.
<dt>'FileHash[$file$, $type$]'
<dd>returns an integer hash of the specified $type$ for the given $file$.</dd>
<dd>The types supported are "MD5", "Adler32", "CRC32", "SHA", "SHA224", "SHA256", "SHA384", and "SHA512".</dd>
</dl>
>> FileHash["ExampleData/sunflowers.jpg"]
= 109937059621979839952736809235486742106
>> FileHash["ExampleData/sunflowers.jpg", "MD5"]
= 109937059621979839952736809235486742106
>> FileHash["ExampleData/sunflowers.jpg", "Adler32"]
= 1607049478
>> FileHash["ExampleData/sunflowers.jpg", "SHA256"]
= 111619807552579450300684600241129773909359865098672286468229443390003894913065
#> FileHash["ExampleData/sunflowers.jpg", "CRC32"]
= 933095683
#> FileHash["ExampleData/sunflowers.jpg", "SHA"]
= 851696818771101405642332645949480848295550938123
#> FileHash["ExampleData/sunflowers.jpg", "SHA224"]
= 8723805623766373862936267623913366865806344065103917676078120867011
#> FileHash["ExampleData/sunflowers.jpg", "SHA384"]
= 28288410602533803613059815846847184383722061845493818218404754864571944356226472174056863474016709057507799332611860
#> FileHash["ExampleData/sunflowers.jpg", "SHA512"]
= 10111462070211820348006107532340854103555369343736736045463376555356986226454343186097958657445421102793096729074874292511750542388324853755795387877480102
#> FileHash["ExampleData/sunflowers.jpg", xyzsymbol]
= FileHash[ExampleData/sunflowers.jpg, xyzsymbol]
#> FileHash["ExampleData/sunflowers.jpg", "xyzstr"]
= FileHash[ExampleData/sunflowers.jpg, xyzstr]
#> FileHash[xyzsymbol]
= FileHash[xyzsymbol]
"""
rules = {
'FileHash[filename_String]': 'FileHash[filename, "MD5"]',
}
attributes = ('Protected', 'ReadProtected')
def apply(self, filename, hashtype, evaluation):
'FileHash[filename_String, hashtype_String]'
py_hashtype = hashtype.to_python()
py_filename = filename.to_python()
# TODO: MD2?
supported_hashes = {
'Adler32': zlib.adler32,
'CRC32': zlib.crc32,
'MD5': lambda s: int(hashlib.md5(s).hexdigest(), 16),
'SHA': lambda s: int(hashlib.sha1(s).hexdigest(), 16),
'SHA224': lambda s: int(hashlib.sha224(s).hexdigest(), 16),
'SHA256': lambda s: int(hashlib.sha256(s).hexdigest(), 16),
'SHA384': lambda s: int(hashlib.sha384(s).hexdigest(), 16),
'SHA512': lambda s: int(hashlib.sha512(s).hexdigest(), 16),
}
py_hashtype = py_hashtype[1:-1]
py_filename = py_filename[1:-1]
hash_func = supported_hashes.get(py_hashtype)
if hash_func is None:
return
try:
with mathics_open(py_filename, 'rb') as f:
dump = f.read()
except IOError:
evaluation.message('General', 'noopen', filename)
return
return from_python(hash_func(dump))
class FileDate(Builtin):
"""
<dl>
<dt>'FileDate[$file$, $types$]'
<dd>returns the time and date at which the file was last modified.
</dl>
>> FileDate["ExampleData/sunflowers.jpg"]
= ...
>> FileDate["ExampleData/sunflowers.jpg", "Access"]
= ...
>> FileDate["ExampleData/sunflowers.jpg", "Creation"]
= ...
>> FileDate["ExampleData/sunflowers.jpg", "Change"]
= ...
>> FileDate["ExampleData/sunflowers.jpg", "Modification"]
= ...
>> FileDate["ExampleData/sunflowers.jpg", "Rules"]
= ...
#> FileDate["MathicsNonExistantExample"]
: File not found during FileDate[MathicsNonExistantExample].
= FileDate[MathicsNonExistantExample]
#> FileDate["MathicsNonExistantExample", "Modification"]
: File not found during FileDate[MathicsNonExistantExample, Modification].
= FileDate[MathicsNonExistantExample, Modification]
#> FileDate["ExampleData/sunflowers.jpg", "Fail"]
: Date type Fail should be "Access", "Modification", "Creation" (Windows only), "Change" (Macintosh and Unix only), or "Rules".
= FileDate[ExampleData/sunflowers.jpg, Fail]
"""
messages = {
'nffil': 'File not found during `1`.',
'datetype': ('Date type Fail should be "Access", "Modification", '
'"Creation" (Windows only), '
'"Change" (Macintosh and Unix only), or "Rules".'),
}
rules = {
'FileDate[filepath_String, "Rules"]':
'''{"Access" -> FileDate[filepath, "Access"],
"Creation" -> FileDate[filepath, "Creation"],
"Change" -> FileDate[filepath, "Change"],
"Modification" -> FileDate[filepath, "Modification"]}''',
}
attributes = ('Protected')
def apply(self, path, timetype, evaluation):
'FileDate[path_, timetype_]'
py_path = path_search(path.to_python()[1:-1])
if py_path is None:
if timetype is None:
evaluation.message(
'FileDate', 'nffil', Expression('FileDate', path))
else:
evaluation.message('FileDate', 'nffil', Expression(
'FileDate', path, timetype))
return
if timetype is None:
time_type = 'Modification'
else:
time_type = timetype.to_python()[1:-1]
if time_type == 'Access':
result = os.path.getatime(py_path)
elif time_type == 'Creation':
if os.name == 'posix':
return Expression('Missing', 'NotApplicable')
result = os.path.getctime(py_path)
elif time_type == 'Change':
if os.name != 'posix':
return Expression('Missing', 'NotApplicable')
result = os.path.getctime(py_path)
elif time_type == 'Modification':
result = os.path.getmtime(py_path)
else:
evaluation.message('FileDate', 'datetype')
return
# Offset for system epoch
epochtime = Expression('AbsoluteTime', time.strftime(
"%Y-%m-%d %H:%M",
time.gmtime(0))).to_python(n_evaluation=evaluation)
result += epochtime
return Expression('DateList', from_python(result))
def apply_default(self, path, evaluation):
'FileDate[path_]'
return self.apply(path, None, evaluation)
class SetFileDate(Builtin):
"""
<dl>
<dt>'SetFileDate["$file$"]'
<dd>set the file access and modification dates of $file$ to the current date.
<dt>'SetFileDate["$file$", $date$]'
<dd>set the file access and modification dates of $file$ to the specified date list.
<dt>'SetFileDate["$file$", $date$, "$type$"]'
<dd>set the file date of $file$ to the specified date list.
The "$type$" can be one of "$Access$", "$Creation$", "$Modification$", or 'All'.
</dl>
Create a temporary file (for example purposes)
>> tmpfilename = $TemporaryDirectory <> "/tmp0";
>> Close[OpenWrite[tmpfilename]];
>> SetFileDate[tmpfilename, {2000, 1, 1, 0, 0, 0.}, "Access"];
>> FileDate[tmpfilename, "Access"]
= {2000, 1, 1, 0, 0, 0.}
#> SetFileDate[tmpfilename, {2001, 1, 1, 0, 0, 0.}];
#> FileDate[tmpfilename, "Access"]
= {2001, 1, 1, 0, 0, 0.}
#> SetFileDate[tmpfilename]
#> FileDate[tmpfilename, "Access"]
= {...}
#> DeleteFile[tmpfilename]
#> SetFileDate["MathicsNonExample"]
: File not found during SetFileDate[MathicsNonExample].
= $Failed
"""
messages = {
'fstr': ('File specification `1` is not a string of one or '
'more characters.'),
'nffil': 'File not found during `1`.',
'fdate': ('Date specification should be either the number of seconds '
'since January 1, 1900 or a {y, m, d, h, m, s} list.'),
'datetype': ('Date type a should be "Access", "Modification", '
'"Creation" (Windows only), or All.'),
'nocreationunix': ('The Creation date of a file cannot be set on '
'Macintosh or Unix.'),
}
attributes = ('Protected')
def apply(self, filename, datelist, attribute, evaluation):
'SetFileDate[filename_, datelist_, attribute_]'
py_filename = filename.to_python()
if datelist is None:
py_datelist = Expression(
'DateList').evaluate(evaluation).to_python()
expr = Expression('SetFileDate', filename)
else:
py_datelist = datelist.to_python()
if attribute is None:
py_attr = 'All'
if datelist is not None:
expr = Expression('SetFileDate', filename, datelist)
else:
py_attr = attribute.to_python()
expr = Expression('SetFileDate', filename, datelist, attribute)
# Check filename
if not (isinstance(py_filename, basestring) and
py_filename[0] == py_filename[-1] == '"'):
evaluation.message('SetFileDate', 'fstr', filename)
return
py_filename = path_search(py_filename[1:-1])
if py_filename is None:
evaluation.message('SetFileDate', 'nffil', expr)
return Symbol('$Failed')
# Check datelist
if not (isinstance(py_datelist, list) and len(py_datelist) == 6 and
all(isinstance(d, int) for d in py_datelist[:-1]) and
isinstance(py_datelist[-1], float)):
evaluation.message('SetFileDate', 'fdate', expr)
# Check attribute
if py_attr not in ['"Access"', '"Creation"', '"Modification"', 'All']:
evaluation.message('SetFileDate', 'datetype')
return
epochtime = Expression('AbsoluteTime', time.strftime(
"%Y-%m-%d %H:%M", time.gmtime(0))).evaluate(evaluation).to_python()
stattime = Expression('AbsoluteTime', from_python(py_datelist))
stattime = stattime.to_python(n_evaluation=evaluation)
stattime -= epochtime
try:
os.stat(py_filename)
if py_attr == '"Access"':
os.utime(py_filename, (
stattime, os.path.getatime(py_filename)))
if py_attr == '"Creation"':
if os.name == 'posix':
evaluation.message('SetFileDate', 'nocreationunix')
return Symbol('$Failed')
else:
# TODO: Note: This is windows only
return Symbol('$Failed')
if py_attr == '"Modification"':
os.utime(py_filename, (os.path.getatime(
py_filename), stattime))
if py_attr == 'All':
os.utime(py_filename, (stattime, stattime))
except OSError as e:
print e
# evaluation.message(...)
return Symbol('$Failed')
return Symbol('Null')
def apply_1arg(self, filename, evaluation):
'SetFileDate[filename_]'
return self.apply(filename, None, None, evaluation)
def apply_2arg(self, filename, datelist, evaluation):
'SetFileDate[filename_, datelist_]'
return self.apply(filename, datelist, None, evaluation)
class CopyFile(Builtin):
"""
<dl>
<dt>'CopyFile["$file1$", "$file2$"]'
<dd>copies $file1$ to $file2$.
</dl>
>> CopyFile["ExampleData/sunflowers.jpg", "MathicsSunflowers.jpg"]
= MathicsSunflowers.jpg
>> DeleteFile["MathicsSunflowers.jpg"]
"""
messages = {
'filex': 'Cannot overwrite existing file `1`.',
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
'nffil': 'File not found during `1`.',
}
attributes = ('Protected')
def apply(self, source, dest, evaluation):
'CopyFile[source_, dest_]'
py_source = source.to_python()
py_dest = dest.to_python()
# Check filenames
if not (isinstance(py_source, basestring) and
py_source[0] == py_source[-1] == '"'):
evaluation.message('CopyFile', 'fstr', source)
return
if not (isinstance(py_dest, basestring) and
py_dest[0] == py_dest[-1] == '"'):
evaluation.message('CopyFile', 'fstr', dest)
return
py_source = py_source[1:-1]
py_dest = py_dest[1:-1]
py_source = path_search(py_source)
if py_source is None:
evaluation.message('CopyFile', 'filex', source)
return Symbol('$Failed')
if os.path.exists(py_dest):
evaluation.message('CopyFile', 'filex', dest)
return Symbol('$Failed')
try:
shutil.copy(py_source, py_dest)
except IOError:
evaluation.message('CopyFile', 'nffil', Expression(
'CopyFile', source, dest))
return Symbol('$Failed')
return dest
class RenameFile(Builtin):
"""
<dl>
<dt>'RenameFile["$file1$", "$file2$"]'
<dd>renames $file1$ to $file2$.
</dl>
>> CopyFile["ExampleData/sunflowers.jpg", "MathicsSunflowers.jpg"]
= MathicsSunflowers.jpg
>> RenameFile["MathicsSunflowers.jpg", "MathicsSunnyFlowers.jpg"]
= MathicsSunnyFlowers.jpg
>> DeleteFile["MathicsSunnyFlowers.jpg"]
"""
messages = {
'filex': 'Cannot overwrite existing file `1`.',
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
'nffil': 'File not found during `1`.',
}
attributes = ('Protected')
def apply(self, source, dest, evaluation):
'RenameFile[source_, dest_]'
py_source = source.to_python()
py_dest = dest.to_python()
# Check filenames
if not (isinstance(py_source, basestring) and
py_source[0] == py_source[-1] == '"'):
evaluation.message('RenameFile', 'fstr', source)
return
if not (isinstance(py_dest, basestring) and
py_dest[0] == py_dest[-1] == '"'):
evaluation.message('RenameFile', 'fstr', dest)
return
py_source = py_source[1:-1]
py_dest = py_dest[1:-1]
py_source = path_search(py_source)
if py_source is None:
evaluation.message('RenameFile', 'filex', source)
return Symbol('$Failed')
if os.path.exists(py_dest):
evaluation.message('RenameFile', 'filex', dest)
return Symbol('$Failed')
try:
shutil.move(py_source, py_dest)
except IOError:
evaluation.message('RenameFile', 'nffil', dest)
return Symbol('$Failed')
return dest
class DeleteFile(Builtin):
"""
<dl>
<dt>'Delete["$file$"]'
<dd>deletes $file$.
<dt>'Delete[{"$file1$", "$file2$", ...}]'
<dd>deletes a list of files.
</dl>
>> CopyFile["ExampleData/sunflowers.jpg", "MathicsSunflowers.jpg"];
>> DeleteFile["MathicsSunflowers.jpg"]
>> CopyFile["ExampleData/sunflowers.jpg", "MathicsSunflowers1.jpg"];
>> CopyFile["ExampleData/sunflowers.jpg", "MathicsSunflowers2.jpg"];
>> DeleteFile[{"MathicsSunflowers1.jpg", "MathicsSunflowers2.jpg"}]
"""
messages = {
'filex': 'Cannot overwrite existing file `1`.',
'strs': ('String or non-empty list of strings expected at '
'position `1` in `2`.'),
'nffil': 'File not found during `1`.',
}
attributes = ('Protected')
def apply(self, filename, evaluation):
'DeleteFile[filename_]'
py_path = filename.to_python()
if not isinstance(py_path, list):
py_path = [py_path]
py_paths = []
for path in py_path:
# Check filenames
if not (isinstance(path, basestring) and
path[0] == path[-1] == '"'):
evaluation.message('DeleteFile', 'strs', filename,
Expression('DeleteFile', filename))
return
path = path[1:-1]
path = path_search(path)
if path is None:
evaluation.message('DeleteFile', 'nffil', Expression(
'DeleteFile', filename))
return Symbol('$Failed')
py_paths.append(path)
for path in py_paths:
try:
os.remove(path)
except OSError:
return Symbol('$Failed')
return Symbol('Null')
class DirectoryStack(Builtin):
"""
<dl>
<dt>'DirectoryStack[]'
<dd>returns the directory stack.
</dl>
>> DirectoryStack[]
= ...
"""
attributes = ('Protected')
def apply(self, evaluation):
'DirectoryStack[]'
global DIRECTORY_STACK
return from_python(DIRECTORY_STACK)
class Directory(Builtin):
"""
<dl>
<dt>'Directory[]'
<dd>returns the current working directory.
</dl>
>> Directory[]
= ...
"""
attributes = ('Protected')
def apply(self, evaluation):
'Directory[]'
result = os.getcwd()
return String(result)
class ParentDirectory(Builtin):
"""
<dl>
<dt>'ParentDirectory[]'
<dd>returns the parent of the current working directory.
<dt>'ParentDirectory["$dir$"]'
<dd>returns the parent $dir$.
</dl>
>> ParentDirectory[]
= ...
"""
rules = {
'ParentDirectory[]': 'ParentDirectory[Directory[]]',
}
messages = {
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
}
attributes = ('Protected')
def apply(self, path, evaluation):
'ParentDirectory[path_]'
if not isinstance(path, String):
evaluation.message('ParentDirectory', 'fstr', path)
return
pypath = path.to_python()[1:-1]
result = os.path.abspath(os.path.join(pypath, os.path.pardir))
return String(result)
class SetDirectory(Builtin):
"""
<dl>
<dt>'SetDirectory[$dir$]'
<dd>sets the current working directory to $dir$.
</dl>
>> SetDirectory[]
= ...
#> SetDirectory["MathicsNonExample"]
: Cannot set current directory to MathicsNonExample.
= $Failed
"""
rules = {
'SetDirectory[]': 'SetDirectory[$HomeDirectory]',
}
messages = {
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
'cdir': 'Cannot set current directory to `1`.',
}
attributes = ('Protected')
def apply(self, path, evaluation):
'SetDirectory[path_]'
if not isinstance(path, String):
evaluation.message('SetDirectory', 'fstr', path)
return
py_path = path.__str__()[1:-1]
py_path = path_search(py_path)
if py_path is None:
evaluation.message('SetDirectory', 'cdir', path)
return Symbol('$Failed')
os.chdir(py_path)
DIRECTORY_STACK.append(os.getcwd())
return String(os.getcwd())
class ResetDirectory(Builtin):
"""
<dl>
<dt>'ResetDirectory[]'
<dd>pops a directory from the directory stack and returns it.
</dl>
>> ResetDirectory[]
= ...
"""
messages = {
'dtop': 'Directory stack is empty.',
}
attributes = ('Protected')
def apply(self, evaluation):
'ResetDirectory[]'
global DIRECTORY_STACK
try:
tmp = DIRECTORY_STACK.pop()
except IndexError:
tmp = os.getcwd()
evaluation.message('ResetDirectory', 'dtop')
else:
os.chdir(tmp)
return String(tmp)
class CreateDirectory(Builtin):
"""
<dl>
<dt>'CreateDirectory["$dir$"]'
<dd>creates a directory called $dir$.
<dt>'CreateDirectory[]'
<dd>creates a temporary directory.
</dl>
>> dir = CreateDirectory[]
= ...
#> DirectoryQ[dir]
= True
#> DeleteDirectory[dir]
"""
attributes = ('Listable', 'Protected')
options = {
'CreateIntermediateDirectories': 'True',
}
messages = {
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
'nffil': "File not found during `1`.",
'filex': "`1` already exists.",
}
def apply(self, dirname, evaluation, options):
'CreateDirectory[dirname_, OptionsPattern[CreateDirectory]]'
expr = Expression('CreateDirectory', dirname)
py_dirname = dirname.to_python()
if not (isinstance(py_dirname, basestring) and
py_dirname[0] == py_dirname[-1] == '"'):
evaluation.message('CreateDirectory', 'fstr', dirname)
return
py_dirname = py_dirname[1:-1]
if os.path.isdir(py_dirname):
evaluation.message(
'CreateDirectory', 'filex', os.path.abspath(py_dirname))
return
os.mkdir(py_dirname)
if not os.path.isdir(py_dirname):
evaluation.message('CreateDirectory', 'nffil', expr)
return
return String(os.path.abspath(py_dirname))
def apply_empty(self, evaluation, options):
'CreateDirectory[OptionsPattern[CreateDirectory]]'
dirname = tempfile.mkdtemp(prefix='m', dir=TMP_DIR)
return String(dirname)
class DeleteDirectory(Builtin):
"""
<dl>
<dt>'DeleteDirectory["$dir$"]'
<dd>deletes a directory called $dir$.
</dl>
>> dir = CreateDirectory[]
= ...
>> DeleteDirectory[dir]
>> DirectoryQ[dir]
= False
#> Quiet[DeleteDirectory[dir]]
= $Failed
"""
attributes = ('Protected')
options = {
'DeleteContents': 'False',
}
messages = {
'strs': ('String or non-empty list of strings expected at '
'position 1 in `1`.'),
'nodir': 'Directory `1` not found.',
'dirne': 'Directory `1` not empty.',
'optx': 'Unknown option `1` in `2`',
'idcts': 'DeleteContents expects either True or False.', # MMA Bug
}
def apply(self, dirname, evaluation, options):
'DeleteDirectory[dirname_, OptionsPattern[DeleteDirectory]]'
expr = Expression('DeleteDirectory', dirname)
py_dirname = dirname.to_python()
delete_contents = options['System`DeleteContents'].to_python()
if not delete_contents in [True, False]:
evaluation.message('DeleteDirectory', 'idcts')
return
if not (isinstance(py_dirname, basestring) and
py_dirname[0] == py_dirname[-1] == '"'):
evaluation.message('DeleteDirectory', 'strs', expr)
return
py_dirname = py_dirname[1:-1]
if not os.path.isdir(py_dirname):
evaluation.message('DeleteDirectory', 'nodir', dirname)
return Symbol('$Failed')
if delete_contents:
shutil.rmtree(py_dirname)
else:
if os.listdir(py_dirname) != []:
evaluation.message('DeleteDirectory', 'dirne', dirname)
return Symbol('$Failed')
os.rmdir(py_dirname)
return Symbol('Null')
class CopyDirectory(Builtin):
"""
<dl>
<dt>'CopyDirectory["$dir1$", "$dir2$"]'
<dd>copies directory $dir1$ to $dir2$.
</dl>
"""
attributes = ('Protected')
messages = {
'argr': 'called with `1` argument; 2 arguments are expected.',
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
'filex': 'Cannot overwrite existing file `1`.',
'nodir': 'Directory `1` not found.',
}
def apply(self, dirs, evaluation):
'CopyDirectory[dirs__]'
seq = dirs.get_sequence()
if len(seq) != 2:
evaluation.message('CopyDirectory', 'argr', len(seq))
return
(dir1, dir2) = (s.to_python() for s in seq)
if not (isinstance(dir1, basestring) and dir1[0] == dir1[-1] == '"'):
evaluation.message('CopyDirectory', 'fstr', seq[0])
return
dir1 = dir1[1:-1]
if not (isinstance(dir2, basestring) and dir2[0] == dir2[-1] == '"'):
evaluation.message('CopyDirectory', 'fstr', seq[1])
return
dir2 = dir2[1:-1]
if not os.path.isdir(dir1):
evaluation.message('CopyDirectory', 'nodir', seq[0])
return Symbol('$Failed')
if os.path.isdir(dir2):
evaluation.message('CopyDirectory', 'filex', seq[1])
return Symbol('$Failed')
shutil.copytree(dir1, dir2)
return String(os.path.abspath(dir2))
class RenameDirectory(Builtin):
"""
<dl>
<dt>'RenameyDirectory["$dir1$", "$dir2$"]'
<dd>renames directory $dir1$ to $dir2$.
</dl>
"""
attributes = ('Protected')
messages = {
'argr': 'called with `1` argument; 2 arguments are expected.',
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
'filex': 'Cannot overwrite existing file `1`.',
'nodir': 'Directory `1` not found.',
}
def apply(self, dirs, evaluation):
'RenameDirectory[dirs__]'
seq = dirs.get_sequence()
if len(seq) != 2:
evaluation.message('RenameDirectory', 'argr', len(seq))
return
(dir1, dir2) = (s.to_python() for s in seq)
if not (isinstance(dir1, basestring) and dir1[0] == dir1[-1] == '"'):
evaluation.message('RenameDirectory', 'fstr', seq[0])
return
dir1 = dir1[1:-1]
if not (isinstance(dir2, basestring) and dir2[0] == dir2[-1] == '"'):
evaluation.message('RenameDirectory', 'fstr', seq[1])
return
dir2 = dir2[1:-1]
if not os.path.isdir(dir1):
evaluation.message('RenameDirectory', 'nodir', seq[0])
return Symbol('$Failed')
if os.path.isdir(dir2):
evaluation.message('RenameDirectory', 'filex', seq[1])
return Symbol('$Failed')
shutil.move(dir1, dir2)
return String(os.path.abspath(dir2))
class FileType(Builtin):
"""
<dl>
<dt>'FileType["$file$"]'
<dd>returns the type of a file, from 'File', 'Directory' or 'None'.
</dl>
>> FileType["ExampleData/sunflowers.jpg"]
= File
>> FileType["ExampleData"]
= Directory
>> FileType["ExampleData/nonexistant"]
= None
#> FileType[x]
: File specification x is not a string of one or more characters.
= FileType[x]
"""
messages = {
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
}
attributes = ('Protected')
def apply(self, filename, evaluation):
'FileType[filename_]'
if not isinstance(filename, String):
evaluation.message('FileType', 'fstr', filename)
return
path = filename.to_python()[1:-1]
path = path_search(path)
if path is None:
return Symbol('None')
if os.path.isfile(path):
return Symbol('File')
else:
return Symbol('Directory')
class FileExistsQ(Builtin):
"""
<dl>
<dt>'FileExistsQ["$file$"]'
<dd>returns 'True' if $file$ exists and 'False' otherwise.
</dl>
>> FileExistsQ["ExampleData/sunflowers.jpg"]
= True
>> FileExistsQ["ExampleData/sunflowers.png"]
= False
"""
messages = {
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
}
attributes = ('Protected')
def apply(self, filename, evaluation):
'FileExistsQ[filename_]'
path = filename.to_python()
if not (isinstance(path, basestring) and path[0] == path[-1] == '"'):
evaluation.message('FileExistsQ', 'fstr', filename)
return
path = path[1:-1]
path = path_search(path)
if path is None:
return Symbol('False')
return Symbol('True')
class DirectoryQ(Builtin):
"""
<dl>
<dt>'DirectoryQ["$name$"]'
<dd>returns 'True' if the directory called $name$ exists and 'False' otherwise.
</dl>
>> DirectoryQ["ExampleData/"]
= True
>> DirectoryQ["ExampleData/MythicalSubdir/"]
= False
#> DirectoryQ["ExampleData"]
= True
#> DirectoryQ["ExampleData/MythicalSubdir/NestedDir/"]
= False
"""
messages = {
'fstr': ('File specification `1` is not a string of '
'one or more characters.'),
}
attributes = ('Protected')
def apply(self, pathname, evaluation):
'DirectoryQ[pathname_]'
path = pathname.to_python()
if not (isinstance(path, basestring) and path[0] == path[-1] == '"'):
evaluation.message('DirectoryQ', 'fstr', pathname)
return
path = path[1:-1]
path = path_search(path)
if path is not None and os.path.isdir(path):
return Symbol('True')
return Symbol('False')
class Needs(Builtin):
"""
<dl>'Needs["context`"]'
<dd>loads the specified context if not already in '$Packages'.
</dl>
>> Needs["VectorAnalysis`"]
#> Needs["VectorAnalysis`"]
#> Needs["SomeFakePackageOrTypo`"]
: Cannot open SomeFakePackageOrTypo`.
: Context SomeFakePackageOrTypo` was not created when Needs was evaluated.
= $Failed
#> Needs["VectorAnalysis"]
: Invalid context specified at position 1 in Needs[VectorAnalysis]. A context must consist of valid symbol names separated by and ending with `.
= Needs[VectorAnalysis]
## --- VectorAnalysis ---
#> Needs["VectorAnalysis`"]
#> DotProduct[{1,2,3}, {4,5,6}]
= 32
#> DotProduct[{-1.4, 0.6, 0.2}, {0.1, 0.6, 1.7}]
= 0.56
#> CrossProduct[{1,2,3}, {4,5,6}]
= {-3, 6, -3}
#> CrossProduct[{-1.4, 0.6, 0.2}, {0.1, 0.6, 1.7}]
= {0.9, 2.4, -0.9}
#> ScalarTripleProduct[{-2,3,1},{0,4,0},{-1,3,3}]
= -20
#> ScalarTripleProduct[{-1.4,0.6,0.2}, {0.1,0.6,1.7}, {0.7,-1.5,-0.2}]
= -2.79
#> CoordinatesToCartesian[{2, Pi, 3}, Spherical]
= {0, 0, -2}
#> CoordinatesFromCartesian[%, Spherical]
= {2, Pi, 0}
#> CoordinatesToCartesian[{2, Pi, 3}, Cylindrical]
= {-2, 0, 3}
#> CoordinatesFromCartesian[%, Cylindrical]
= {2, Pi, 3}
## Needs Sin/Cos exact value (PR #100) for these tests to pass
## #> CoordinatesToCartesian[{2, Pi / 4, Pi / 3}, Spherical]
## = {Sqrt[2] / 2, Sqrt[6] / 2, Sqrt[2]}
## #> CoordinatesFromCartesian[%, Spherical]
## = {2, Pi / 4, Pi / 3}
## #> CoordinatesToCartesian[{2, Pi / 4, -1}, Cylindrical]
## = {Sqrt[2], Sqrt[2], -1}
## #> CoordinatesFromCartesian[%, Cylindrical]
## = {2, Pi / 4, -1}
#> CoordinatesToCartesian[{0.27, 0.51, 0.92}, Cylindrical]
= {0.235641017064352841, 0.131807856658385023, 0.92}
#> CoordinatesToCartesian[{0.27, 0.51, 0.92}, Spherical]
= {0.0798518563676219116, 0.10486654429093224, 0.235641017064352841}
#> Coordinates[]
= {Xx, Yy, Zz}
#> Coordinates[Spherical]
= {Rr, Ttheta, Pphi}
#> SetCoordinates[Cylindrical]
= Cylindrical[Rr, Ttheta, Zz]
#> Coordinates[]
= {Rr, Ttheta, Zz}
#> CoordinateSystem
= Cylindrical
#> Parameters[]
= {}
#> CoordinateRanges[]
## = {0 <= Rr < Infinity, -Pi < Ttheta <= Pi, -Infinity < Zz < Infinity}
= {0 <= Rr && Rr < Infinity, -Pi < Ttheta && Ttheta <= Pi, -Infinity < Zz < Infinity}
#> CoordinateRanges[Cartesian]
= {-Infinity < Xx < Infinity, -Infinity < Yy < Infinity, -Infinity < Zz < Infinity}
#> ScaleFactors[Cartesian]
= {1, 1, 1}
#> ScaleFactors[Spherical]
= {1, Rr, Rr Sin[Ttheta]}
#> ScaleFactors[Cylindrical]
= {1, Rr, 1}
#> ScaleFactors[{2, 1, 3}, Cylindrical]
= {1, 2, 1}
#> JacobianDeterminant[Cartesian]
= 1
#> JacobianDeterminant[Spherical]
= Rr ^ 2 Sin[Ttheta]
#> JacobianDeterminant[Cylindrical]
= Rr
#> JacobianDeterminant[{2, 1, 3}, Cylindrical]
= 2
#> JacobianMatrix[Cartesian]
= {{1, 0, 0}, {0, 1, 0}, {0, 0, 1}}
#> JacobianMatrix[Spherical]
= {{Cos[Pphi] Sin[Ttheta], Rr Cos[Pphi] Cos[Ttheta], -Rr Sin[Pphi] Sin[Ttheta]}, {Sin[Pphi] Sin[Ttheta], Rr Cos[Ttheta] Sin[Pphi], Rr Cos[Pphi] Sin[Ttheta]}, {Cos[Ttheta], -Rr Sin[Ttheta], 0}}
#> JacobianMatrix[Cylindrical]
= {{Cos[Ttheta], -Rr Sin[Ttheta], 0}, {Sin[Ttheta], Rr Cos[Ttheta], 0}, {0, 0, 1}}
"""
messages = {
'ctx': ('Invalid context specified at position `2` in `1`. '
'A context must consist of valid symbol names separated by '
'and ending with `3`.'),
'nocont': 'Context `1` was not created when Needs was evaluated.',
}
def apply(self, context, evaluation):
'Needs[context_String]'
if not valid_context_name(context.get_string_value()):
evaluation.message('Needs', 'ctx', Expression(
'Needs', context), 1, '`')
return
# TODO
# if Expression('MemberQ', context, Symbol('$Packages')).is_true():
# # Already loaded
# return Symbol('Null')
result = Expression('Get', context).evaluate(evaluation)
if result == Symbol('$Failed'):
evaluation.message('Needs', 'nocont', context)
return Symbol('$Failed')
return Symbol('Null')
|
benley/Mathics
|
mathics/builtin/files.py
|
Python
|
gpl-3.0
| 138,763
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2013 Código Sur Sociedad Civil.
# All rights reserved.
#
# This file is part of Cyclope.
#
# Cyclope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Cyclope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from django.conf import settings
from django.contrib import admin
from django import forms
from django.db import models
from cyclope import settings as cyc_settings
from cyclope.core.collections.admin import CollectibleAdmin
from cyclope.admin import BaseContentAdmin
from models import *
from filebrowser.fields import FileBrowseField
from filebrowser.base import FileObject
from filebrowser.functions import handle_file_upload, convert_filename
# This is a standard ClearableFileInput.
# We just need to "translate" some data from the FileBrowseField
class CustomFileInput(forms.widgets.ClearableFileInput):
def render(self, name, value, attrs=None):
# FileBrowseField has no url attribute so we set url to url_full
if type(value) == FileObject:
value.url = value.url_full
return super(CustomFileInput, self).render(name, value, attrs)
class MediaAdmin(CollectibleAdmin, BaseContentAdmin):
inlines = CollectibleAdmin.inlines + BaseContentAdmin.inlines
search_fields = ('name', 'description', )
list_filter = CollectibleAdmin.list_filter + ('creation_date',)
def get_form(self, request, obj=None, **kwargs):
media_file_field = self.model.media_file_field
image_file_field = self.model.image_file_field
form = super(MediaAdmin, self).get_form(request, obj, **kwargs)
simple_widgets = False
if not request.user.is_superuser:
simple_widgets = True
form.base_fields[media_file_field].widget = CustomFileInput()
if image_file_field:
form.base_fields[image_file_field].widget = CustomFileInput()
form.simple = simple_widgets
if obj:
form.media_file_initial = getattr(obj, media_file_field)
# This is a hack; if the field is required it will fail validation
# when the user does not upload a file.
# TODO(nicoechaniz): implement proper validation for this case
form.base_fields[media_file_field].required = False
if image_file_field:
form.image_file_initial = getattr(obj, image_file_field)
form.base_fields[image_file_field].required = False
return form
has_thumbnail = [Picture, MovieClip, FlashMovie]
def media_admin_factory(media_model):
class MediaLibraryForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MediaLibraryForm, self).__init__(*args, **kwargs)
author_choices = [('', '------')]
for author in Author.objects.all():
if media_model in [ctype.model_class()
for ctype in author.content_types.all()]:
author_choices.append((author.id, author.name))
self.fields['author'].choices = author_choices
def save(self, *args, **kwargs):
# We override the standard behavior because we've overriden the FileBrowseField
# with a simple ClearableFileInput
if self.simple:
abs_paths = {}
instance = super(MediaLibraryForm, self).save(commit=False)
image_file_field = instance.image_file_field
file_fields = [ instance.media_file_field ]
if image_file_field:
file_fields.append(image_file_field)
for f_field in file_fields:
folder = media_model._meta.get_field_by_name(f_field)[0].directory
abs_paths[f_field] = os.path.join(
settings.MEDIA_ROOT, settings.FILEBROWSER_DIRECTORY, folder
)
if f_field in self.files.keys():
f = self.files[f_field]
f.name = convert_filename(f.name)
name = handle_file_upload(abs_paths[f_field], f)
setattr(instance, f_field, name)
else:
# TODO(nicoechaniz): this is ugly! refactor
if f_field in ["image", "still"]:
if hasattr(self, "image_file_initial"):
setattr(instance, f_field, self.image_file_initial)
else:
if hasattr(self, "media_file_initial"):
setattr(instance, f_field, self.media_file_initial)
instance.save()
return instance
else:
return super(MediaLibraryForm, self).save(*args, **kwargs)
class Meta:
model = media_model
if media_model in has_thumbnail:
list_display = ['name', 'published', 'thumbnail']
else:
list_display = ['name', 'published']
list_display += CollectibleAdmin.list_display
return type('%sAdmin' % media_model.__name__,
(MediaAdmin,),
{'form': MediaLibraryForm, 'list_display': list_display})
admin.site.register(Picture, media_admin_factory(Picture))
admin.site.register(SoundTrack, media_admin_factory(SoundTrack))
admin.site.register(MovieClip, media_admin_factory(MovieClip))
admin.site.register(Document, media_admin_factory(Document))
admin.site.register(FlashMovie, media_admin_factory(FlashMovie))
admin.site.register(RegularFile, media_admin_factory(RegularFile))
admin.site.register(ExternalContent, media_admin_factory(ExternalContent))
|
MauHernandez/cyclope
|
cyclope/apps/medialibrary/admin.py
|
Python
|
gpl-3.0
| 6,248
|
"""
Copyright (C) 2009,2014
Andreas Engelbredt Dalsgaard <andreas.dalsgaard@gmail.com>
Martin Toft <mt@martintoft.dk>
Mads Chr. Olesen <mchro@cs.aau.dk>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. """
def get_index_of_last_ident(node):
last_index = node.leaf
#parse out entire name (follow dots)
curnode = node
while len(curnode.children) == 2 and curnode.children[1].type == 'Identifier':
curnode = curnode.children[1]
last_index = curnode.leaf
if last_index == None:
return []
else:
return last_index.children
def get_last_name_from_complex_identifier(n):
"""Follow the children of a complex identifier node, i.e.
"a.b.c.d" to just return "d"
"""
full_str = get_full_name_from_complex_identifier(n)
if '.' in full_str:
return full_str.rsplit('.',1)[1] #FIXME this could be done without constructing the full string first
else:
return full_str
""" Takes an identifier and return the full name:
e.g., myidentifier.someotheridentifier.nestedidentifier.
"""
def get_full_name_from_complex_identifier(identifierNode):
id_str = identifierNode.children[0]
#parse out entire name (follow dots)
curnode = identifierNode
while len(curnode.children) == 2 and curnode.children[1].type == 'Identifier':
curnode = curnode.children[1]
id_str += '.' + curnode.children[0]
return id_str
""" Takes an identifier and return the list of names:
e.g., ['myidentifier', 'someotheridentifier', 'nestedidentifier']
"""
def get_name_list_from_complex_identifier(identifierNode):
n = identifierNode
names = [n.children[0]]
cur = n
while len(cur.children) == 2 and \
cur.children[1].type == 'Identifier':
cur = cur.children[1]
names.append(cur.children[0])
return names
|
yzh89/pyuppaal
|
pyuppaal/ulp/util.py
|
Python
|
gpl-3.0
| 2,478
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2014 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Tests for management commands.
"""
from django.test import TestCase
from weblate.trans.tests.test_models import RepoTestCase
from weblate.trans.models import SubProject
from django.core.management import call_command
from django.core.management.base import CommandError
import django
# Django 1.5 changes behavior here
if django.VERSION >= (1, 5):
COMMAND_EXCEPTION = CommandError
else:
COMMAND_EXCEPTION = SystemExit
class ImportProjectTest(RepoTestCase):
def test_import(self):
project = self.create_project()
call_command(
'import_project',
'test',
self.repo_path,
'master',
'**/*.po',
)
# We should have loaded four subprojects
self.assertEqual(project.subproject_set.count(), 4)
def test_import_po(self):
project = self.create_project()
call_command(
'import_project',
'test',
self.repo_path,
'master',
'**/*.po',
file_format='po'
)
# We should have loaded four subprojects
self.assertEqual(project.subproject_set.count(), 4)
def test_import_invalid(self):
project = self.create_project()
self.assertRaises(
COMMAND_EXCEPTION,
call_command,
'import_project',
'test',
self.repo_path,
'master',
'**/*.po',
file_format='INVALID'
)
# We should have loaded none subprojects
self.assertEqual(project.subproject_set.count(), 0)
def test_import_aresource(self):
project = self.create_project()
call_command(
'import_project',
'test',
self.repo_path,
'master',
'**/values-*/strings.xml',
file_format='aresource',
base_file_template='android/values/strings.xml',
)
# We should have loaded one subproject
self.assertEqual(project.subproject_set.count(), 1)
def test_import_aresource_format(self):
project = self.create_project()
call_command(
'import_project',
'test',
self.repo_path,
'master',
'**/values-*/strings.xml',
file_format='aresource',
base_file_template='%s/values/strings.xml',
)
# We should have loaded one subproject
self.assertEqual(project.subproject_set.count(), 1)
def test_re_import(self):
project = self.create_project()
call_command(
'import_project',
'test',
self.repo_path,
'master',
'**/*.po',
)
# We should have loaded four subprojects
self.assertEqual(project.subproject_set.count(), 4)
call_command(
'import_project',
'test',
self.repo_path,
'master',
'**/*.po',
)
# We should load no more subprojects
self.assertEqual(project.subproject_set.count(), 4)
def test_import_against_existing(self):
'''
Test importing with a weblate:// URL
'''
android = self.create_android()
project = android.project
self.assertEqual(project.subproject_set.count(), 1)
call_command(
'import_project',
project.slug,
'weblate://%s/%s' % (project.slug, android.slug),
'master',
'**/*.po',
)
# We should have loaded five subprojects
self.assertEqual(project.subproject_set.count(), 5)
def test_import_missing_project(self):
'''
Test of correct handling of missing project.
'''
self.assertRaises(
COMMAND_EXCEPTION,
call_command,
'import_project',
'test',
self.repo_path,
'master',
'**/*.po',
)
def test_import_missing_wildcard(self):
'''
Test of correct handling of missing wildcard.
'''
self.create_project()
self.assertRaises(
COMMAND_EXCEPTION,
call_command,
'import_project',
'test',
self.repo_path,
'master',
'*/*.po',
)
class BasicCommandTest(TestCase):
def test_versions(self):
call_command('list_versions')
class PeriodicCommandTest(RepoTestCase):
def setUp(self):
super(PeriodicCommandTest, self).setUp()
self.create_subproject()
def test_cleanup(self):
call_command(
'cleanuptrans'
)
def test_update_index(self):
# Test the command
call_command(
'update_index'
)
def test_list_checks(self):
call_command(
'list_ignored_checks'
)
call_command(
'list_ignored_checks',
list_all=True
)
call_command(
'list_ignored_checks',
count=10
)
class CheckGitTest(RepoTestCase):
'''
Base class for handling tests of WeblateCommand
based commands.
'''
command_name = 'checkgit'
def setUp(self):
super(CheckGitTest, self).setUp()
self.create_subproject()
def do_test(self, *args, **kwargs):
call_command(
self.command_name,
*args,
**kwargs
)
def test_all(self):
self.do_test(
all=True,
)
def test_project(self):
self.do_test(
'test',
)
def test_subproject(self):
self.do_test(
'test/test',
)
def test_nonexisting_project(self):
self.assertRaises(
COMMAND_EXCEPTION,
self.do_test,
'notest',
)
def test_nonexisting_subproject(self):
self.assertRaises(
COMMAND_EXCEPTION,
self.do_test,
'test/notest',
)
class CommitPendingTest(CheckGitTest):
command_name = 'commit_pending'
class CommitGitTest(CheckGitTest):
command_name = 'commitgit'
class PushGitTest(CheckGitTest):
command_name = 'pushgit'
class LoadTest(CheckGitTest):
command_name = 'loadpo'
class UpdateChecksTest(CheckGitTest):
command_name = 'updatechecks'
class UpdateGitTest(CheckGitTest):
command_name = 'updategit'
class RebuildIndexTest(CheckGitTest):
command_name = 'rebuild_index'
def test_all_clean(self):
self.do_test(
all=True,
clean=True,
)
class LockTranslationTest(CheckGitTest):
command_name = 'lock_translation'
class UnLockTranslationTest(CheckGitTest):
command_name = 'unlock_translation'
class LockingCommandTest(RepoTestCase):
'''
Test locking and unlocking.
'''
def setUp(self):
super(LockingCommandTest, self).setUp()
self.create_subproject()
def test_locking(self):
subproject = SubProject.objects.all()[0]
self.assertFalse(
SubProject.objects.filter(locked=True).exists()
)
call_command(
'lock_translation',
'{0}/{1}'.format(
subproject.project.slug,
subproject.slug,
)
)
self.assertTrue(
SubProject.objects.filter(locked=True).exists()
)
call_command(
'unlock_translation',
'{0}/{1}'.format(
subproject.project.slug,
subproject.slug,
)
)
self.assertFalse(
SubProject.objects.filter(locked=True).exists()
)
class BenchmarkCommandTest(RepoTestCase):
'''
Benchmarking test.
'''
def setUp(self):
super(BenchmarkCommandTest, self).setUp()
self.create_subproject()
def test_benchmark(self):
call_command(
'benchmark', 'test', 'weblate://test/test', 'po/*.po'
)
|
paour/weblate
|
weblate/trans/tests/test_commands.py
|
Python
|
gpl-3.0
| 8,853
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Team:
# J Phani Mahesh <phanimahesh@gmail.com>
# Barneedhar (jokerdino) <barneedhar@ubuntu.com>
# Amith KK <amithkumaran@gmail.com>
# Georgi Karavasilev <motorslav@gmail.com>
# Sam Tran <samvtran@gmail.com>
# Sam Hewitt <hewittsamuel@gmail.com>
# Angel Araya <al.arayaq@gmail.com>
#
# Description:
# A One-stop configuration tool for Unity.
#
# Legal Stuff:
#
# This file is a part of Unity Tweak Tool
#
# Unity Tweak Tool is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# Unity Tweak Tool is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, see <https://www.gnu.org/licenses/gpl-3.0.txt>
from UnityTweakTool.section.skeletonpage import Section,Tab
from UnityTweakTool.elements.fontbutton import FontButton
from UnityTweakTool.elements.cbox import ComboBox
from UnityTweakTool.elements.spin import SpinButton
from UnityTweakTool.elements.radio import Radio
from UnityTweakTool.elements.checkbox import CheckBox
from UnityTweakTool.section.spaghetti.theme import Themesettings as SpaghettiThemeSettings
from UnityTweakTool.elements.option import Option,HandlerObject
from collections import defaultdict
Appearance =Section(ui='appearance.ui',id='nb_themesettings')
#=============== THEME ==========================
#=============== ICONS ==========================
#=============== CURSOR =========================
#=============== FONTS ==========================
font_default= FontButton({
'id' : 'font_default',
'builder' : Appearance.builder,
'schema' : 'org.gnome.desktop.interface',
'path' : None,
'key' : 'font-name',
'type' : 'string'
})
font_document= FontButton({
'id' : 'font_document',
'builder' : Appearance.builder,
'schema' : 'org.gnome.desktop.interface',
'path' : None,
'key' : 'document-font-name',
'type' : 'string'
})
font_monospace= FontButton({
'id' : 'font_monospace',
'builder' : Appearance.builder,
'schema' : 'org.gnome.desktop.interface',
'path' : None,
'key' : 'monospace-font-name',
'type' : 'string'
})
font_window_title= FontButton({
'id' : 'font_window_title',
'builder' : Appearance.builder,
'schema' : 'org.gnome.desktop.wm.preferences',
'path' : None,
'key' : 'titlebar-font',
'type' : 'string'
})
cbox_antialiasing=ComboBox({
'id' : 'cbox_antialiasing',
'builder' : Appearance.builder,
'schema' : 'org.gnome.settings-daemon.plugins.xsettings',
'path' : None,
'key' : 'antialiasing',
'type' : 'string',
'map' : {'none':0,'grayscale':1,'rgba':2}
})
cbox_hinting=ComboBox({
'id' : 'cbox_hinting',
'builder' : Appearance.builder,
'schema' : 'org.gnome.settings-daemon.plugins.xsettings',
'path' : None,
'key' : 'hinting',
'type' : 'string',
'map' : {'none':0,'slight':1,'medium':2,'full':3}
})
spin_textscaling=SpinButton({
'id' : 'spin_textscaling',
'builder': Appearance.builder,
'schema' : 'org.gnome.desktop.interface',
'path' : None,
'key' : 'text-scaling-factor',
'type' : 'double',
'min' : 0.50,
'max' : 3.00
})
Fonts=Tab([font_default,
font_document,
font_monospace,
font_window_title,
cbox_antialiasing,
cbox_hinting,
spin_textscaling])
#========== WINDOW CONTROLS =====================
radio_left=Radio({
'id' : 'radio_left',
'builder' : Appearance.builder,
'schema' : 'org.gnome.desktop.wm.preferences',
'path' : None,
'key' : 'button-layout',
'type' : 'string',
'group' : 'radio_left',
'value' : 'close,minimize,maximize:',
'dependants': []
})
radio_right=Radio({
'id' : 'radio_right',
'builder' : Appearance.builder,
'schema' : 'org.gnome.desktop.wm.preferences',
'path' : None,
'key' : 'button-layout',
'type' : 'string',
'group' : 'radio_right',
'value' : ':minimize,maximize,close',
'dependants': []
})
WindowControls=Tab([radio_left,
radio_right])
# Pass in the id of restore defaults button to enable it.
Fonts.enable_restore('b_theme_font_reset')
WindowControls.enable_restore('b_window_control_reset')
# Each page must be added using add_page
Appearance.add_page(Fonts)
# XXX : Disabled since the implementation is inadequate
# Appearance.add_page(WindowControls)
themesettings=HandlerObject(SpaghettiThemeSettings(Appearance.builder))
Appearance.add_page(themesettings)
# After all pages are added, the section needs to be registered to start listening for events
Appearance.register()
|
jokerdino/unity-tweak-tool
|
UnityTweakTool/section/appearance.py
|
Python
|
gpl-3.0
| 5,247
|
from requests import request, ConnectionError
from social.utils import module_member, parse_qs
from social.exceptions import AuthFailed
class BaseAuth(object):
"""A django.contrib.auth backend that authenticates the user based on
a authentication provider response"""
name = '' # provider name, it's stored in database
supports_inactive_user = False # Django auth
ID_KEY = None
EXTRA_DATA = None
REQUIRES_EMAIL_VALIDATION = False
def __init__(self, strategy=None, redirect_uri=None, *args, **kwargs):
self.strategy = strategy
self.redirect_uri = redirect_uri
self.data = {}
if strategy:
self.data = self.strategy.request_data()
self.redirect_uri = self.strategy.absolute_uri(
self.redirect_uri
)
def setting(self, name, default=None):
"""Return setting value from strategy"""
return self.strategy.setting(name, default=default, backend=self)
def auth_url(self):
"""Must return redirect URL to auth provider"""
raise NotImplementedError('Implement in subclass')
def auth_html(self):
"""Must return login HTML content returned by provider"""
raise NotImplementedError('Implement in subclass')
def auth_complete(self, *args, **kwargs):
"""Completes loging process, must return user instance"""
raise NotImplementedError('Implement in subclass')
def process_error(self, data):
"""Process data for errors, raise exception if needed.
Call this method on any override of auth_complete."""
pass
def authenticate(self, *args, **kwargs):
"""Authenticate user using social credentials
Authentication is made if this is the correct backend, backend
verification is made by kwargs inspection for current backend
name presence.
"""
# Validate backend and arguments. Require that the Social Auth
# response be passed in as a keyword argument, to make sure we
# don't match the username/password calling conventions of
# authenticate.
if 'backend' not in kwargs or kwargs['backend'].name != self.name or \
'strategy' not in kwargs or 'response' not in kwargs:
return None
self.strategy = self.strategy or kwargs.get('strategy')
self.redirect_uri = self.redirect_uri or kwargs.get('redirect_uri')
self.data = self.strategy.request_data()
pipeline = self.strategy.get_pipeline()
kwargs.setdefault('is_new', False)
if 'pipeline_index' in kwargs:
pipeline = pipeline[kwargs['pipeline_index']:]
return self.pipeline(pipeline, *args, **kwargs)
def pipeline(self, pipeline, pipeline_index=0, *args, **kwargs):
out = self.run_pipeline(pipeline, pipeline_index, *args, **kwargs)
if not isinstance(out, dict):
return out
user = out.get('user')
if user:
user.social_user = out.get('social')
user.is_new = out.get('is_new')
return user
def disconnect(self, *args, **kwargs):
pipeline = self.strategy.get_disconnect_pipeline()
if 'pipeline_index' in kwargs:
pipeline = pipeline[kwargs['pipeline_index']:]
kwargs['name'] = self.strategy.backend.name
kwargs['user_storage'] = self.strategy.storage.user
return self.run_pipeline(pipeline, *args, **kwargs)
def run_pipeline(self, pipeline, pipeline_index=0, *args, **kwargs):
out = kwargs.copy()
out.setdefault('strategy', self.strategy)
out.setdefault('backend', out.pop(self.name, None) or self)
out.setdefault('request', self.strategy.request)
for idx, name in enumerate(pipeline):
out['pipeline_index'] = pipeline_index + idx
func = module_member(name)
result = func(*args, **out) or {}
if not isinstance(result, dict):
return result
out.update(result)
self.strategy.clean_partial_pipeline()
return out
def extra_data(self, user, uid, response, details):
"""Return deafault extra data to store in extra_data field"""
data = {}
for entry in (self.EXTRA_DATA or []) + self.setting('EXTRA_DATA', []):
if not isinstance(entry, (list, tuple)):
entry = (entry,)
size = len(entry)
if size >= 1 and size <= 3:
if size == 3:
name, alias, discard = entry
elif size == 2:
(name, alias), discard = entry, False
elif size == 1:
name = alias = entry[0]
discard = False
value = response.get(name) or details.get(name)
if discard and not value:
continue
data[alias] = value
return data
def auth_allowed(self, response, details):
"""Return True if the user should be allowed to authenticate, by
default check if email is whitelisted (if there's a whitelist)"""
emails = self.setting('WHITELISTED_EMAILS', [])
domains = self.setting('WHITELISTED_DOMAINS', [])
email = details.get('email')
allowed = True
if email and (emails or domains):
domain = email.split('@', 1)[1]
allowed = email in emails or domain in domains
return allowed
def get_user_id(self, details, response):
"""Return a unique ID for the current user, by default from server
response."""
return response.get(self.ID_KEY)
def get_user_details(self, response):
"""Must return user details in a know internal struct:
{'username': <username if any>,
'email': <user email if any>,
'fullname': <user full name if any>,
'first_name': <user first name if any>,
'last_name': <user last name if any>}
"""
raise NotImplementedError('Implement in subclass')
def get_user(self, user_id):
"""
Return user with given ID from the User model used by this backend.
This is called by django.contrib.auth.middleware.
"""
from social.strategies.utils import get_current_strategy
strategy = self.strategy or get_current_strategy()
return strategy.get_user(user_id)
def continue_pipeline(self, *args, **kwargs):
"""Continue previous halted pipeline"""
kwargs.update({'backend': self})
return self.strategy.authenticate(*args, **kwargs)
def request_token_extra_arguments(self):
"""Return extra arguments needed on request-token process"""
return self.setting('REQUEST_TOKEN_EXTRA_ARGUMENTS', {})
def auth_extra_arguments(self):
"""Return extra arguments needed on auth process. The defaults can be
overriden by GET parameters."""
extra_arguments = self.setting('AUTH_EXTRA_ARGUMENTS', {})
extra_arguments.update((key, self.data[key]) for key in extra_arguments
if key in self.data)
return extra_arguments
def uses_redirect(self):
"""Return True if this provider uses redirect url method,
otherwise return false."""
return True
def request(self, url, method='GET', *args, **kwargs):
kwargs.setdefault('timeout', self.setting('REQUESTS_TIMEOUT') or
self.setting('URLOPEN_TIMEOUT'))
try:
response = request(method, url, *args, **kwargs)
except ConnectionError as err:
raise AuthFailed(self, str(err))
response.raise_for_status()
return response
def get_json(self, url, *args, **kwargs):
return self.request(url, *args, **kwargs).json()
def get_querystring(self, url, *args, **kwargs):
return parse_qs(self.request(url, *args, **kwargs).text)
def get_key_and_secret(self):
"""Return tuple with Consumer Key and Consumer Secret for current
service provider. Must return (key, secret), order *must* be respected.
"""
return self.setting('KEY'), self.setting('SECRET')
|
HackerEcology/SuggestU
|
suggestu/social/backends/base.py
|
Python
|
gpl-3.0
| 8,286
|
# Bzrflag
# Copyright 2008-2011 Brigham Young University
#
# This file is part of Bzrflag.
#
# Bzrflag is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Bzrflag is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# Bzrflag. If not, see <http://www.gnu.org/licenses/>.
#
# Inquiries regarding any further use of Bzrflag, please contact the Copyright
# Licensing Office, Brigham Young University, 3760 HBLL, Provo, UT 84602,
# (801) 422-9339 or 422-3821, e-mail copyright@byu.edu.
"""BZFlag Constants
These constants are originally defined in src/common/global.cxx in the BZFlag
repository. There are more than a hundred BZFlag constants that are in
global.cxx but are not included in the list of BSFlag constants.
"""
from __future__ import division # Must be at the beginning of the file!
__author__ = "BYU AML Lab <kseppi@byu.edu>"
__copyright__ = "Copyright 2008-2011 Brigham Young University"
__license__ = "GNU GPL"
import math
import logging
logger = logging.getLogger('constants')
# Colors
COLORNAME = ('rogue', 'red', 'green', 'blue', 'purple')
# Tanks
TANKANGVEL = math.pi / 4
TANKLENGTH = 6
TANKRADIUS = 0.72 * TANKLENGTH
TANKSPEED = 25
LINEARACCEL = 0.5
ANGULARACCEL = 0.5
TANKWIDTH = 2.8
TANKALIVE = 'alive'
TANKDEAD = 'dead'
DEADZONE = -999999.0, -999999.0
# Shots
MAXSHOTS = 10
SHOTRADIUS = 0.5
SHOTRANGE = 350
SHOTSPEED = 100
RELOADTIME = SHOTRANGE/SHOTSPEED
SHOTALIVE = 'alive'
SHOTDEAD = 'dead'
# Flags
FLAGRADIUS = 2.5
INITPOINTS = 2000
CAPTUREPOINTS = 4000
# Rules
EXPLODETIME = 5
# Graphics
BASE_PATTERN = '%s_basetop.png'
SHOT_PATTERN = '%s_bolt.png'
FLAG_PATTERN = '%s_flag.png'
TANK_PATTERN = '%s_tank.png'
TILESCALE = 0.1
SHOTSCALE = 3
FLAGSCALE = 3
TANKSCALE = 1.2
FONTSIZE = 16
# A higher loop timeout decreases CPU usage but also decreases the frame rate.
LOOP_TIMEOUT = 0.01
# Server
BACKLOG = 5
# Game
RESPAWNTRIES = 1000
|
andrewls/bzrflag
|
bzrflag/constants.py
|
Python
|
gpl-3.0
| 2,312
|
# This module is a very stripped down version of the dateutil
# package for when dateutil has not been installed. As a replacement
# for dateutil.parser.parse, the parsing methods from
# http://blog.mfabrik.com/2008/06/30/relativity-of-time-shortcomings-in-python-datetime-and-workaround/
#As such, the following copyrights and licenses applies:
# dateutil - Extensions to the standard python 2.3+ datetime module.
#
# Copyright (c) 2003-2011 - Gustavo Niemeyer <gustavo@niemeyer.net>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# fixed_dateime
#
# Copyright (c) 2008, Red Innovation Ltd., Finland
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Red Innovation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY RED INNOVATION ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL RED INNOVATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import re
import datetime
ZERO = datetime.timedelta(0)
try:
from dateutil.parser import parse as parse_iso
from dateutil.tz import tzoffset, tzutc
except:
# As a stopgap, define the two timezones here based
# on the dateutil code.
class tzutc(datetime.tzinfo):
def utcoffset(self, dt):
return ZERO
def dst(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def __eq__(self, other):
return (isinstance(other, tzutc) or
(isinstance(other, tzoffset) and other._offset == ZERO))
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s()" % self.__class__.__name__
__reduce__ = object.__reduce__
class tzoffset(datetime.tzinfo):
def __init__(self, name, offset):
self._name = name
self._offset = datetime.timedelta(seconds=offset)
def utcoffset(self, dt):
return self._offset
def dst(self, dt):
return ZERO
def tzname(self, dt):
return self._name
def __eq__(self, other):
return (isinstance(other, tzoffset) and
self._offset == other._offset)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s(%s, %s)" % (self.__class__.__name__,
repr(self._name),
self._offset.days*86400+self._offset.seconds)
__reduce__ = object.__reduce__
_fixed_offset_tzs = { }
UTC = tzutc()
def _get_fixed_offset_tz(offsetmins):
"""For internal use only: Returns a tzinfo with
the given fixed offset. This creates only one instance
for each offset; the zones are kept in a dictionary"""
if offsetmins == 0:
return UTC
if not offsetmins in _fixed_offset_tzs:
if offsetmins < 0:
sign = '-'
absoff = -offsetmins
else:
sign = '+'
absoff = offsetmins
name = "UTC%s%02d:%02d" % (sign, int(absoff / 60), absoff % 60)
inst = tzoffset(offsetmins, name)
_fixed_offset_tzs[offsetmins] = inst
return _fixed_offset_tzs[offsetmins]
_iso8601_parser = re.compile("""
^
(?P<year> [0-9]{4})?(?P<ymdsep>-?)?
(?P<month>[0-9]{2})?(?P=ymdsep)?
(?P<day> [0-9]{2})?
(?: # time part... optional... at least hour must be specified
(?:T|\s+)?
(?P<hour>[0-9]{2})
(?:
# minutes, separated with :, or none, from hours
(?P<hmssep>[:]?)
(?P<minute>[0-9]{2})
(?:
# same for seconds, separated with :, or none, from hours
(?P=hmssep)
(?P<second>[0-9]{2})
)?
)?
# fractions
(?: [,.] (?P<frac>[0-9]{1,10}))?
# timezone, Z, +-hh or +-hh:?mm. MUST BE, but complain if not there.
(
(?P<tzempty>Z)
|
(?P<tzh>[+-][0-9]{2})
(?: :? # optional separator
(?P<tzm>[0-9]{2})
)?
)?
)?
$
""", re.X) # """
def parse_iso(timestamp):
"""Internal function for parsing a timestamp in
ISO 8601 format"""
timestamp = timestamp.strip()
m = _iso8601_parser.match(timestamp)
if not m:
raise ValueError("Not a proper ISO 8601 timestamp!: %s" % timestamp)
vals = m.groupdict()
def_vals = {'year': 1970, 'month': 1, 'day': 1}
for key in vals:
if vals[key] is None:
vals[key] = def_vals.get(key, 0)
elif key not in ['ymdsep', 'hmssep', 'tzempty']:
vals[key] = int(vals[key])
year = vals['year']
month = vals['month']
day = vals['day']
h, min, s, us = None, None, None, 0
frac = 0
if m.group('tzempty') == None and m.group('tzh') == None:
raise ValueError("Not a proper ISO 8601 timestamp: " +
"missing timezone (Z or +hh[:mm])!")
if m.group('frac'):
frac = m.group('frac')
power = len(frac)
frac = int(frac) / 10.0 ** power
if m.group('hour'):
h = vals['hour']
if m.group('minute'):
min = vals['minute']
if m.group('second'):
s = vals['second']
if frac != None:
# ok, fractions of hour?
if min == None:
frac, min = _math.modf(frac * 60.0)
min = int(min)
# fractions of second?
if s == None:
frac, s = _math.modf(frac * 60.0)
s = int(s)
# and extract microseconds...
us = int(frac * 1000000)
if m.group('tzempty') == 'Z':
offsetmins = 0
else:
# timezone: hour diff with sign
offsetmins = vals['tzh'] * 60
tzm = m.group('tzm')
# add optional minutes
if tzm != None:
tzm = int(tzm)
offsetmins += tzm if offsetmins > 0 else -tzm
tz = _get_fixed_offset_tz(offsetmins)
return datetime.datetime(year, month, day, h, min, s, us, tz)
|
TheGurke/Progenitus
|
sleekxmpp/thirdparty/mini_dateutil.py
|
Python
|
gpl-3.0
| 9,155
|
# A Magento 2 module generator library
# Copyright (C) 2016 Maikel Martens
#
# This file is part of Mage2Gen.
#
# Mage2Gen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import string
class DefaultFormatter(string.Formatter):
def __init__(self, default=''):
self.default = default
def get_field(self, field_name, args, kwargs):
try:
return super().get_field(field_name, args, kwargs)
except (KeyError, AttributeError):
return self.default
def upperfirst(word):
return word[0].upper() + word[1:]
def lowerfirst(word):
return word[0].lower() + word[1:]
|
krukas/Mage2Gen
|
mage2gen/utils.py
|
Python
|
gpl-3.0
| 1,144
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('experiment', '0004_auto_20150727_1405'),
]
operations = [
migrations.AlterField(
model_name='customerexperiment',
name='experiment_description',
field=models.TextField(max_length=1024, verbose_name='Experiment description'),
),
migrations.AlterField(
model_name='customerexperiment',
name='test_subject_count',
field=models.PositiveIntegerField(default=0, verbose_name='Number of test participants'),
),
migrations.AlterField(
model_name='customerexperiment',
name='test_subject_description',
field=models.TextField(max_length=512, verbose_name='Test description'),
),
]
|
CentechMTL/TableauDeBord
|
app/experiment/migrations/0005_auto_20160330_1625.py
|
Python
|
gpl-3.0
| 919
|
"""Provides some useful utilities for the Discord bot, mostly to do with cleaning."""
import re
import discord
__all__ = ['clean', 'is_clean']
mass_mention = re.compile('@(everyone|here)')
member_mention = re.compile(r'<@\!?(\d+)>')
role_mention = re.compile(r'<@&(\d+)>')
channel_mention = re.compile(r'<#(\d+)>')
def clean(ctx, text=None, *, mass=True, member=True, role=True, channel=True):
"""Cleans the message of anything specified in the parameters passed."""
if text is None:
text = ctx.message.content
if mass:
cleaned_text = mass_mention.sub(lambda match: '@\N{ZERO WIDTH SPACE}' + match.group(1), text)
if member:
cleaned_text = member_mention.sub(lambda match: clean_member_name(ctx, int(match.group(1))), cleaned_text)
if role:
cleaned_text = role_mention.sub(lambda match: clean_role_name(ctx, int(match.group(1))), cleaned_text)
if channel:
cleaned_text = channel_mention.sub(lambda match: clean_channel_name(ctx, int(match.group(1))), cleaned_text)
return cleaned_text
def is_clean(ctx, text=None):
"""Checks if the message is clean already and doesn't need to be cleaned."""
if text is None:
text = ctx.message.content
return all(regex.search(text) is None for regex in (mass_mention, member_mention, role_mention, channel_mention))
def clean_member_name(ctx, member_id):
"""Cleans a member's name from the message."""
member = ctx.guild.get_member(member_id)
if member is None:
return '<@\N{ZERO WIDTH SPACE}%d>' % member_id
elif is_clean(ctx, member.display_name):
return member.display_name
elif is_clean(ctx, str(member)):
return str(member)
else:
return '<@\N{ZERO WIDTH SPACE}%d>' % member.id
def clean_role_name(ctx, role_id):
"""Cleans role pings from messages."""
role = discord.utils.get(ctx.guild.roles, id=role_id) # Guild.get_role doesn't exist
if role is None:
return '<@&\N{ZERO WIDTH SPACE}%d>' % role_id
elif is_clean(ctx, role.name):
return '@' + role.name
else:
return '<@&\N{ZERO WIDTH SPACE}%d>' % role.id
def clean_channel_name(ctx, channel_id):
"""Cleans channel mentions from messages."""
channel = ctx.guild.get_channel(channel_id)
if channel is None:
return '<#\N{ZERO WIDTH SPACE}%d>' % channel_id
elif is_clean(ctx, channel.name):
return '#' + channel.name
else:
return '<#\N{ZERO WIDTH SPACE}%d>' % channel.id
def pretty_concat(strings, single_suffix='', multi_suffix=''):
"""Concatenates things in a pretty way"""
if len(strings) == 1:
return strings[0] + single_suffix
elif len(strings) == 2:
return '{} and {}{}'.format(*strings, multi_suffix)
else:
return '{}, and {}{}'.format(', '.join(strings[:-1]), strings[-1], multi_suffix)
|
guineawheek/Dozer
|
dozer/utils.py
|
Python
|
gpl-3.0
| 2,868
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from bang import VERSION
import os.path
ETC = os.path.join(os.path.dirname(__file__), 'etc')
with open(os.path.join(ETC, 'requirements.pip')) as f:
reqs = [l.strip() for l in f if '://' not in l]
reqs.append('distribute')
setup(
name='bang',
version=VERSION,
author='fr33jc',
author_email='fr33jc@gmail.com',
packages=find_packages(exclude=['tests']),
package_data={'bang': ['bang.wav']},
license='GPLv3',
description='Server and cloud resource deployment automation',
platforms='POSIX',
url='https://github.com/fr33jc/bang',
install_requires=reqs,
scripts=['bin/bang'],
)
|
fr33jc/bang-unstable
|
setup.py
|
Python
|
gpl-3.0
| 751
|
import pytest
from chandere.errors import ChandereError
from chandere.loader import load_scraper
scraper = load_scraper("8chan")
VALID_CROSSLINK_TARGETS = [
("/tech/589254", ("tech", "589254")),
("/tech/ 589254", ("tech", "589254")),
("tech/589254", ("tech", "589254")),
("/tech 589254", ("tech", "589254")),
("tech 589254", ("tech", "589254")),
("/tech/", ("tech", None)),
("/tech", ("tech", None)),
("tech/", ("tech", None)),
("tech", ("tech", None)),
]
INVALID_CROSSLINK_TARGETS = [
"/"
]
VALID_URI_TARGETS = [
("https://8ch.net/tech/res/589254.html", ("tech", "589254")),
("http://8ch.net/tech/res/589254.html", ("tech", "589254")),
("https://8ch.net/tech/res/589254.json", ("tech", "589254")),
("http://8ch.net/tech/res/589254.json", ("tech", "589254")),
("https://8ch.net/tech/", ("tech", None)),
("http://8ch.net/tech/", ("tech", None)),
]
INVALID_URI_TARGETS = [
"https://8ch.net/",
"http://8ch.net/",
"https://google.com/",
"http://google.com/",
]
def test_parse_valid_uri_target():
for target, expected in VALID_URI_TARGETS:
assert scraper.parse_target(target) == expected
def test_parse_invalid_uri_target():
for target in INVALID_URI_TARGETS:
with pytest.raises(ChandereError):
scraper.parse_target(target)
def test_parse_valid_crosslink_target():
for target, expected in VALID_CROSSLINK_TARGETS:
assert scraper.parse_target(target) == expected
def test_parse_invalid_crosslink_target():
for target in INVALID_CROSSLINK_TARGETS:
with pytest.raises(ChandereError):
scraper.parse_target(target)
|
TsarFox/chandere2
|
tests/websites/test_8chan.py
|
Python
|
gpl-3.0
| 1,671
|
# -*- coding: utf-8 -*-
# This file is part of emesene.
#
# emesene is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# emesene is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with emesene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import e3
import gui
import extension
import PyQt4.QtGui as QtGui
import PyQt4.QtCore as QtCore
class StatusButton(QtGui.QToolButton):
'''a button that when clicked displays a popup that allows the user to
select a status'''
NAME = 'Status Button'
DESCRIPTION = 'A button to select the status'
AUTHOR = 'Jose Rostagno'
WEBSITE = 'www.emesene.org'
def __init__(self, session=None):
QtGui.QToolButton.__init__(self, None)
self.session = session
# a cache of gtk.Images to not load the images everytime we change
# our status
self.cache_imgs = {}
self.setAutoRaise(True)
StatusMenu = extension.get_default('menu status')
self.menu = StatusMenu(self.set_status)
self.invertStatus = {}
for stat in e3.status.STATUS:
self.invertStatus[unicode(e3.status.STATUS[stat])] = stat
if self.session:
self.status = self.session.account.status
else:
self.status = e3.status.OFFLINE
self.set_status(self.status)
self.menu.triggered.connect(self.statusactionchange)
self.setMenu(self.menu)
# show status menu on button click
self.clicked.connect(self.showMenu)
def statusactionchange(self, action):
status = self.invertStatus[str(action.text())]
self.set_status(status)
def set_status(self, stat):
'''load an image representing a status and store it on cache'''
current_status = -1
if self.session:
current_status = self.session.account.status
if stat not in self.cache_imgs:
qt_icon = QtGui.QIcon(\
gui.theme.image_theme.status_icons[stat])
self.cache_imgs[stat] = qt_icon
else:
qt_icon = self.cache_imgs[stat]
self.setIcon(qt_icon)
if stat not in e3.status.ALL or stat == current_status:
return
self.status = stat
if self.session:
self.session.set_status(stat)
|
tiancj/emesene
|
emesene/gui/qt4ui/widgets/StatusButton.py
|
Python
|
gpl-3.0
| 2,828
|
import sys
from ocelot.adaptors.genesis import *
from ocelot.cpbd.elements import Element, Quadrupole, RBend, Drift, Undulator
from ocelot import MagneticLattice
from ocelot.cpbd.beam import Beam
from ocelot.cpbd.optics import *
import numpy.fft as fft
from sim_info import SimInfo, RunInfo
#params = {'backend': 'ps', 'axes.labelsize': 18, 'text.fontsize': 18, 'legend.fontsize': 18, 'xtick.labelsize': 18, 'ytick.labelsize': 18, 'text.usetex': True}
#rcParams.update(params)
#rc('text', usetex=True) # required to have greek fonts on redhat
import argparse
h = 4.135667516e-15
c = 299792458.0
parser = argparse.ArgumentParser(description='FEL simulation postprocessor')
#parser.add_argument('--submit', help='submit to main index file', action='store_true')
parser.add_argument('--path', help='path to the experiment', default='./')
parser.add_argument('--stage', help='undulator/seeding stages 1 through 5', default='1')
parser.add_argument('--range', help='range of runs in the form i1:i2')
parser.add_argument('--field_file', help='read in field file', action='store_true')
args = parser.parse_args()
run_start, run_end = [int(i) for i in args.range.split(':') ]
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
ax1.set_xlabel('Time [fs]')
ax1.set_ylabel('Power [W]')
fig2 = plt.figure()
ax2 = fig2.add_subplot(111)
ax2.set_xlabel('Photon Energy [eV]')
ax2.set_ylabel('Spectrum [arb. units]')
ax2.get_xaxis().get_major_formatter().set_useOffset(False)
ax3 = ax2.twiny()
ax3.set_xlabel('Wavelength [nm]')
power_av = None
spec_av = None
runs = xrange(run_start, run_end+1)
for run_id in runs:
run_dir = args.path + '/run_' + str(run_id)
if args.stage in ['1','3','5']:
run_file = run_dir + '/run.' + str(run_id) + '.s' + str(args.stage) + '.gout'
if args.stage == '5' : run_file = run_dir + '/run.' + str(run_id) + '.gout'
print 'reading', run_file
g = readGenesisOutput(run_file)
field_file = run_file + '.dfl'
if args.field_file:
slices = readRadiationFile(fileName=field_file, npoints=g('ncar'))
P = np.zeros_like(slices[:,0,0])
for i in xrange(len(P)):
P[i] = sum( np.abs(np.multiply(slices[i,:,:], slices[i,:,:].conjugate())) )
t = np.linspace(g.t[0], g.t[-1], len(P))
else:
P = g.power_int
t = g.t
w_l_m = g('xlamds')
w_l_ev = h * c / g('xlamds')
x = np.roll(g.freq_ev, len(g.freq_ev)/2)+ w_l_ev
y = np.roll( np.abs(g.spec)**2, len(g.freq_ev)/2)
else:
run_file = run_dir + '/run.' + str(run_id) + '.s' + str( int(args.stage) - 1) + '.gout'
field_file = 'tmp' + str(args.stage) + '.dfl'
print 'reading', run_file, 'and', field_file
g = readGenesisOutput(run_file)
slices = readRadiationFile(fileName=run_dir + '/' + field_file, npoints=g('ncar'))
P = np.zeros_like(slices[:,0,0])
spec = np.zeros_like(slices[:,0,0])
for i in xrange(len(P)):
P[i] = sum( np.abs(np.multiply(slices[i,:,:], slices[i,:,:].conjugate())) )
t = np.linspace(g.t[0], g.t[-1], len(P))
w_l_m = g('xlamds')
w_l_ev = h * c / g('xlamds')
#x = np.roll(g.freq_ev, len(g.freq_ev)/2)+ w_l_ev
spec = fft.fft(slices[:,int( g('ncar')/2),int( g('ncar')/2)])
y = np.abs(spec)**2
x = h * fftfreq(len(spec), d=g('zsep') * g('xlamds') / c) + w_l_ev
if power_av == None:
power_av = P / len(runs)
else:
power_av += P / len(runs)
p1, = ax1.plot(t, P, color='black',alpha=0.4)
if spec_av == None:
spec_av = y / len(runs)
else:
spec_av += y / len(runs)
p2, = ax2.plot(x, y, color='black', alpha = 0.4)
ax2.set_xlim(x[0],x[-1])
ax3.set_xlim(x[0],x[-1])
x_ticks = ax2.get_xticks()[1:]
x2 = h*c/(x_ticks) * 1.e9 # coordinates in nm
ax3.set_xticks(x_ticks)
ax3.set_xticklabels(["%.4f" % z for z in x2])
ax1.plot(t, power_av, 'b')
ax2.plot(x, spec_av, 'b')
plt.show()
|
sserkez/ocelot
|
utils/sim_plot.py
|
Python
|
gpl-3.0
| 4,107
|
import random
from io import StringIO
from gi.repository import Gtk
from pychess.compat import create_task
from pychess.System.prefix import addDataPrefix
from pychess.Utils.const import WHITE, BLACK, LOCAL, NORMALCHESS, ARTIFICIAL, chr2Sign, chrU2Sign, FAN_PIECES, HINT, ENDGAME
from pychess.Utils.LearnModel import LearnModel
from pychess.Utils.TimeModel import TimeModel
from pychess.Utils.lutils.attack import isAttacked
from pychess.Utils.lutils.LBoard import LBoard
from pychess.Utils.lutils.lmove import FILE, RANK
from pychess.Variants import variants
from pychess.Players.Human import Human
from pychess.Players.engineNest import discoverer
from pychess.System import conf
from pychess.perspectives import perspective_manager
from pychess.Savers import fen as fen_loader
__title__ = _("Endgames")
__icon__ = addDataPrefix("glade/panel_book.svg")
__desc__ = _("Practice endgames with computer")
# TODO: get it from a text file
ENDGAMES = (
("kpk", "King and Pawn vs King"),
("kbnk", "King, Bishop and Knight vs King"),
("kbbk", "King and 2 Bishops vs King"),
("krk", "King and Rook vs King"),
("kqk", "King and Queen vs King"),
("kqkr", "King and Queen vs King and Rook"),
("krpkr", "King, Rook and Pawn vs King and Rook"),
("kppkp", "King and 2 Pawns vs King and Pawn"),
("kpkp", "King and Pawn vs King and Pawn"),
("kqpkq", "King, Queen and Pawn vs King and Queen"),
("knnkp", "King and Two Knights and vs King and Pawn"),
("kppkpp", "King and two pawns vs King and two pawns"),
("kqqkqr", "King and two queens vs King and Queen"),
)
class Sidepanel():
def load(self, persp):
self.persp = persp
self.box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
self.tv = Gtk.TreeView()
renderer = Gtk.CellRendererText()
renderer.props.font = "Times 14"
column = Gtk.TreeViewColumn(_("White"), renderer, text=0)
self.tv.append_column(column)
renderer = Gtk.CellRendererText()
renderer.props.font = "Times 14"
column = Gtk.TreeViewColumn(_("Black"), renderer, text=1)
self.tv.append_column(column)
renderer = Gtk.CellRendererText()
column = Gtk.TreeViewColumn(_("Title"), renderer, text=2)
self.tv.append_column(column)
self.tv.connect("row-activated", self.row_activated)
self.store = Gtk.ListStore(str, str, str)
for pieces, title in ENDGAMES:
if pieces.count("k") != 2:
print("Game needs exactly 2 kings! %s" % pieces)
continue
elif len(pieces) > 6:
print("Max 6 pieces, please! %s" % pieces)
continue
else:
for piece in pieces:
if piece not in ("kqrbnp"):
print("Invalid piece %s in %s" % (piece, pieces))
continue
pos = pieces.rfind("k")
white_pieces, black_pieces = pieces[:pos], pieces[pos:]
wfan = []
for piece in white_pieces:
wfan.append(FAN_PIECES[0][chr2Sign[piece]])
bfan = []
for piece in black_pieces:
bfan.append(FAN_PIECES[1][chr2Sign[piece]])
self.store.append(["".join(wfan), "".join(bfan), title])
self.tv.set_model(self.store)
self.tv.get_selection().set_mode(Gtk.SelectionMode.BROWSE)
self.tv.set_cursor(conf.get("learncombo%s" % ENDGAME))
scrollwin = Gtk.ScrolledWindow()
scrollwin.add(self.tv)
scrollwin.show_all()
self.box.pack_start(scrollwin, True, True, 0)
self.box.show_all()
return self.box
def row_activated(self, widget, path, col):
if path is None:
return
else:
pieces = ENDGAMES[path[0]][0].lower()
conf.set("categorycombo", ENDGAME)
from pychess.widgets.TaskerManager import learn_tasker
learn_tasker.learn_combo.set_active(path[0])
start_endgame_from(pieces)
def start_endgame_from(pieces):
fen = create_fen(pieces)
timemodel = TimeModel(0, 0)
gamemodel = LearnModel(timemodel)
gamemodel.set_learn_data(ENDGAME, pieces)
player_name = conf.get("firstName")
p0 = (LOCAL, Human, (WHITE, player_name), player_name)
engine = discoverer.getEngineByName(discoverer.getEngineLearn())
ponder_off = True
engine_name = discoverer.getName(engine)
p1 = (ARTIFICIAL, discoverer.initPlayerEngine,
(engine, BLACK, 20, variants[NORMALCHESS], 60, 0, 0, ponder_off), engine_name)
def restart_analyzer(gamemodel):
create_task(gamemodel.restart_analyzer(HINT))
gamemodel.connect("learn_success", restart_analyzer)
def on_game_started(gamemodel):
perspective.activate_panel("annotationPanel")
create_task(gamemodel.start_analyzer(HINT, force_engine=discoverer.getEngineLearn()))
gamemodel.connect("game_started", on_game_started)
perspective = perspective_manager.get_perspective("games")
create_task(perspective.generalStart(gamemodel, p0, p1, loaddata=(StringIO(fen), fen_loader, 0, -1)))
def create_fen(pieces):
""" Create a random FEN position using given pieces """
pos = pieces.rfind("k")
pieces = pieces[:pos], pieces[pos:]
ok = False
while not ok:
lboard = LBoard()
lboard.applyFen("8/8/8/8/8/8/8/8 w - - 0 1")
bishop_cords = [[], []]
bishop_colors_ok = True
cords = list(range(0, 64))
pawn_cords = list(range(0 + 8, 64 - 8))
# Order of color is important here to prevent offering
# positions with trivial captures in first move
for color in (WHITE, BLACK):
for char in pieces[color]:
piece = chrU2Sign[char.upper()]
attacked = True
limit = 100
while attacked and limit > 0:
cord = random.choice(pawn_cords if char == "p" else cords)
attacked = isAttacked(lboard, cord, 1 - color)
limit -= 1
lboard._addPiece(cord, piece, color)
cords.remove(cord)
if cord in pawn_cords:
pawn_cords.remove(cord)
if char == "b":
bishop_cords[color].append(cord)
# 2 same color bishop is not ok
if len(bishop_cords[color]) == 2 and bishop_colors_ok:
b0, b1 = bishop_cords[color]
b0_color = BLACK if RANK(b0) % 2 == FILE(b0) % 2 else WHITE
b1_color = BLACK if RANK(b1) % 2 == FILE(b1) % 2 else WHITE
if b0_color == b1_color:
bishop_colors_ok = False
break
ok = (not lboard.isChecked()) and (not lboard.opIsChecked()) and bishop_colors_ok
fen = lboard.asFen()
return fen
|
leogregianin/pychess
|
lib/pychess/perspectives/learn/EndgamesPanel.py
|
Python
|
gpl-3.0
| 6,976
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
import sys
if len(sys.argv) != 2:
print "usage: ./%s PATH/TO/OBJDIR" % sys.argv[0]
objdir_path = sys.argv[1]
def line_to_atom(line):
result = re.match('^GK_ATOM\((.+),\s*"(.*)"\)', line)
return (result.group(1), result.group(2))
def symbolify(ident):
return "_ZN9nsGkAtoms" + str(len(ident)) + ident + "E"
with open(objdir_path + "/dist/include/nsGkAtomList.h") as f:
lines = [line for line in f.readlines() if line.startswith("GK_ATOM")]
atoms = [line_to_atom(line) for line in lines]
with open("atom_macro.rs", "w") as f:
f.write("use gecko_bindings::structs::nsIAtom;\n\n")
f.write("use Atom;\n\n")
f.write("pub fn unsafe_atom_from_static(ptr: *mut nsIAtom) -> Atom { unsafe { Atom::from_static(ptr) } }\n\n")
for atom in atoms:
f.write('extern { pub static %s: *mut nsIAtom; }\n' % symbolify(atom[0]))
f.write("#[macro_export]\n")
f.write("macro_rules! atom {\n")
f.writelines(['("%s") => { $crate::atom_macro::unsafe_atom_from_static($crate::atom_macro::%s) };\n'
% (atom[1], symbolify(atom[0])) for atom in atoms])
f.write("}\n")
|
fduraffourg/servo
|
ports/geckolib/string_cache/regen_atom_macro.py
|
Python
|
mpl-2.0
| 1,335
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
import re
from threading import Event
from copy import copy
from PyQt4.QtCore import QTimer, SIGNAL, QObject, QString, QSize, QVariant, QMutex, Qt
from PyQt4.QtGui import QMainWindow, QApplication, QStyledItemDelegate, \
QStyleOptionViewItemV4, QTextDocument, QStyle, \
QAbstractTextDocumentLayout, QPalette, QMessageBox, \
QSpinBox, QLineEdit, QComboBox, QCheckBox, QInputDialog
from weboob.core.ouiboube import Weboob, VersionsMismatchError
from weboob.core.scheduler import IScheduler
from weboob.core.repositories import ModuleInstallError
from weboob.tools.config.iconfig import ConfigError
from weboob.tools.browser import BrowserUnavailable, BrowserIncorrectPassword, BrowserForbidden
from weboob.tools.value import ValueInt, ValueBool, ValueBackendPassword
from weboob.tools.misc import to_unicode
from weboob.capabilities import UserError
from ..base import BaseApplication, MoreResultsAvailable
__all__ = ['QtApplication', 'QtMainWindow', 'QtDo', 'HTMLDelegate']
class QtScheduler(IScheduler):
def __init__(self, app):
self.app = app
self.count = 0
self.timers = {}
def schedule(self, interval, function, *args):
timer = QTimer()
timer.setInterval(interval * 1000)
timer.setSingleShot(True)
count = self.count
self.count += 1
timer.start()
self.app.connect(timer, SIGNAL("timeout()"), lambda: self.timeout(count, None, function, *args))
self.timers[count] = timer
def repeat(self, interval, function, *args):
timer = QTimer()
timer.setSingleShot(False)
count = self.count
self.count += 1
timer.start(0)
self.app.connect(timer, SIGNAL("timeout()"), lambda: self.timeout(count, interval, function, *args))
self.timers[count] = timer
def timeout(self, _id, interval, function, *args):
function(*args)
if interval is None:
self.timers.pop(_id)
else:
self.timers[_id].setInterval(interval * 1000)
def want_stop(self):
self.app.quit()
def run(self):
self.app.exec_()
class QCallbacksManager(QObject):
class Request(object):
def __init__(self):
self.event = Event()
self.answer = None
def __call__(self):
raise NotImplementedError()
class LoginRequest(Request):
def __init__(self, backend_name, value):
QCallbacksManager.Request.__init__(self)
self.backend_name = backend_name
self.value = value
def __call__(self):
password, ok = QInputDialog.getText(None,
'%s request' % self.value.label,
'Please enter %s for %s' % (self.value.label,
self.backend_name),
QLineEdit.Password)
return password
def __init__(self, weboob, parent=None):
QObject.__init__(self, parent)
self.weboob = weboob
self.weboob.callbacks['login'] = self.callback(self.LoginRequest)
self.mutex = QMutex()
self.requests = []
self.connect(self, SIGNAL('new_request'), self.do_request)
def callback(self, klass):
def cb(*args, **kwargs):
return self.add_request(klass(*args, **kwargs))
return cb
def do_request(self):
self.mutex.lock()
request = self.requests.pop()
request.answer = request()
request.event.set()
self.mutex.unlock()
def add_request(self, request):
self.mutex.lock()
self.requests.append(request)
self.mutex.unlock()
self.emit(SIGNAL('new_request'))
request.event.wait()
return request.answer
class QtApplication(QApplication, BaseApplication):
def __init__(self):
QApplication.__init__(self, sys.argv)
self.setApplicationName(self.APPNAME)
BaseApplication.__init__(self)
self.cbmanager = QCallbacksManager(self.weboob, self)
def create_weboob(self):
return Weboob(scheduler=QtScheduler(self))
def load_backends(self, *args, **kwargs):
while True:
try:
return BaseApplication.load_backends(self, *args, **kwargs)
except VersionsMismatchError as e:
msg = 'Versions of modules mismatch with version of weboob.'
except ConfigError as e:
msg = unicode(e)
res = QMessageBox.question(None, 'Configuration error', u'%s\n\nDo you want to update repositories?' % msg, QMessageBox.Yes|QMessageBox.No)
if res == QMessageBox.No:
raise e
# Do not import it globally, it causes circular imports
from .backendcfg import ProgressDialog
pd = ProgressDialog('Update of repositories', "Cancel", 0, 100)
pd.setWindowModality(Qt.WindowModal)
try:
self.weboob.update(pd)
except ModuleInstallError as err:
QMessageBox.critical(None, self.tr('Update error'),
unicode(self.tr('Unable to update repositories: %s' % err)),
QMessageBox.Ok)
pd.setValue(100)
QMessageBox.information(None, self.tr('Update of repositories'),
self.tr('Repositories updated!'), QMessageBox.Ok)
class QtMainWindow(QMainWindow):
def __init__(self, parent=None):
QMainWindow.__init__(self, parent)
class QtDo(QObject):
def __init__(self, weboob, cb, eb=None):
QObject.__init__(self)
if not eb:
eb = self.default_eb
self.weboob = weboob
self.process = None
self.cb = cb
self.eb = eb
self.connect(self, SIGNAL('cb'), self.local_cb)
self.connect(self, SIGNAL('eb'), self.local_eb)
def do(self, *args, **kwargs):
self.process = self.weboob.do(*args, **kwargs)
self.process.callback_thread(self.thread_cb, self.thread_eb)
def default_eb(self, backend, error, backtrace):
if isinstance(error, MoreResultsAvailable):
# This is not an error, ignore.
return
msg = unicode(error)
if isinstance(error, BrowserIncorrectPassword):
if not msg:
msg = 'Invalid login/password.'
elif isinstance(error, BrowserUnavailable):
if not msg:
msg = 'Website is unavailable.'
elif isinstance(error, BrowserForbidden):
if not msg:
msg = 'This action is forbidden.'
elif isinstance(error, NotImplementedError):
msg = u'This feature is not supported by this backend.\n\n' \
u'To help the maintainer of this backend implement this feature, please contact: %s <%s>' % (backend.MAINTAINER, backend.EMAIL)
elif isinstance(error, UserError):
if not msg:
msg = type(error).__name__
elif logging.root.level == logging.DEBUG:
msg += u'<br />'
ul_opened = False
for line in backtrace.split('\n'):
m = re.match(' File (.*)', line)
if m:
if not ul_opened:
msg += u'<ul>'
ul_opened = True
else:
msg += u'</li>'
msg += u'<li><b>%s</b>' % m.group(1)
else:
msg += u'<br />%s' % to_unicode(line)
if ul_opened:
msg += u'</li></ul>'
print >>sys.stderr, error
print >>sys.stderr, backtrace
QMessageBox.critical(None, unicode(self.tr('Error with backend %s')) % backend.name,
msg, QMessageBox.Ok)
def local_cb(self, backend, data):
self.cb(backend, data)
if not backend:
self.disconnect(self, SIGNAL('cb'), self.local_cb)
self.disconnect(self, SIGNAL('eb'), self.local_eb)
self.process = None
def local_eb(self, backend, error, backtrace):
self.eb(backend, error, backtrace)
def thread_cb(self, backend, data):
self.emit(SIGNAL('cb'), backend, data)
def thread_eb(self, backend, error, backtrace):
self.emit(SIGNAL('eb'), backend, error, backtrace)
class HTMLDelegate(QStyledItemDelegate):
def paint(self, painter, option, index):
optionV4 = QStyleOptionViewItemV4(option)
self.initStyleOption(optionV4, index)
style = optionV4.widget.style() if optionV4.widget else QApplication.style()
doc = QTextDocument()
doc.setHtml(optionV4.text)
# painting item without text
optionV4.text = QString()
style.drawControl(QStyle.CE_ItemViewItem, optionV4, painter)
ctx = QAbstractTextDocumentLayout.PaintContext()
# Hilight text if item is selected
if optionV4.state & QStyle.State_Selected:
ctx.palette.setColor(QPalette.Text, optionV4.palette.color(QPalette.Active, QPalette.HighlightedText))
textRect = style.subElementRect(QStyle.SE_ItemViewItemText, optionV4)
painter.save()
painter.translate(textRect.topLeft())
painter.setClipRect(textRect.translated(-textRect.topLeft()))
doc.documentLayout().draw(painter, ctx)
painter.restore()
def sizeHint(self, option, index):
optionV4 = QStyleOptionViewItemV4(option)
self.initStyleOption(optionV4, index)
doc = QTextDocument()
doc.setHtml(optionV4.text)
doc.setTextWidth(optionV4.rect.width())
return QSize(doc.idealWidth(), max(doc.size().height(), optionV4.decorationSize.height()))
class _QtValueStr(QLineEdit):
def __init__(self, value):
QLineEdit.__init__(self)
self._value = value
if value.default:
self.setText(unicode(value.default))
if value.masked:
self.setEchoMode(self.Password)
def set_value(self, value):
self._value = value
self.setText(self._value.get())
def get_value(self):
self._value.set(unicode(self.text()))
return self._value
class _QtValueBackendPassword(_QtValueStr):
def get_value(self):
self._value._domain = None
return _QtValueStr.get_value(self)
class _QtValueBool(QCheckBox):
def __init__(self, value):
QCheckBox.__init__(self)
self._value = value
if value.default:
self.setChecked(True)
def set_value(self, value):
self._value = value
self.setChecked(self._value.get())
def get_value(self):
self._value.set(self.isChecked())
return self._value
class _QtValueInt(QSpinBox):
def __init__(self, value):
QSpinBox.__init__(self)
self._value = value
if value.default:
self.setValue(int(value.default))
def set_value(self, value):
self._value = value
self.setValue(self._value.get())
def get_value(self):
self._value.set(self.getValue())
return self._value
class _QtValueChoices(QComboBox):
def __init__(self, value):
QComboBox.__init__(self)
self._value = value
for k, l in value.choices.iteritems():
self.addItem(l, QVariant(k))
if value.default == k:
self.setCurrentIndex(self.count()-1)
def set_value(self, value):
self._value = value
for i in xrange(self.count()):
if unicode(self.itemData(i).toString()) == self._value.get():
self.setCurrentIndex(i)
return
def get_value(self):
self._value.set(unicode(self.itemData(self.currentIndex()).toString()))
return self._value
def QtValue(value):
if isinstance(value, ValueBool):
klass = _QtValueBool
elif isinstance(value, ValueInt):
klass = _QtValueInt
elif isinstance(value, ValueBackendPassword):
klass = _QtValueBackendPassword
elif value.choices is not None:
klass = _QtValueChoices
else:
klass = _QtValueStr
return klass(copy(value))
|
blckshrk/Weboob
|
weboob/tools/application/qt/qt.py
|
Python
|
agpl-3.0
| 13,100
|
# -*- coding: utf-8 -*-
import json
import logging
import sys
from intelmq.lib.bot import Bot
from intelmq.lib.message import Report
from sdk.blueliv_api import BluelivAPI
class BluelivCrimeserverCollectorBot(Bot):
def process(self):
self.logger.info("Downloading report through API")
http_proxy = getattr(self.parameters, 'http_proxy', None)
https_proxy = getattr(self.parameters, 'http_ssl_proxy', None)
proxy = None
if http_proxy and https_proxy:
proxy = {'http': http_proxy,
'https': https_proxy}
api = BluelivAPI(base_url='https://freeapi.blueliv.com',
token=self.parameters.api_key,
log_level=logging.INFO,
proxy=proxy)
response = api.crime_servers.online()
self.logger.info("Report downloaded.")
report = Report()
report.add("raw", json.dumps([item for item in response.items]))
report.add("feed.name", self.parameters.feed)
report.add("feed.accuracy", self.parameters.accuracy)
self.send_message(report)
if __name__ == "__main__":
bot = BluelivCrimeserverCollectorBot(sys.argv[1])
bot.start()
|
robcza/intelmq
|
intelmq/bots/collectors/blueliv/collector_crimeserver.py
|
Python
|
agpl-3.0
| 1,233
|
#
# Copyright (C) 2017 Maha Farhat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Global anchor for the website's urls
"""
from django.urls import path
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.views.generic import TemplateView as Tv
from apps.versioner.views import Version
urlpatterns = [ # pylint: disable=invalid-name
path('', Tv.as_view(template_name='home.html'), name="home"),
# Sometimes the proxy server misbehaves on the front page
path('index.html', Tv.as_view(template_name='home.html'), name="home"),
path('about/', Tv.as_view(template_name='about.html'), name="about"),
path('data/', Tv.as_view(template_name='data.html'), name="data"),
path('data/info/', Tv.as_view(template_name='info.html'), name="info"),
path('terms/', Tv.as_view(template_name='terms.html'), name="terms"),
path('gentb-admin/', admin.site.urls),
url(r'^models/', include('django_spaghetti.urls')),
url(r'^user/', include('apps.tb_users.urls')),
url(r'^auth/', include('apps.tb_users.auth_urls')),
#url(r'.+', Tv.as_view(template_name='offline.html'), name="offline"),
url(r'^predict/', include('apps.predict.urls', namespace='predict')),
url(r'^pipeline/', include('apps.pipeline.urls')),
url(r'^uploads/', include('apps.uploads.urls')),
url(r'^genes/', include('apps.mutations.urls')),
url(r'^maps/', include('apps.maps.urls')),
path('version/', Version.as_view(), name='version'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
import debug_toolbar
urlpatterns.append(url(r'^__debug__/', include(debug_toolbar.urls)))
from .views import Error
for e in ('403','404','500'):
locals()['handler'+e] = Error.as_error(e)
urlpatterns.append(url('^error/%s/$' % e, Error.as_error(e)))
|
IQSS/gentb-site
|
tb_website/urls.py
|
Python
|
agpl-3.0
| 2,636
|
# Copyright (c) 2008, 2012 testtools developers. See LICENSE for details.
from testtools import TestCase
from testtools.matchers import Equals, MatchesException, Raises
from testtools.content_type import (
ContentType,
JSON,
UTF8_TEXT,
)
class TestContentType(TestCase):
def test___init___None_errors(self):
raises_value_error = Raises(MatchesException(ValueError))
self.assertThat(lambda:ContentType(None, None), raises_value_error)
self.assertThat(lambda:ContentType(None, "traceback"),
raises_value_error)
self.assertThat(lambda:ContentType("text", None), raises_value_error)
def test___init___sets_ivars(self):
content_type = ContentType("foo", "bar")
self.assertEqual("foo", content_type.type)
self.assertEqual("bar", content_type.subtype)
self.assertEqual({}, content_type.parameters)
def test___init___with_parameters(self):
content_type = ContentType("foo", "bar", {"quux": "thing"})
self.assertEqual({"quux": "thing"}, content_type.parameters)
def test___eq__(self):
content_type1 = ContentType("foo", "bar", {"quux": "thing"})
content_type2 = ContentType("foo", "bar", {"quux": "thing"})
content_type3 = ContentType("foo", "bar", {"quux": "thing2"})
self.assertTrue(content_type1.__eq__(content_type2))
self.assertFalse(content_type1.__eq__(content_type3))
def test_basic_repr(self):
content_type = ContentType('text', 'plain')
self.assertThat(repr(content_type), Equals('text/plain'))
def test_extended_repr(self):
content_type = ContentType(
'text', 'plain', {'foo': 'bar', 'baz': 'qux'})
self.assertThat(
repr(content_type), Equals('text/plain; baz="qux"; foo="bar"'))
class TestBuiltinContentTypes(TestCase):
def test_plain_text(self):
# The UTF8_TEXT content type represents UTF-8 encoded text/plain.
self.assertThat(UTF8_TEXT.type, Equals('text'))
self.assertThat(UTF8_TEXT.subtype, Equals('plain'))
self.assertThat(UTF8_TEXT.parameters, Equals({'charset': 'utf8'}))
def test_json_content(self):
# The JSON content type represents implictly UTF-8 application/json.
self.assertThat(JSON.type, Equals('application'))
self.assertThat(JSON.subtype, Equals('json'))
self.assertThat(JSON.parameters, Equals({}))
def test_suite():
from unittest import TestLoader
return TestLoader().loadTestsFromName(__name__)
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/testtools/tests/test_content_type.py
|
Python
|
agpl-3.0
| 2,543
|
import pkg_resources
from datetime import datetime
import re
from inbox.api.validation import (
get_recipients, get_attachments, get_thread, get_message)
from inbox.api.err import InputError
from inbox.contacts.process_mail import update_contacts_from_message
from inbox.models import Message, Part
from inbox.models.action_log import schedule_action
from inbox.sqlalchemy_ext.util import generate_public_id
VERSION = pkg_resources.get_distribution('inbox-sync').version
class SendMailException(Exception):
"""
Raised when sending fails.
Parameters
----------
message: string
A descriptive error message.
http_code: int
An appropriate HTTP error code for the particular type of failure.
server_error: string, optional
The error returned by the mail server.
failures: dict, optional
If sending only failed for some recipients, information on the specific
failures.
"""
def __init__(self, message, http_code, server_error=None, failures=None):
self.message = message
self.http_code = http_code
self.server_error = server_error
self.failures = failures
super(SendMailException, self).__init__(
message, http_code, server_error, failures)
def get_sendmail_client(account):
from inbox.sendmail import module_registry
sendmail_mod = module_registry.get(account.provider)
sendmail_cls = getattr(sendmail_mod, sendmail_mod.SENDMAIL_CLS)
sendmail_client = sendmail_cls(account)
return sendmail_client
def create_draft_from_mime(account, raw_mime, db_session):
our_uid = generate_public_id() # base-36 encoded string
new_headers = ('X-INBOX-ID: {0}-0\r\n'
'Message-Id: <{0}-0@mailer.nylas.com>\r\n'
'User-Agent: NylasMailer/{1}\r\n').format(our_uid, VERSION)
new_body = new_headers + raw_mime
with db_session.no_autoflush:
msg = Message.create_from_synced(account, '', '',
datetime.utcnow(), new_body)
if msg.from_addr and len(msg.from_addr) > 1:
raise InputError("from_addr field can have at most one item")
if msg.reply_to and len(msg.reply_to) > 1:
raise InputError("reply_to field can have at most one item")
if msg.subject is not None and not \
isinstance(msg.subject, basestring):
raise InputError('"subject" should be a string')
if not isinstance(msg.body, basestring):
raise InputError('"body" should be a string')
if msg.references or msg.in_reply_to:
msg.is_reply = True
thread_cls = account.thread_cls
msg.thread = thread_cls(
subject=msg.subject,
recentdate=msg.received_date,
namespace=account.namespace,
subjectdate=msg.received_date)
msg.is_created = True
msg.is_sent = True
msg.is_draft = False
msg.is_read = True
db_session.add(msg)
db_session.flush()
return msg
def block_to_part(block, message, namespace):
inline_image_uri = r'cid:{}'.format(block.public_id)
is_inline = re.search(inline_image_uri, message.body) is not None
# Create a new Part object to associate to the message object.
# (You can't just set block.message, because if block is an
# attachment on an existing message, that would dissociate it from
# the existing message.)
part = Part(block=block)
part.content_id = block.public_id if is_inline else None
part.namespace_id = namespace.id
part.content_disposition = 'inline' if is_inline else 'attachment'
part.is_inboxapp_attachment = True
return part
def create_message_from_json(data, namespace, db_session, is_draft):
""" Construct a Message instance from `data`, a dictionary representing the
POST body of an API request. All new objects are added to the session, but
not committed."""
# Validate the input and get referenced objects (thread, attachments)
# as necessary.
to_addr = get_recipients(data.get('to'), 'to')
cc_addr = get_recipients(data.get('cc'), 'cc')
bcc_addr = get_recipients(data.get('bcc'), 'bcc')
from_addr = get_recipients(data.get('from'), 'from')
reply_to = get_recipients(data.get('reply_to'), 'reply_to')
if from_addr and len(from_addr) > 1:
raise InputError("from_addr field can have at most one item")
if reply_to and len(reply_to) > 1:
raise InputError("reply_to field can have at most one item")
subject = data.get('subject')
if subject is not None and not isinstance(subject, basestring):
raise InputError('"subject" should be a string')
body = data.get('body', '')
if not isinstance(body, basestring):
raise InputError('"body" should be a string')
blocks = get_attachments(data.get('file_ids'), namespace.id, db_session)
reply_to_thread = get_thread(data.get('thread_id'), namespace.id,
db_session)
reply_to_message = get_message(data.get('reply_to_message_id'),
namespace.id, db_session)
if reply_to_message is not None and reply_to_thread is not None:
if reply_to_message not in reply_to_thread.messages:
raise InputError('Message {} is not in thread {}'.
format(reply_to_message.public_id,
reply_to_thread.public_id))
with db_session.no_autoflush:
account = namespace.account
dt = datetime.utcnow()
uid = generate_public_id()
to_addr = to_addr or []
cc_addr = cc_addr or []
bcc_addr = bcc_addr or []
blocks = blocks or []
if subject is None:
# If this is a reply with no explicitly specified subject, set the
# subject from the prior message/thread by default.
# TODO(emfree): Do we want to allow changing the subject on a reply
# at all?
if reply_to_message is not None:
subject = reply_to_message.subject
elif reply_to_thread is not None:
subject = reply_to_thread.subject
subject = subject or ''
message = Message()
message.namespace = namespace
message.is_created = True
message.is_draft = is_draft
message.from_addr = from_addr if from_addr else \
[(account.name, account.email_address)]
# TODO(emfree): we should maybe make received_date nullable, so its
# value doesn't change in the case of a drafted-and-later-reconciled
# message.
message.received_date = dt
message.subject = subject
message.body = body
message.to_addr = to_addr
message.cc_addr = cc_addr
message.bcc_addr = bcc_addr
message.reply_to = reply_to
# TODO(emfree): this is different from the normal 'size' value of a
# message, which is the size of the entire MIME message.
message.size = len(body)
message.is_read = True
message.is_sent = False
message.public_id = uid
message.version = 0
message.regenerate_nylas_uid()
# Set the snippet
message.snippet = message.calculate_html_snippet(body)
# Associate attachments to the draft message
for block in blocks:
message.parts.append(block_to_part(block, message, namespace))
update_contacts_from_message(db_session, message, namespace)
if reply_to_message is not None:
message.is_reply = True
_set_reply_headers(message, reply_to_message)
thread = reply_to_message.thread
message.reply_to_message = reply_to_message
elif reply_to_thread is not None:
message.is_reply = True
thread = reply_to_thread
# Construct the in-reply-to and references headers from the last
# message currently in the thread.
previous_messages = [m for m in thread.messages if not m.is_draft]
if previous_messages:
last_message = previous_messages[-1]
message.reply_to_message = last_message
_set_reply_headers(message, last_message)
else:
# If this isn't a reply to anything, create a new thread object for
# the draft. We specialize the thread class so that we can, for
# example, add the g_thrid for Gmail later if we reconcile a synced
# message with this one. This is a huge hack, but works.
message.is_reply = False
thread_cls = account.thread_cls
thread = thread_cls(
subject=message.subject,
recentdate=message.received_date,
namespace=namespace,
subjectdate=message.received_date)
message.thread = thread
db_session.add(message)
if is_draft:
schedule_action('save_draft', message, namespace.id, db_session,
version=message.version)
db_session.flush()
return message
def update_draft(db_session, account, draft, to_addr=None,
subject=None, body=None, blocks=None, cc_addr=None,
bcc_addr=None, from_addr=None, reply_to=None):
"""
Update draft with new attributes.
"""
def update(attr, value=None):
if value is not None:
setattr(draft, attr, value)
if attr == 'body':
# Update size, snippet too
draft.size = len(value)
draft.snippet = draft.calculate_html_snippet(
value)
update('to_addr', to_addr)
update('cc_addr', cc_addr)
update('bcc_addr', bcc_addr)
update('reply_to', reply_to)
update('from_addr', from_addr)
update('subject', subject if subject else None)
update('body', body if body else None)
update('received_date', datetime.utcnow())
# Remove any attachments that aren't specified
new_block_ids = [b.id for b in blocks]
for part in filter(lambda x: x.block_id not in new_block_ids,
draft.parts):
draft.parts.remove(part)
db_session.delete(part)
# Parts require special handling
for block in blocks:
# Don't re-add attachments that are already attached
if block.id in [p.block_id for p in draft.parts]:
continue
draft.parts.append(block_to_part(block, draft, account.namespace))
thread = draft.thread
if len(thread.messages) == 1:
# If there are no prior messages on the thread, update its subject and
# dates to match the draft.
thread.subject = draft.subject
thread.subjectdate = draft.received_date
thread.recentdate = draft.received_date
# Remove previous message-contact associations, and create new ones.
draft.contacts = []
update_contacts_from_message(db_session, draft, account.namespace)
# The draft we're updating may or may not be one authored through the API:
# - Ours: is_created = True, Message-Id = public_id+version
# - Not Ours: is_created = False, Message-Id = ???
# Mark that the draft is now created by us
draft.is_created = True
# Save the current Message-Id so we know which draft to delete in syncback
old_message_id_header = draft.message_id_header
# Increment version and rebuild the message ID header.
draft.version += 1
draft.regenerate_nylas_uid()
# Sync to remote
schedule_action('update_draft', draft, draft.namespace.id, db_session,
version=draft.version,
old_message_id_header=old_message_id_header)
db_session.commit()
return draft
def delete_draft(db_session, account, draft):
""" Delete the given draft. """
thread = draft.thread
assert draft.is_draft
# Delete remotely.
schedule_action('delete_draft', draft, draft.namespace.id, db_session,
nylas_uid=draft.nylas_uid,
message_id_header=draft.message_id_header)
db_session.delete(draft)
# Delete the thread if it would now be empty.
if not thread.messages:
db_session.delete(thread)
db_session.commit()
def generate_attachments(message, blocks):
attachment_dicts = []
for block in blocks:
content_disposition = 'attachment'
for part in block.parts:
if part.message_id == message.id and part.content_disposition == 'inline':
content_disposition = 'inline'
break
attachment_dicts.append({
'block_id': block.public_id,
'filename': block.filename,
'data': block.data,
'content_type': block.content_type,
'content_disposition': content_disposition,
})
return attachment_dicts
def _set_reply_headers(new_message, previous_message):
"""When creating a draft in reply to a thread, set the In-Reply-To and
References headers appropriately, if possible."""
if previous_message.message_id_header:
new_message.in_reply_to = previous_message.message_id_header
if previous_message.references:
new_message.references = (previous_message.references +
[previous_message.message_id_header])
else:
new_message.references = [previous_message.message_id_header]
|
nylas/sync-engine
|
inbox/sendmail/base.py
|
Python
|
agpl-3.0
| 13,544
|
# uso dizionario come cache per evitare di calcolare sempre
PEOPLE = []
def main():
"""
devo far inserire name, city, salary come input e salvarli nel dizionario
# 1.finche utente non smette.
# 2.l'utente inserisce il nome
usa raw_input per chiedere le info all'utente
# 3.l'utente inserisce la città
# 4.l'utente inserisce lo stipendio
# 5.inserisci il dizionario con chiavi
'name','city','salary'
nella lista PEOPLE = []
PEOPLE.append(person_d)
# 6.STAMPA A VIDEO PEOPLE nel modo che ti piace
# 7.ri-inizia da Step 1
# FINE
#----BONUS-----
#STEP 8.QUANDO L'UTENTE SMETTE --> SCRIVI I DATI IN UN FILE
# SE VUOI STEP 8.1 IN FOMRATO JSON
# SE VUOI STEP 8.2 IN FORMATO CSV
# SE VUOI STEP 8.3 IN FORMATO XML
# STEP 9. FALLO ANCHE SE L 'UTENTE PREME CTRL+C O CTRL+Z
"""
cont = True
while cont:
cont = insert_person()
stampa_lista()
scrivi_file()
def insert_person():
ret_val = False
nome = get_input("Come ti chiami ? ")
if nome :
cit = get_input("Dove vivi ? ")
if cit :
salario = get_input("Quanto guadagni mensilmente ? ")
try:
salario = int(salario)
except ValueError:
print("Salario non valido")
return False
if salario :
persona = {"name":nome , "city" : cit, "salary" : salario }
PEOPLE.append(persona)
ret_val = True
#print(ret_val)
return ret_val
def stampa_lista():
print("Stampo la mia lista... ")
for x in PEOPLE:
print("Sig: {name} di {city} guadagna {salary}".format(**x) )
def get_input(msg):
try:
ret = raw_input(msg)
except KeyboardInterrupt:
ret =''
return ret
def scrivi_file():
print("Scrivo file... ")
if __name__ == "__main__":
main()
|
feroda/lessons-python4beginners
|
students/2016-09-04/pnatile/usodiz.py
|
Python
|
agpl-3.0
| 1,771
|
"""
API for initiating and tracking requests for credit from a provider.
"""
from __future__ import absolute_import
import datetime
import logging
import uuid
import pytz
import six
from django.db import transaction
from edx_proctoring.api import get_last_exam_completion_date
from openedx.core.djangoapps.credit.exceptions import (
CreditProviderNotConfigured,
CreditRequestNotFound,
InvalidCreditStatus,
RequestAlreadyCompleted,
UserIsNotEligible
)
from openedx.core.djangoapps.credit.models import (
CreditEligibility,
CreditProvider,
CreditRequest,
CreditRequirementStatus
)
from openedx.core.djangoapps.credit.signature import get_shared_secret_key, signature
from student.models import CourseEnrollment, User
from util.date_utils import to_timestamp
from util.json_request import JsonResponse
# TODO: Cleanup this mess! ECOM-2908
log = logging.getLogger(__name__)
def get_credit_providers(providers_list=None):
"""Retrieve all available credit providers or filter on given providers_list.
Arguments:
providers_list (list of strings or None): contains list of ids of credit providers
or None.
Returns:
list of credit providers represented as dictionaries
Response Values:
>>> get_credit_providers(['hogwarts'])
[
{
"id": "hogwarts",
"name": "Hogwarts School of Witchcraft and Wizardry",
"url": "https://credit.example.com/",
"status_url": "https://credit.example.com/status/",
"description: "A new model for the Witchcraft and Wizardry School System.",
"enable_integration": false,
"fulfillment_instructions": "
<p>In order to fulfill credit, Hogwarts School of Witchcraft and Wizardry requires learners to:</p>
<ul>
<li>Sample instruction abc</li>
<li>Sample instruction xyz</li>
</ul>",
},
...
]
"""
return CreditProvider.get_credit_providers(providers_list=providers_list)
def get_credit_provider_info(request, provider_id): # pylint: disable=unused-argument
"""Retrieve the 'CreditProvider' model data against provided
credit provider.
Args:
provider_id (str): The identifier for the credit provider
Returns: 'CreditProvider' data dictionary
Example Usage:
>>> get_credit_provider_info("hogwarts")
{
"provider_id": "hogwarts",
"display_name": "Hogwarts School of Witchcraft and Wizardry",
"provider_url": "https://credit.example.com/",
"provider_status_url": "https://credit.example.com/status/",
"provider_description: "A new model for the Witchcraft and Wizardry School System.",
"enable_integration": False,
"fulfillment_instructions": "
<p>In order to fulfill credit, Hogwarts School of Witchcraft and Wizardry requires learners to:</p>
<ul>
<li>Sample instruction abc</li>
<li>Sample instruction xyz</li>
</ul>",
"thumbnail_url": "https://credit.example.com/logo.png"
}
"""
credit_provider = CreditProvider.get_credit_provider(provider_id=provider_id)
credit_provider_data = {}
if credit_provider:
credit_provider_data = {
"provider_id": credit_provider.provider_id,
"display_name": credit_provider.display_name,
"provider_url": credit_provider.provider_url,
"provider_status_url": credit_provider.provider_status_url,
"provider_description": credit_provider.provider_description,
"enable_integration": credit_provider.enable_integration,
"fulfillment_instructions": credit_provider.fulfillment_instructions,
"thumbnail_url": credit_provider.thumbnail_url
}
return JsonResponse(credit_provider_data)
@transaction.atomic
def create_credit_request(course_key, provider_id, username):
"""
Initiate a request for credit from a credit provider.
This will return the parameters that the user's browser will need to POST
to the credit provider. It does NOT calculate the signature.
Only users who are eligible for credit (have satisfied all credit requirements) are allowed to make requests.
A provider can be configured either with *integration enabled* or not.
If automatic integration is disabled, this method will simply return
a URL to the credit provider and method set to "GET", so the student can
visit the URL and request credit directly. No database record will be created
to track these requests.
If automatic integration *is* enabled, then this will also return the parameters
that the user's browser will need to POST to the credit provider.
These parameters will be digitally signed using a secret key shared with the credit provider.
A database record will be created to track the request with a 32-character UUID.
The returned dictionary can be used by the user's browser to send a POST request to the credit provider.
If a pending request already exists, this function should return a request description with the same UUID.
(Other parameters, such as the user's full name may be different than the original request).
If a completed request (either accepted or rejected) already exists, this function will
raise an exception. Users are not allowed to make additional requests once a request
has been completed.
Arguments:
course_key (CourseKey): The identifier for the course.
provider_id (str): The identifier of the credit provider.
username (str): The user initiating the request.
Returns: dict
Raises:
UserIsNotEligible: The user has not satisfied eligibility requirements for credit.
CreditProviderNotConfigured: The credit provider has not been configured for this course.
RequestAlreadyCompleted: The user has already submitted a request and received a response
from the credit provider.
Example Usage:
>>> create_credit_request(course.id, "hogwarts", "ron")
{
"url": "https://credit.example.com/request",
"method": "POST",
"parameters": {
"request_uuid": "557168d0f7664fe59097106c67c3f847",
"timestamp": 1434631630,
"course_org": "HogwartsX",
"course_num": "Potions101",
"course_run": "1T2015",
"final_grade": "0.95",
"user_username": "ron",
"user_email": "ron@example.com",
"user_full_name": "Ron Weasley",
"user_mailing_address": "",
"user_country": "US",
"signature": "cRCNjkE4IzY+erIjRwOQCpRILgOvXx4q2qvx141BCqI="
}
}
"""
try:
user_eligibility = CreditEligibility.objects.select_related('course').get(
username=username,
course__course_key=course_key
)
credit_course = user_eligibility.course
credit_provider = CreditProvider.objects.get(provider_id=provider_id)
except CreditEligibility.DoesNotExist:
log.warning(
u'User "%s" tried to initiate a request for credit in course "%s", '
u'but the user is not eligible for credit',
username, course_key
)
raise UserIsNotEligible
except CreditProvider.DoesNotExist:
log.error(u'Credit provider with ID "%s" has not been configured.', provider_id)
raise CreditProviderNotConfigured
# Check if we've enabled automatic integration with the credit
# provider. If not, we'll show the user a link to a URL
# where the user can request credit directly from the provider.
# Note that we do NOT track these requests in our database,
# since the state would always be "pending" (we never hear back).
if not credit_provider.enable_integration:
return {
"url": credit_provider.provider_url,
"method": "GET",
"parameters": {}
}
else:
# If automatic credit integration is enabled, then try
# to retrieve the shared signature *before* creating the request.
# That way, if there's a misconfiguration, we won't have requests
# in our system that we know weren't sent to the provider.
shared_secret_key = get_shared_secret_key(credit_provider.provider_id)
if shared_secret_key is None:
msg = u'Credit provider with ID "{provider_id}" does not have a secret key configured.'.format(
provider_id=credit_provider.provider_id
)
log.error(msg)
raise CreditProviderNotConfigured(msg)
# Initiate a new request if one has not already been created
credit_request, created = CreditRequest.objects.get_or_create(
course=credit_course,
provider=credit_provider,
username=username,
)
# Check whether we've already gotten a response for a request,
# If so, we're not allowed to issue any further requests.
# Skip checking the status if we know that we just created this record.
if not created and credit_request.status != "pending":
log.warning(
(
u'Cannot initiate credit request because the request with UUID "%s" '
u'exists with status "%s"'
), credit_request.uuid, credit_request.status
)
raise RequestAlreadyCompleted
if created:
credit_request.uuid = uuid.uuid4().hex
# Retrieve user account and profile info
user = User.objects.select_related('profile').get(username=username)
# Retrieve the final grade from the eligibility table
try:
final_grade = CreditRequirementStatus.objects.get(
username=username,
requirement__namespace="grade",
requirement__name="grade",
requirement__course__course_key=course_key,
status="satisfied"
).reason["final_grade"]
# NOTE (CCB): Limiting the grade to seven characters is a hack for ASU.
if len(six.text_type(final_grade)) > 7:
final_grade = u'{:.5f}'.format(final_grade)
else:
final_grade = six.text_type(final_grade)
except (CreditRequirementStatus.DoesNotExist, TypeError, KeyError):
msg = u'Could not retrieve final grade from the credit eligibility table for ' \
u'user [{user_id}] in course [{course_key}].'.format(user_id=user.id, course_key=course_key)
log.exception(msg)
raise UserIsNotEligible(msg)
# Getting the students's enrollment date
course_enrollment = CourseEnrollment.get_enrollment(user, course_key)
enrollment_date = course_enrollment.created if course_enrollment else ""
# Getting the student's course completion date
completion_date = get_last_exam_completion_date(course_key, username)
parameters = {
"request_uuid": credit_request.uuid,
"timestamp": to_timestamp(datetime.datetime.now(pytz.UTC)),
"course_org": course_key.org,
"course_num": course_key.course,
"course_run": course_key.run,
"enrollment_timestamp": to_timestamp(enrollment_date) if enrollment_date else "",
"course_completion_timestamp": to_timestamp(completion_date) if completion_date else "",
"final_grade": final_grade,
"user_username": user.username,
"user_email": user.email,
"user_full_name": user.profile.name,
"user_mailing_address": "",
"user_country": (
user.profile.country.code
if user.profile.country.code is not None
else ""
),
}
credit_request.parameters = parameters
credit_request.save()
if created:
log.info(u'Created new request for credit with UUID "%s"', credit_request.uuid)
else:
log.info(
u'Updated request for credit with UUID "%s" so the user can re-issue the request',
credit_request.uuid
)
# Sign the parameters using a secret key we share with the credit provider.
parameters["signature"] = signature(parameters, shared_secret_key)
return {
"url": credit_provider.provider_url,
"method": "POST",
"parameters": parameters
}
def update_credit_request_status(request_uuid, provider_id, status):
"""
Update the status of a credit request.
Approve or reject a request for a student to receive credit in a course
from a particular credit provider.
This function does NOT check that the status update is authorized.
The caller needs to handle authentication and authorization (checking the signature
of the message received from the credit provider)
The function is idempotent; if the request has already been updated to the status,
the function does nothing.
Arguments:
request_uuid (str): The unique identifier for the credit request.
provider_id (str): Identifier for the credit provider.
status (str): Either "approved" or "rejected"
Returns: None
Raises:
CreditRequestNotFound: No request exists that is associated with the given provider.
InvalidCreditStatus: The status is not either "approved" or "rejected".
"""
if status not in [CreditRequest.REQUEST_STATUS_APPROVED, CreditRequest.REQUEST_STATUS_REJECTED]:
raise InvalidCreditStatus
try:
request = CreditRequest.objects.get(uuid=request_uuid, provider__provider_id=provider_id)
old_status = request.status
request.status = status
request.save()
log.info(
u'Updated request with UUID "%s" from status "%s" to "%s" for provider with ID "%s".',
request_uuid, old_status, status, provider_id
)
except CreditRequest.DoesNotExist:
msg = (
u'Credit provider with ID "{provider_id}" attempted to '
u'update request with UUID "{request_uuid}", but no request '
u'with this UUID is associated with the provider.'
).format(provider_id=provider_id, request_uuid=request_uuid)
log.warning(msg)
raise CreditRequestNotFound(msg)
def get_credit_requests_for_user(username):
"""
Retrieve the status of a credit request.
Returns either "pending", "approved", or "rejected"
Arguments:
username (unicode): The username of the user who initiated the requests.
Returns: list
Example Usage:
>>> get_credit_request_status_for_user("bob")
[
{
"uuid": "557168d0f7664fe59097106c67c3f847",
"timestamp": 1434631630,
"course_key": "course-v1:HogwartsX+Potions101+1T2015",
"provider": {
"id": "HogwartsX",
"display_name": "Hogwarts School of Witchcraft and Wizardry",
},
"status": "pending" # or "approved" or "rejected"
}
]
"""
return CreditRequest.credit_requests_for_user(username)
def get_credit_request_status(username, course_key):
"""Get the credit request status.
This function returns the status of credit request of user for given course.
It returns the latest request status for the any credit provider.
The valid status are 'pending', 'approved' or 'rejected'.
Args:
username(str): The username of user
course_key(CourseKey): The course locator key
Returns:
A dictionary of credit request user has made if any
"""
credit_request = CreditRequest.get_user_request_status(username, course_key)
return {
"uuid": credit_request.uuid,
"timestamp": credit_request.modified,
"course_key": credit_request.course.course_key,
"provider": {
"id": credit_request.provider.provider_id,
"display_name": credit_request.provider.display_name
},
"status": credit_request.status
} if credit_request else {}
|
ESOedX/edx-platform
|
openedx/core/djangoapps/credit/api/provider.py
|
Python
|
agpl-3.0
| 16,234
|
'''
Created on August 19, 2013
@package: livedesk-sync
@copyright: 2013 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Martin Saturka
Content for icons of collaborators of chained blogs.
'''
import socket
import logging
from urllib.request import urlopen
from ally.api.model import Content
from urllib.error import HTTPError
from ally.exception import InputError, Ref
from ally.internationalization import _
from urllib.request import Request
from urllib.parse import quote, urlsplit, SplitResult, urlunsplit
# --------------------------------------------------------------------
log = logging.getLogger(__name__)
# --------------------------------------------------------------------
class ChainedIconContent(Content):
'''
Simple remote icon content taking
'''
__slots__ = ('_url', '_response')
def __init__(self, contentURL, fileName):
'''
Initialize the content.
@param contentURL: string
The URL of the icon to be downloaded.
@param fileName: string
The name of file under that the icon should be saved.
'''
Content.__init__(self, fileName, 'image', 'binary', 0)
(scheme, netloc, path, query, fragment) = urlsplit(contentURL if not isinstance(contentURL, Request) else contentURL.full_url)
if not scheme: scheme = 'http'
parsed = SplitResult(scheme, netloc, quote(path), quote(query), fragment)
if isinstance(contentURL, Request): contentURL.full_url = urlunsplit(parsed)
else: contentURL = urlunsplit(parsed)
self._url = contentURL
self._response = None
def read(self, nbytes=None):
'''
@see: Content.read
'''
if not self._response:
try:
req = Request(self._url, headers={'User-Agent' : 'Magic Browser'})
self._response = urlopen(req)
except (HTTPError, socket.error) as e:
log.error('Can not read icon image data %s' % e)
raise InputError(Ref(_('Can not open icon URL'),))
if not self._response:
log.error('Can not read icon image data %s' % e)
raise InputError(Ref(_('Can not open icon URL'),))
if str(self._response.status) != '200':
raise InputError(Ref(_('Can not open icon URL'),))
self.type = self._response.getheader('Content-Type')
if not self.type:
self.type = 'image'
self.length = self._response.getheader('Content-Length')
if not self.length:
self.length = 0
if (not self._response) or self._response.closed:
return ''
try:
if nbytes:
return self._response.read(nbytes)
return self._response.read()
except (HTTPError, socket.error) as e:
log.error('Can not read icon image data %s' % e)
raise InputError(Ref(_('Can not read from icon URL'),))
def next(self):
'''
@see: Content.next
'''
return None
|
superdesk/Live-Blog
|
plugins/livedesk-sync/livedesk/core/impl/icon_content.py
|
Python
|
agpl-3.0
| 3,125
|
# coding=utf-8
__author__ = "Daniel Arroyo <daniel@astroprint.com>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import logging
from astroprint.network import NetworkManager as NetworkManagerBase
class MacDevNetworkManager(NetworkManagerBase):
def __init__(self):
self.name = "astrobox-dev"
self.logger = logging.getLogger(__name__)
super(MacDevNetworkManager, self).__init__()
def getActiveConnections(self):
return {
'wired': {
'id': 'localhost',
'signal': None,
'name': 'Localhost',
'ip': '127.0.0.1:5000',
'secured': True
},
'wireless': None,
'manual': None
}
def storedWifiNetworks(self):
return [
{'id': '1', 'name': 'Test Connection 1', 'active': True},
{'id': '2', 'name': 'Test Connection 2', 'active': False},
{'id': '3', 'name': 'Test Connection 3', 'active': False}
]
def deleteStoredWifiNetwork(self, networkId):
return (networkId in [c['id'] for c in self.storedWifiNetworks()])
def hasWifi(self):
return False
def isOnline(self):
return True
def startHotspot(self):
# return True when succesful
return "Not supporded on Mac"
def stopHotspot(self):
# return True when succesful
return "Not supporded on Mac"
def getHostname(self):
return self.name
def setHostname(self, name):
self.name = name
self.logger.info('Host name is set to %s ' % name)
return True
|
abinashk-inf/AstroBox
|
src/astroprint/network/mac_dev.py
|
Python
|
agpl-3.0
| 1,674
|
import os
import functools
import logging
import six
from pelican.utils import (slugify, python_2_unicode_compatible)
logger = logging.getLogger(__name__)
@python_2_unicode_compatible
@functools.total_ordering
class URLWrapper(object):
def __init__(self, name, settings):
# next 2 lines are redundant with the setter of the name property
# but are here for clarity
self.settings = settings
self._name = name
self.slug = slugify(name, self.settings.get('SLUG_SUBSTITUTIONS', ()))
self.name = name
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._name = name
self.slug = slugify(name, self.settings.get('SLUG_SUBSTITUTIONS', ()))
def as_dict(self):
d = self.__dict__
d['name'] = self.name
return d
def __hash__(self):
return hash(self.slug)
def _key(self):
return self.slug
def _normalize_key(self, key):
subs = self.settings.get('SLUG_SUBSTITUTIONS', ())
return six.text_type(slugify(key, subs))
def __eq__(self, other):
return self._key() == self._normalize_key(other)
def __ne__(self, other):
return self._key() != self._normalize_key(other)
def __lt__(self, other):
return self._key() < self._normalize_key(other)
def __str__(self):
return self.name
def __repr__(self):
return '<{} {}>'.format(type(self).__name__, str(self))
def _from_settings(self, key, get_page_name=False):
"""Returns URL information as defined in settings.
When get_page_name=True returns URL without anything after {slug} e.g.
if in settings: CATEGORY_URL="cat/{slug}.html" this returns
"cat/{slug}" Useful for pagination.
"""
setting = "%s_%s" % (self.__class__.__name__.upper(), key)
value = self.settings[setting]
if not isinstance(value, six.string_types):
logger.warning('%s is set to %s', (setting, value))
return value
else:
if get_page_name:
return os.path.splitext(value)[0].format(**self.as_dict())
else:
return value.format(**self.as_dict())
page_name = property(functools.partial(_from_settings, key='URL',
get_page_name=True))
url = property(functools.partial(_from_settings, key='URL'))
save_as = property(functools.partial(_from_settings, key='SAVE_AS'))
class Category(URLWrapper):
pass
class Tag(URLWrapper):
def __init__(self, name, *args, **kwargs):
super(Tag, self).__init__(name.strip(), *args, **kwargs)
class Author(URLWrapper):
pass
|
0xMF/pelican
|
pelican/urlwrappers.py
|
Python
|
agpl-3.0
| 2,731
|
"""Automatically download MLdata datasets."""
# Copyright (c) 2011 Pietro Berkes
# License: Simplified BSD
import os
from os.path import join, exists
import re
import numpy as np
import scipy as sp
from scipy import io
from shutil import copyfileobj
import urllib2
from .base import get_data_home, Bunch
MLDATA_BASE_URL = "http://mldata.org/repository/data/download/matlab/%s"
def mldata_filename(dataname):
"""Convert a raw name for a data set in a mldata.org filename."""
dataname = dataname.lower().replace(' ', '-')
return re.sub(r'[().]', '', dataname)
def fetch_mldata(dataname, target_name='label', data_name='data',
transpose_data=True, data_home=None):
"""Fetch an mldata.org data set
If the file does not exist yet, it is downloaded from mldata.org .
mldata.org does not have an enforced convention for storing data or
naming the columns in a data set. The default behavior of this function
works well with the most common cases:
1) data values are stored in the column 'data', and target values in the
column 'label'
2) alternatively, the first column stores target values, and the second
data values
3) the data array is stored as `n_features x n_samples` , and thus needs
to be transposed to match the `sklearn` standard
Keyword arguments allow to adapt these defaults to specific data sets
(see parameters `target_name`, `data_name`, `transpose_data`, and
the examples below).
mldata.org data sets may have multiple columns, which are stored in the
Bunch object with their original name.
Parameters
----------
dataname:
Name of the data set on mldata.org,
e.g.: "leukemia", "Whistler Daily Snowfall", etc.
The raw name is automatically converted to a mldata.org URL .
target_name: optional, default: 'label'
Name or index of the column containing the target values.
data_name: optional, default: 'data'
Name or index of the column containing the data.
transpose_data: optional, default: True
If True, transpose the downloaded data array.
data_home: optional, default: None
Specify another download and cache folder for the data sets. By default
all scikit learn data is stored in '~/scikit_learn_data' subfolders.
Returns
-------
data : Bunch
Dictionary-like object, the interesting attributes are:
'data', the data to learn, 'target', the classification labels,
'DESCR', the full description of the dataset, and
'COL_NAMES', the original names of the dataset columns.
Examples
--------
Load the 'iris' dataset from mldata.org:
>>> from sklearn.datasets.mldata import fetch_mldata
>>> iris = fetch_mldata('iris')
>>> iris.target[0]
1
>>> print(iris.data[0])
[-0.555556 0.25 -0.864407 -0.916667]
Load the 'leukemia' dataset from mldata.org, which needs to be transposed
to respects the sklearn axes convention:
>>> leuk = fetch_mldata('leukemia', transpose_data=True)
>>> print(leuk.data.shape[0])
72
Load an alternative 'iris' dataset, which has different names for the
columns:
>>> iris2 = fetch_mldata('datasets-UCI iris', target_name=1,
... data_name=0)
>>> iris3 = fetch_mldata('datasets-UCI iris',
... target_name='class', data_name='double0')
"""
# normalize dataset name
dataname = mldata_filename(dataname)
# check if this data set has been already downloaded
data_home = get_data_home(data_home=data_home)
data_home = join(data_home, 'mldata')
if not exists(data_home):
os.makedirs(data_home)
matlab_name = dataname + '.mat'
filename = join(data_home, matlab_name)
# if the file does not exist, download it
if not exists(filename):
urlname = MLDATA_BASE_URL % urllib2.quote(dataname)
try:
mldata_url = urllib2.urlopen(urlname)
except urllib2.HTTPError as e:
if e.code == 404:
e.msg = "Dataset '%s' not found on mldata.org." % dataname
raise
# store Matlab file
try:
with open(filename, 'w+b') as matlab_file:
copyfileobj(mldata_url, matlab_file)
except:
os.remove(filename)
raise
mldata_url.close()
# load dataset matlab file
with open(filename, 'rb') as matlab_file:
matlab_dict = io.loadmat(matlab_file, struct_as_record=True)
# -- extract data from matlab_dict
# flatten column names
col_names = [str(descr[0])
for descr in matlab_dict['mldata_descr_ordering'][0]]
# if target or data names are indices, transform then into names
if isinstance(target_name, (int, np.integer)):
target_name = col_names[target_name]
if isinstance(data_name, (int, np.integer)):
data_name = col_names[data_name]
# rules for making sense of the mldata.org data format
# (earlier ones have priority):
# 1) there is only one array => it is "data"
# 2) there are multiple arrays
# a) copy all columns in the bunch, using their column name
# b) if there is a column called `target_name`, set "target" to it,
# otherwise set "target" to first column
# c) if there is a column called `data_name`, set "data" to it,
# otherwise set "data" to second column
dataset = {'DESCR': 'mldata.org dataset: %s' % dataname,
'COL_NAMES': col_names}
# 1) there is only one array => it is considered data
if len(col_names) == 1:
data_name = col_names[0]
dataset['data'] = matlab_dict[data_name]
# 2) there are multiple arrays
else:
for name in col_names:
dataset[name] = matlab_dict[name]
if target_name in col_names:
del dataset[target_name]
dataset['target'] = matlab_dict[target_name]
else:
del dataset[col_names[0]]
dataset['target'] = matlab_dict[col_names[0]]
if data_name in col_names:
del dataset[data_name]
dataset['data'] = matlab_dict[data_name]
else:
del dataset[col_names[1]]
dataset['data'] = matlab_dict[col_names[1]]
# set axes to sklearn conventions
if transpose_data:
dataset['data'] = dataset['data'].T
if 'target' in dataset:
if not sp.sparse.issparse(dataset['target']):
dataset['target'] = dataset['target'].squeeze()
return Bunch(**dataset)
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/sklearn/datasets/mldata.py
|
Python
|
agpl-3.0
| 6,651
|
#
# Copyright (c) 2014 ThoughtWorks Deutschland GmbH
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
import os
import threading
import time
import psutil
import requests
from tempdir import TempDir
from mock import patch
from pixelated.client.dispatcher_api_client import PixelatedDispatcherClient
from pixelated.proxy import DispatcherProxy
from pixelated.manager import DispatcherManager, SSLConfig, DEFAULT_PORT
from pixelated.test.util import EnforceTLSv1Adapter, cafile, certfile, keyfile
__author__ = 'fbernitt'
INHERIT = None
class SmokeTest(unittest.TestCase):
__slots__ = ('_run_method', '_shutdown_method', '_thread_name', '_thread')
class Server(object):
def __init__(self, run_method, shutdown_method, thread_name=None):
self._run_method = run_method
self._shutdown_method = shutdown_method
self._thread_name = thread_name
self._thread = None
def _start_server(self):
self._thread = threading.Thread(target=self._run_method)
self._thread.setDaemon(True)
if self._thread_name:
self._thread.setName(self._thread_name)
self._thread.start()
def __enter__(self):
self._start_server()
time.sleep(0.3) # let server start
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._shutdown_method()
self._thread.join()
self._kill_subprocesses()
def _kill_subprocesses(self):
for child in psutil.Process(os.getpid()).children():
try:
p = psutil.Process(child.pid)
p.kill()
except psutil.Error:
pass
def setUp(self):
self._tmpdir = TempDir()
self.ssl_request = requests.Session()
self.ssl_request.mount('https://', EnforceTLSv1Adapter())
def tearDown(self):
self._tmpdir.dissolve()
def _dispatcher_manager(self):
fake_mailpile = os.path.join(os.path.dirname(__file__), 'fake_mailpile.py')
ssl_config = SSLConfig(certfile(), keyfile())
provider_ca = None
server = DispatcherManager(self._tmpdir.name, fake_mailpile, ssl_config, 'leap provider hostname', provider_ca, mailpile_virtualenv=INHERIT)
return SmokeTest.Server(server.serve_forever, server.shutdown, thread_name='PixelatedServerManager')
def _dispatcher_proxy(self):
dispatcher = DispatcherProxy(PixelatedDispatcherClient('localhost', DEFAULT_PORT, cacert=cafile(), assert_hostname=False), port=12345, certfile=certfile(),
keyfile=keyfile())
return SmokeTest.Server(dispatcher.serve_forever, dispatcher.shutdown, thread_name='PixelatedDispatcherProxy')
def _method(self, method, url, form_data=None, json_data=None, timeout=5.0):
if json_data:
headers = {'content-type': 'application/json'}
data = json.dumps(json_data)
cookies = None
else:
cookies = {'_xsrf': '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'}
headers = None
data = form_data.copy()
data['_xsrf'] = '2|7586b241|47c876d965112a2f547c63c95cbc44b1|1402910163'
return method(url, data=data, headers=headers, cookies=cookies, timeout=timeout, verify=cafile())
def get(self, url):
return self.ssl_request.get(url, verify=cafile())
def put(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.put, url, form_data=form_data, json_data=json_data)
def post(self, url, form_data=None, json_data=None):
return self._method(self.ssl_request.post, url, form_data=form_data, json_data=json_data)
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_dispatcher_run(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
self.assertSuccess(self.get('https://localhost:4443/agents'), json_body={
'agents': [{'name': 'test', 'state': 'stopped', 'uri': 'http://localhost:4443/agents/test'}]})
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
self.assertSuccess(self.get('https://localhost:4443/agents/test/runtime'),
json_body={'state': 'running', 'port': 5000})
time.sleep(2) # let mailpile start
self.assertSuccess(self.get('http://localhost:5000/'))
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'}))
def test_dispatcher_starts(self):
with self._dispatcher_proxy():
self.assertSuccess(self.get('https://localhost:12345/auth/login'))
@patch('pixelated.manager.LeapCertificate')
@patch('pixelated.manager.LeapProvider')
def test_server_dispatcher_combination(self, leap_provider_mock, leap_certificate_mock):
with self._dispatcher_manager():
with self._dispatcher_proxy():
# add user
self.assertSuccess(
self.post('https://localhost:4443/agents', json_data={'name': 'test', 'password': 'some password'}))
# try to login with agent down
# self.assertError(302, self.post('https://localhost:12345/auth/login',
# form_data={'username': 'test', 'password': 'test'}))
# start agent
self.assertSuccess(
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'running'}))
# let mailpile start
time.sleep(1)
self.assertMemoryUsage(
self.get('https://localhost:4443/stats/memory_usage'))
try:
# try to login with agent up
self.assertSuccess(self.post('https://localhost:12345/auth/login',
form_data={'username': 'test', 'password': 'some password'}),
body='Hello World!')
finally:
# shutdown mailple
self.put('https://localhost:4443/agents/test/state', json_data={'state': 'stopped'})
def assertSuccess(self, response, body=None, json_body=None):
status = response.status_code
self.assertTrue(200 <= status < 300, msg='%d: %s' % (response.status_code, response.reason))
if body:
self.assertEqual(body, response.content)
if json_body:
self.assertEqual(json_body, response.json())
def assertError(self, error_code, response):
self.assertEqual(error_code, response.status_code,
'Expected status code %d but got %d' % (error_code, response.status_code))
def assertMemoryUsage(self, response):
self.assertSuccess(response)
usage = response.json()
self.assertEqual(1, len(usage['agents']))
|
pixelated-project/pixelated-dispatcher
|
pixelated/test/integration/smoke_test.py
|
Python
|
agpl-3.0
| 7,956
|
from unittest.mock import patch
from superdesk.tests import TestCase
from apps.publish.enqueue.enqueue_service import EnqueueService
class NoTakesEnqueueTestCase(TestCase):
def setUp(self):
super().setUp()
self.product_ids = self.app.data.insert(
"products",
[
{"name": "all"},
],
)
self.subscriber_ids = self.app.data.insert(
"subscribers",
[
{"name": "digi", "subscriber_type": "digital", "is_targetable": True, "products": self.product_ids},
],
)
self.desk_ids = self.app.data.insert(
"desks",
[
{"name": "sports"},
],
)
self.service = EnqueueService()
def test_resend_no_takes(self):
doc = {"_id": "test"}
subscribers = [s for s in self.app.data.find_all("subscribers")]
subscriber_codes = self.service._get_subscriber_codes(subscribers)
with patch.object(self.service, "_resend_to_subscribers") as resend:
with patch.object(self.service, "publish_content_api") as content_api:
self.service.resend(doc, subscribers)
resend.assert_called_with(doc, subscribers, subscriber_codes, {})
content_api.assert_called_with(doc, [])
|
superdesk/superdesk-core
|
tests/enqueue_test.py
|
Python
|
agpl-3.0
| 1,353
|
# -*- coding: utf-8 -*-
import re
from django import forms
from django.contrib.auth.models import User
from django.forms import formset_factory
from django.forms.widgets import TextInput
from django.utils import timezone
from dal import autocomplete
from tagging.fields import TagField
import accounts.utils
from bulb.models import Book, NeededBook, Request, Group, Session, Report, Membership, ReaderProfile, Recruitment, NewspaperSignup, DewanyaSuggestion, BookCommitment, RecommendedBook, BookRecommendation
from bulb import models, utils
city_choices = (
('-', u'الرياض وجدة والأحساء'),
(u'الرياض', u'الرياض فقط'),
(u'جدة', u'جدة فقط'),
(u'الأخساء', u'الأحساء فقط'),
)
gender_choices = (
('-', u'الطلاب والطالبات'),
('F', u'الطالبات'),
('M', u'الطلاب'),
)
class CommonControl:
def control_gender(self):
# Modify the choice only if the user is not a superuser not a
# Bulb coordinator. This is a really, really, really stupid
# default option, but it's just to make sure that people know
# what are chosing.
if self.user_gender == 'F':
if not self.instance.id:
self.fields['gender'].initial = 'F'
self.fields['gender'].choices = (
('-', u'الطلاب والطالبات'),
('F', u'الطالبات'),
)
elif self.user_gender == 'M':
if not self.instance.id:
self.fields['gender'].initial = 'M'
self.fields['gender'].choices = (
('-', u'الطلاب والطالبات'),
('M', u'الطلاب')
)
class NeededBookForm(forms.ModelForm):
class Meta:
model = models.NeededBook
fields = ['title', 'authors', 'description', 'cover', 'tags',
'category']
class GenericBookForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
# Remove is_publicly_owned field from ordinary users.
user = kwargs.pop('user')
super(GenericBookForm, self).__init__(*args, **kwargs)
if not user.is_superuser and \
not utils.is_bulb_coordinator_or_deputy(user) and \
not utils.is_bulb_member(user):
del self.fields['is_publicly_owned']
class BookEditForm(GenericBookForm):
"""Form used to edit books. It allows changing contribution type from
giving to lending."""
tags = TagField()
class Meta:
model = models.Book
fields = ['title', 'authors', 'edition', 'pages', 'condition',
'description', 'cover', 'tags', 'category',
'contribution', 'available_until', 'is_publicly_owned']
class BookGiveForm(GenericBookForm):
class Meta:
model = models.Book
fields = ['title', 'authors', 'edition', 'pages',
'condition', 'description', 'cover', 'tags',
'category', 'is_publicly_owned']
class BookLendForm(GenericBookForm):
class Meta:
model = models.Book
fields = ['title', 'authors', 'edition', 'pages', 'condition',
'description', 'cover', 'category', 'tags',
'available_until', 'is_publicly_owned']
class RequestForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
instance = kwargs.get('instance', None)
super(RequestForm, self).__init__(*args, **kwargs)
if instance.book.contribution == 'L':
self.fields['borrowing_end_date'].required = True
def clean_delivery(self):
# You know the "males and females are not supposed to meet"
# bullshit? Yeah.
data = self.cleaned_data['delivery']
if not data:
return data
requester_gender = accounts.utils.get_user_gender(self.instance.requester)
owner_gender = accounts.utils.get_user_gender(self.instance.book.submitter)
if data == 'I' or requester_gender != owner_gender:
delivery = 'I'
else:
delivery = 'D'
return delivery
class Meta:
model = models.Request
fields = ['delivery', 'borrowing_end_date']
widgets = {'delivery': forms.HiddenInput()}
class GroupForm(forms.ModelForm, CommonControl):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(GroupForm, self).__init__(*args, **kwargs)
# After creating the group, members can be controlled for a
# dedicated page.
if self.instance.pk:
del self.fields['members']
if self.instance.id:
self.user_city = accounts.utils.get_user_city(self.instance.coordinator)
self.user_gender = accounts.utils.get_user_gender(self.instance.coordinator)
if self.instance.is_limited_by_city:
self.fields['city'].initial = self.user_city
if self.instance.is_limited_by_gender:
self.fields['gender'].initial = self.user_gender
else:
self.user_city = accounts.utils.get_user_city(self.user)
self.user_gender = accounts.utils.get_user_gender(self.user)
self.fields['city'].initial = '-'
if not self.user.is_superuser and \
not utils.is_bulb_coordinator_or_deputy(self.user):
self.control_gender()
if self.user_city == u'الرياض':
self.fields['city'].choices = (
('-', u'الرياض وجدة والأحساء'),
('R', u'الرياض فقط'),
)
elif self.user_city == u'الأحساء':
self.fields['city'].choices = (
('-', u'الرياض وجدة والأحساء'),
('A', u'الأحساء فقط'),
)
elif self.user_city == u'جدة':
self.fields['city'].choices = (
('-', u'الرياض وجدة والأحساء'),
('J', u'جدة فقط'),
)
gender = forms.ChoiceField(choices=gender_choices, label=u"المجموعة تقبل عضوية")
city = forms.ChoiceField(choices=city_choices, label=u"تشمل المجموعة")
members = forms.ModelMultipleChoiceField(
widget=autocomplete.ModelSelect2Multiple(url='bulb:bulb-user-autocomplete',
attrs={
'data-html': 'true',
'data-placeholder': 'أَضف عنصرا',
}),
label=u"الأعضاء",
queryset=User.objects.all(),
required=False)
def save(self):
group = super(GroupForm, self).save(commit=False)
if self.user_gender == self.cleaned_data['gender']:
group.is_limited_by_gender = True
else:
group.is_limited_by_gender = False
if self.user_city == self.cleaned_data['city']:
group.is_limited_by_city = True
else:
group.is_limited_by_city = False
group.save()
return group
class Meta:
model = models.Group
fields = ['name', 'image', 'description', 'category',
'is_private']
class FreeSessionForm(forms.ModelForm, CommonControl):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(FreeSessionForm, self).__init__(*args, **kwargs)
self.user_city = accounts.utils.get_user_city(self.user)
self.user_gender = accounts.utils.get_user_gender(self.user)
# Limit the choice only if the user is not a superuser not a
# Bulb coordinator.
if not self.user.is_superuser and \
not utils.is_bulb_coordinator_or_deputy(self.user):
self.control_gender()
def save(self):
session = super(FreeSessionForm, self).save(commit=False)
if self.user_gender == self.cleaned_data['gender']:
session.is_limited_by_gender = True
session.save()
return session
gender = forms.ChoiceField(choices=gender_choices, label=u"الجلسة تقبل حضور")
class Meta:
model = models.Session
fields = ['title', 'agenda', 'location', 'date', 'start_time',
'end_time']
class SessionForm(forms.ModelForm):
class Meta:
model = models.Session
fields = ['title', 'agenda', 'location', 'date', 'start_time',
'end_time']
class ReportForm(forms.ModelForm):
attendees = forms.ModelMultipleChoiceField(
widget=autocomplete.ModelSelect2Multiple(url='bulb:bulb-user-autocomplete',
attrs={
'data-placeholder': 'أَضف اسما',
'data-html': 'true',
}),
label=u"الحضور",
queryset=User.objects.all(),
required=False)
class Meta:
model = models.Report
fields = ['attendees']#, 'description']
class ReaderProfileForm(forms.ModelForm):
def clean_twitter(self):
data = self.cleaned_data['twitter']
if not data:
return data
data = re.sub(u'^(?:https?://(?:m\.)?twitter\.com/)?@?', '', data)
if not re.match(u'^[A-Za-z\d_]+$', data):
raise forms.ValidationError(u"أدخل اسم مستخدم صحيح.")
else:
return data
def clean_goodreads(self):
data = self.cleaned_data['goodreads']
if not data:
return data
if not re.match(u'^(?:https?://)?(?:www.)?goodreads\.com/user/show/', data):
raise forms.ValidationError(u"أدخل رابط صفحتك على Goodreads.")
else:
# Because!
data = re.sub('^http://', 'https://', data)
if not re.match('^https?://', data):
data = u"https://" + data
return data
class Meta:
model = models.ReaderProfile
fields = ['areas_of_interests', 'favorite_books',
'favorite_writers', 'average_reading',
'goodreads', 'twitter']
class RecruitmentForm(forms.ModelForm):
class Meta:
model = models.Recruitment
exclude = ['user', 'year']
class NewspaperSignupForm(forms.ModelForm):
email = forms.EmailField(required=True)
class Meta:
model = models.NewspaperSignup
fields = ['email']
class DewanyaSuggestionForm(forms.ModelForm):
class Meta:
model = models.DewanyaSuggestion
fields = ['name', 'subject']
widgets = {'name': forms.widgets.TextInput(attrs={'class': 'user-autocomplete'})}
DewanyaSuggestionFormSet = forms.formset_factory(DewanyaSuggestionForm, extra=3)
class BookCommitmentForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
readathon = kwargs.pop('readathon')
super(BookCommitmentForm, self).__init__(*args, **kwargs)
if readathon.start_date < timezone.now().date():
del self.fields['wants_to_attend']
class Meta:
model = models.BookCommitment
fields = ['title', 'cover', 'pages', 'reason',
'wants_to_attend', 'wants_to_contribute']
class UpdateBookCommitmentForm(forms.ModelForm):
class Meta:
model = models.BookCommitment
fields = ['pages', 'completed_pages']
class CulturalProgramForm(forms.Form):
user = forms.ModelChoiceField(
widget=autocomplete.ModelSelect2(url='bulb:bulb-user-autocomplete',
attrs={
'data-html': 'true',
'data-placeholder': 'أَضف شخصا',
}),
label=u"المستعير/ة",
queryset=User.objects.filter(is_active=True))
book = forms.ModelChoiceField(
widget=autocomplete.ModelSelect2(url='bulb:bulb-book-autocomplete',
attrs={
'data-placeholder': 'أَضف كتابا',
}),
label=u"الكتاب",
queryset=models.Book.objects.available())
class EditBookRecommendationForm(forms.ModelForm):
class Meta:
model = models.BookRecommendation
fields = ['comment']
class AddBookRecommendationForm(forms.Form):
recommended_book = forms.ModelChoiceField(required=False,
widget=autocomplete.ModelSelect2(url='bulb:bulb-recommended-book-autocomplete',
attrs={
'data-html': 'true',
'data-placeholder': 'أَضف كتابا',
}),
label=u"الكتاب",
queryset=models.RecommendedBook.objects.all())
category = forms.ModelChoiceField(label=u"التصنيف",
required=False,
queryset=models.Category.objects.filter(is_meta=False))
title = forms.CharField(required=False, max_length=200, label=u"العنوان")
authors = forms.CharField(required=False, max_length=200, label=u"تأليف")
cover = forms.ImageField(required=False, label=u"الغلاف")
comment = forms.CharField(widget=forms.Textarea(attrs={'class': 'form-control input-lg'}), label=u"تعليق")
def clean(self):
cleaned_data = super(AddBookRecommendationForm, self).clean()
self.recommended_book = self.cleaned_data.get('recommended_book')
self.recommended_book_fields = {'title': self.cleaned_data['title'],
'authors': self.cleaned_data['authors'],
'category': self.cleaned_data['category'],
'cover': self.cleaned_data['cover']}
if not self.recommended_book and\
not all(self.recommended_book_fields.values()):
raise forms.ValidationError(u"لم تدخل بيانات كافية عن الكتاب")
def save(self, user):
if self.recommended_book:
book_recommendation = models.BookRecommendation.objects\
.create(recommended_book=self.recommended_book,
user=user,
comment=self.cleaned_data['comment'])
else:
recommended_book = models.RecommendedBook.objects.create(**self.recommended_book_fields)
book_recommendation = models.BookRecommendation.objects\
.create(recommended_book=recommended_book,
user=user,
comment=self.cleaned_data['comment'])
return book_recommendation
|
enjaz/enjaz
|
bulb/forms.py
|
Python
|
agpl-3.0
| 15,723
|
#!/usr/bin/python2
from pypixel import *
show()
h = 0
while True:
x = random(WIDTH)
y = random(HEIGHT)
r = random(50, 100)
h += 1
h %= 360
s = 100
v = 100
c = hsv2rgb((h, s, v))
circle(c, (x, y), r)
update()
|
saikobee/pypixel
|
examples/rainbow_random_circles.py
|
Python
|
lgpl-2.1
| 257
|
"""Check if SOCKS5 relays are disabled in muc"""
import os
if os.name != 'posix':
# skipped on non-Unix for now, because it uses a Unix socket
raise SystemExit(77)
import dbus
from servicetest import call_async, EventPattern, EventProtocolClientFactory
from gabbletest import acknowledge_iq, make_muc_presence, exec_test
import constants as cs
import ns
from mucutil import join_muc
from bytestream import BytestreamS5BRelay, create_from_si_offer, announce_socks5_proxy
from twisted.internet import reactor
def test(q, bus, conn, stream):
iq_event, disco_event = q.expect_many(
EventPattern('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard'),
EventPattern('stream-iq', to='localhost', query_ns=ns.DISCO_ITEMS))
acknowledge_iq(stream, iq_event.stanza)
announce_socks5_proxy(q, stream, disco_event.stanza)
join_muc(q, bus, conn, stream, 'chat@conf.localhost')
# bob offers a stream tube
stream_tube_id = 1
presence = make_muc_presence('owner', 'moderator', 'chat@conf.localhost', 'bob')
tubes = presence.addElement((ns.TUBES, 'tubes'))
tube = tubes.addElement((None, 'tube'))
tube['type'] = 'stream'
tube['service'] = 'echo'
tube['id'] = str(stream_tube_id)
parameters = tube.addElement((None, 'parameters'))
stream.send(presence)
def new_chan_predicate(e):
path, props = e.args[0][0]
return props[cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_STREAM_TUBE
e = q.expect('dbus-signal', signal='NewChannels',
predicate=new_chan_predicate)
channels = e.args[0]
assert len(channels) == 1
path, props = channels[0]
assert props[cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_STREAM_TUBE
tube_chan = bus.get_object(conn.bus_name, path)
tube_iface = dbus.Interface(tube_chan, cs.CHANNEL_TYPE_STREAM_TUBE)
call_async(q, tube_iface, 'Accept', 0, 0, '',
byte_arrays=True)
accept_return_event, _ = q.expect_many(
EventPattern('dbus-return', method='Accept'),
EventPattern('dbus-signal', signal='TubeChannelStateChanged',
args=[cs.TUBE_CHANNEL_STATE_OPEN]))
unix_socket_adr = accept_return_event.value[0]
factory = EventProtocolClientFactory(q)
reactor.connectUNIX(unix_socket_adr, factory)
# expect SI request
e = q.expect('stream-iq', to='chat@conf.localhost/bob', query_ns=ns.SI,
query_name='si')
bytestream, profile = create_from_si_offer(stream, q, BytestreamS5BRelay, e.stanza,
'chat@conf.localhost/bob')
result, si = bytestream.create_si_reply(e.stanza, 'test@localhost/Resource')
si.addElement((ns.TUBES, 'tube'))
stream.send(result)
# wait SOCKS5 init iq
id, mode, si, hosts = bytestream._expect_socks5_init()
for jid, host, port in hosts:
# the proxy is not announced because we are in a muc
assert jid != 'proxy.localhost'
if __name__ == '__main__':
exec_test(test)
|
Ziemin/telepathy-gabble
|
tests/twisted/tubes/test-socks5-muc.py
|
Python
|
lgpl-2.1
| 2,964
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform as py_platform
from spack.architecture import OperatingSystem
from spack.version import Version
from spack.util.executable import Executable
# FIXME: store versions inside OperatingSystem as a Version instead of string
def macos_version():
"""temporary workaround to return a macOS version as a Version object
"""
return Version(py_platform.mac_ver()[0])
def macos_sdk_path():
"""Return SDK path
"""
xcrun = Executable('xcrun')
return xcrun('--show-sdk-path', output=str, error=str).rstrip()
class MacOs(OperatingSystem):
"""This class represents the macOS operating system. This will be
auto detected using the python platform.mac_ver. The macOS
platform will be represented using the major version operating
system name, i.e el capitan, yosemite...etc.
"""
def __init__(self):
"""Autodetects the mac version from a dictionary.
If the mac version is too old or too new for Spack to recognize,
will use a generic "macos" version string until Spack is updated.
"""
mac_releases = {
'10.0': 'cheetah',
'10.1': 'puma',
'10.2': 'jaguar',
'10.3': 'panther',
'10.4': 'tiger',
'10.5': 'leopard',
'10.6': 'snowleopard',
'10.7': 'lion',
'10.8': 'mountainlion',
'10.9': 'mavericks',
'10.10': 'yosemite',
'10.11': 'elcapitan',
'10.12': 'sierra',
'10.13': 'highsierra',
'10.14': 'mojave',
'10.15': 'catalina',
'10.16': 'bigsur',
'11': 'bigsur',
}
# Big Sur versions go 11.0, 11.0.1, 11.1 (vs. prior versions that
# only used the minor component)
part = 1 if macos_version() >= Version('11') else 2
mac_ver = str(macos_version().up_to(part))
name = mac_releases.get(mac_ver, "macos")
super(MacOs, self).__init__(name, mac_ver)
def __str__(self):
return self.name
|
iulian787/spack
|
lib/spack/spack/operating_systems/mac_os.py
|
Python
|
lgpl-2.1
| 2,263
|
#!/usr/bin/env python3
import vtktools
import sys
import math
import re
import matplotlib.pyplot as plt
import getopt
from scipy.special import erf
from numpy import poly1d
from matplotlib.pyplot import figure, show
from numpy import pi, sin, linspace
from matplotlib.mlab import stineman_interp
from numpy import exp, cos
from fluidity_tools import stat_parser
def mirror(x):
return 13800-x
def usage():
print('Usage:')
print('plotfs_detec.py [-w] --file=detector_filename --save=filename')
print('--save=... saves the plots as images instead of plotting them on the screen.')
print('-w plots the wetting procedure (drying is default).')
# should be copied from the diamond extrude function. X is 2 dimensional
def bathymetry_function(X):
return -5.0*X/13800
################# Main ###########################
def main(argv=None):
filename=''
timestep_ana=0.0
dzero=0.01
save='' # If nonempty, we save the plots as images instead if showing them
wetting=False
try:
opts, args = getopt.getopt(sys.argv[1:], ":w", ['file=','save='])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt == '--file':
filename=arg
elif opt == '--save':
save=arg
elif opt == '-w':
wetting=True
if filename=='':
print('No filename specified. You have to give the detectors filename.')
usage()
sys.exit(2)
####################### Print time plot ###########################
print('Generating time plot')
s = stat_parser(filename)
timesteps=s["ElapsedTime"]["value"]
timestep=timesteps[1]-timesteps[0]
print("Found ", len(timesteps), " timesteps with dt=", timestep)
if timestep_ana==0.0:
timestep_ana=timestep
fs=s["water"]["FreeSurface"]
print("Found ", len(fs), " detectors. We assume they are equidistant distributed over the domain (", 0, "-", 13800, ").")
# Get and plot results
plt.ion() # swith on interactive mode
fig2 = figure()
ax2 = fig2.add_subplot(111)
if wetting:
##plot_start=90 # in timesteps
plot_start=18 # in timesteps, after 18 timesteps the waterlevel reaches its lowest point
##plot_end=114 # in timesteps
plot_end=54 # in timesteps
plot_name='Wetting'
else:
plot_start=54 # in timesteps
plot_end=89 # in timesteps
plot_name='Drying'
for t in range(0,len(timesteps)):
# ignore the first waveperiod
if t<plot_start:
continue
if t>plot_end:
continue
fsvalues=[]
xcoords=[]
for name, item in fs.iteritems():
#print name
xcoords.append(mirror(s[name]['position'][0][0]))
#print xcoord
fsvalues.append(fs[name][t])
# Plot result of one timestep
ax2.plot(xcoords,fsvalues,'r,', label='Numerical solution')
# Plot Analytical solution
fsvalues_ana=[]
offset=-bathymetry_function(0.0)+dzero
xcoords.sort()
for x in xcoords:
fsvalues_ana.append(bathymetry_function(mirror(x))-offset)
# Plot vertical line in bathmetry on right boundary
xcoords.append(xcoords[len(xcoords)-1]+0.000000001)
fsvalues_ana.append(2.1)
ax2.plot(xcoords, fsvalues_ana, 'k', label='Bathymetry')
#plt.legend()
if t==plot_end:
# change from meters in kilometers in the x-axis
# return locs, labels where locs is an array of tick locations and
# labels is an array of tick labels.
locs, labels = plt.xticks()
for i in range(0,len(locs)):
labels[i]=str(locs[i]/1000)
plt.xticks(locs, labels)
plt.ylim(-2.2,1.4)
#plt.title(plot_name)
plt.xlabel('Position [km]')
plt.ylabel('Free surface [m]')
if save=='':
plt.draw()
raw_input("Please press Enter")
else:
plt.savefig(save+'_'+plot_name+'.pdf', facecolor='white', edgecolor='black', dpi=100)
plt.cla()
t=t+1
# Make video from the images:
# mencoder "mf://*.png" -mf type=png:fps=30 -ovc lavc -o output.avi
if __name__ == "__main__":
main()
|
FluidityStokes/fluidity
|
tests/wetting_and_drying_balzano1_cg/plotfs_detec.py
|
Python
|
lgpl-2.1
| 5,325
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libgcrypt(AutotoolsPackage):
"""Cryptographic library based on the code from GnuPG."""
homepage = "https://gnupg.org/software/libgcrypt/index.html"
url = "https://gnupg.org/ftp/gcrypt/libgcrypt/libgcrypt-1.8.5.tar.bz2"
maintainers = ['alalazo']
version('1.9.4', sha256='ea849c83a72454e3ed4267697e8ca03390aee972ab421e7df69dfe42b65caaf7')
version('1.9.3', sha256='97ebe4f94e2f7e35b752194ce15a0f3c66324e0ff6af26659bbfb5ff2ec328fd')
version('1.9.2', sha256='b2c10d091513b271e47177274607b1ffba3d95b188bbfa8797f948aec9053c5a')
version('1.9.1', sha256='c5a67a8b9b2bd370fb415ed1ee31c7172e5683076493cf4a3678a0fbdf0265d9')
version('1.8.7', sha256='03b70f028299561b7034b8966d7dd77ef16ed139c43440925fe8782561974748')
version('1.8.6', sha256='0cba2700617b99fc33864a0c16b1fa7fdf9781d9ed3509f5d767178e5fd7b975')
version('1.8.5', sha256='3b4a2a94cb637eff5bdebbcaf46f4d95c4f25206f459809339cdada0eb577ac3')
version('1.8.4', sha256='f638143a0672628fde0cad745e9b14deb85dffb175709cacc1f4fe24b93f2227')
version('1.8.1', sha256='7a2875f8b1ae0301732e878c0cca2c9664ff09ef71408f085c50e332656a78b3')
version('1.7.6', sha256='626aafee84af9d2ce253d2c143dc1c0902dda045780cc241f39970fc60be05bc')
version('1.6.2', sha256='de084492a6b38cdb27b67eaf749ceba76bf7029f63a9c0c3c1b05c88c9885c4c')
depends_on('libgpg-error@1.25:')
def check(self):
# Without this hack, `make check` fails on macOS when SIP is enabled
# https://bugs.gnupg.org/gnupg/issue2056
# https://github.com/Homebrew/homebrew-core/pull/3004
if self.spec.satisfies('platform=darwin'):
old = self.prefix.lib.join('libgcrypt.20.dylib')
new = join_path(
self.stage.source_path, 'src', '.libs', 'libgcrypt.20.dylib')
filename = 'tests/.libs/random'
install_name_tool = Executable('install_name_tool')
install_name_tool('-change', old, new, filename)
make('check')
|
LLNL/spack
|
var/spack/repos/builtin/packages/libgcrypt/package.py
|
Python
|
lgpl-2.1
| 2,211
|
##! /usr/bin/env python
# _*_ coding: latin-1 _*_
import jtutil
import jtsocket
import jtdom
def jtexec(cmd):
jtsocket.send("<jtexec>"+jtutil.cdataif(cmd)+"</jtexec>")
# <exitvalue>value</exitvalue>
# vagy
# <execerror>error</execerror>
while 1:
rsp=jtsocket.recv()
if rsp==None:
return None
dom=jtdom.domparse(rsp)
node=jtdom.domfirst(dom)
type=jtdom.domname(node)
value=jtdom.domtext(node)
if type=="execerror":
raise jtutil.applicationerror, ("jtexec", "failed", value)
elif type=="exitvalue":
return value
|
mrev11/ccc3
|
jt/jtpython/jtlib/jtexec.py
|
Python
|
lgpl-2.1
| 653
|
# -*- python -*-
# Package : omniidl
# template.py Created on: 2000/01/18
# Author : David Scott (djs)
#
# Copyright (C) 2003-2008 Apasphere Ltd
# Copyright (C) 1999 AT&T Laboratories Cambridge
#
# This file is part of omniidl.
#
# omniidl is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# 02111-1307, USA.
#
# Description:
#
# C++ templates for the .hh file
# $Id: template.py 5867 2009-05-06 16:16:18Z dgrisby $
# $Log$
# Revision 1.8.2.20 2008/12/29 18:44:38 dgrisby
# Globally scope array functions to avoid ambiguities.
#
# Revision 1.8.2.19 2008/12/03 10:53:58 dgrisby
# Tweaks leading to Python 3 support; other minor clean-ups.
#
# Revision 1.8.2.18 2007/09/19 14:16:07 dgrisby
# Avoid namespace clashes if IDL defines modules named CORBA.
#
# Revision 1.8.2.17 2007/05/11 09:52:27 dgrisby
# New -Wbguard_prefix option. Thanks Austin Bingham.
#
# Revision 1.8.2.16 2006/10/23 15:36:25 dgrisby
# Undefine USE_stub_in_nt_dll at the end of header if it was not defined
# at the start.
#
# Revision 1.8.2.15 2006/09/04 11:40:06 dgrisby
# Remove crazy switch code in enum marshalling.
#
# Revision 1.8.2.14 2006/01/10 12:24:03 dgrisby
# Merge from omni4_0_develop pre 4.0.7 release.
#
# Revision 1.8.2.13 2005/11/14 11:02:16 dgrisby
# Local interface fixes.
#
# Revision 1.8.2.12 2005/11/09 12:22:17 dgrisby
# Local interfaces support.
#
# Revision 1.8.2.11 2005/08/16 13:51:21 dgrisby
# Problems with valuetype / abstract interface C++ mapping.
#
# Revision 1.8.2.10 2005/07/22 17:18:37 dgrisby
# Another merge from omni4_0_develop.
#
# Revision 1.8.2.9 2005/01/06 23:10:06 dgrisby
# Big merge from omni4_0_develop.
#
# Revision 1.8.2.8 2005/01/06 16:35:18 dgrisby
# Narrowing for abstract interfaces.
#
# Revision 1.8.2.7 2004/10/13 17:58:24 dgrisby
# Abstract interfaces support; values support interfaces; value bug fixes.
#
# Revision 1.8.2.6 2004/07/31 23:46:27 dgrisby
# Correct constness of exception Any insertion operator.
#
# Revision 1.8.2.5 2004/07/23 10:29:59 dgrisby
# Completely new, much simpler Any implementation.
#
# Revision 1.8.2.4 2004/07/04 23:53:39 dgrisby
# More ValueType TypeCode and Any support.
#
# Revision 1.8.2.3 2004/02/16 10:10:32 dgrisby
# More valuetype, including value boxes. C++ mapping updates.
#
# Revision 1.8.2.2 2003/10/23 11:25:55 dgrisby
# More valuetype support.
#
# Revision 1.8.2.1 2003/03/23 21:02:36 dgrisby
# Start of omniORB 4.1.x development branch.
#
# Revision 1.5.2.19 2001/11/12 13:46:07 dpg1
# _unchecked_narrow, improved _narrow.
#
# Revision 1.5.2.18 2001/11/08 16:33:51 dpg1
# Local servant POA shortcut policy.
#
# Revision 1.5.2.17 2001/10/29 17:42:41 dpg1
# Support forward-declared structs/unions, ORB::create_recursive_tc().
#
# Revision 1.5.2.16 2001/10/18 12:45:28 dpg1
# IDL compiler tweaks.
#
# Revision 1.5.2.15 2001/10/17 16:44:05 dpg1
# Update DynAny to CORBA 2.5 spec, const Any exception extraction.
#
# Revision 1.5.2.14 2001/09/19 17:29:04 dpg1
# Cosmetic changes.
#
# Revision 1.5.2.13 2001/08/17 13:45:56 dpg1
# C++ mapping fixes.
#
# Revision 1.5.2.12 2001/08/15 10:26:10 dpg1
# New object table behaviour, correct POA semantics.
#
# Revision 1.5.2.11 2001/08/03 17:41:17 sll
# System exception minor code overhaul. When a system exeception is raised,
# a meaning minor code is provided.
#
# Revision 1.5.2.10 2001/07/31 19:25:11 sll
# Array _var should be separated into fixed and variable size ones.
#
# Revision 1.5.2.9 2001/06/18 20:30:51 sll
# Only define 1 conversion operator from T_var to T* if the compiler is
# gcc. Previously, this is only done for gcc 2.7.2. It seems that gcc 3.0
# requires this to be the case. This is the default for all versions of
# gcc.
#
# Revision 1.5.2.8 2001/05/29 17:03:50 dpg1
# In process identity.
#
# Revision 1.5.2.7 2001/04/19 09:30:12 sll
# Big checkin with the brand new internal APIs.
# Scoped where appropriate with the omni namespace.
#
# Revision 1.5.2.6 2001/03/13 10:32:09 dpg1
# Fixed point support.
#
# Revision 1.5.2.5 2000/11/20 14:43:25 sll
# Added support for wchar and wstring.
#
# Revision 1.5.2.4 2000/11/09 12:27:55 dpg1
# Huge merge from omni3_develop, plus full long long from omni3_1_develop.
#
# Revision 1.5.2.3 2000/11/03 19:20:41 sll
# Replaced old marshal operators with a unified operator for cdrStream.
#
# Revision 1.5.2.2 2000/10/12 15:37:51 sll
# Updated from omni3_1_develop.
#
# Revision 1.6.2.2 2000/08/21 11:35:18 djs
# Lots of tidying
#
# Revision 1.6.2.1 2000/08/02 10:52:02 dpg1
# New omni3_1_develop branch, merged from omni3_develop.
#
# Revision 1.6 2000/07/13 15:26:00 dpg1
# Merge from omni3_develop for 3.0 release.
#
# Revision 1.3.2.15 2000/07/26 15:29:11 djs
# Missing typedef and forward when generating BOA skeletons
#
# Revision 1.3.2.14 2000/07/24 09:35:20 dpg1
# Adding the missing constructor meant that there was no longer a
# default constructor.
#
# Revision 1.3.2.13 2000/07/24 10:17:31 djs
# Added missing BOA skeleton constructor
#
# Revision 1.3.2.12 2000/07/04 12:57:55 djs
# Fixed Any insertion/extraction operators for unions and exceptions
#
# Revision 1.3.2.11 2000/06/26 16:24:00 djs
# Better handling of #include'd files (via new commandline options)
# Refactoring of configuration state mechanism.
#
# Revision 1.3.2.10 2000/06/19 18:19:50 djs
# Implemented union discriminant setting function _d(_value) with checks for
# illegal uses (setting to a label corresponding to a non-current member and
# setting before initialisation)
#
# Revision 1.3.2.9 2000/06/05 13:03:57 djs
# Removed union member name clash (x & pd_x, pd__default, pd__d)
# Removed name clash when a sequence is called "pd_seq"
# Nested union within union fix
# Actually generates BOA non-flattened tie templates
#
# Revision 1.3.2.8 2000/05/31 18:02:58 djs
# Better output indenting (and preprocessor directives now correctly output at
# the beginning of lines)
#
# Revision 1.3.2.7 2000/05/30 15:59:25 djs
# Removed inheritance ambiguity in generated BOA _sk_ and POA_ classes
#
# Revision 1.3.2.6 2000/05/18 15:57:33 djs
# Added missing T* data constructor for bounded sequence types
#
# Revision 1.3.2.5 2000/03/20 11:50:20 djs
# Removed excess buffering- output templates have code attached which is
# lazily evaluated when required.
#
# Revision 1.3.2.4 2000/03/10 12:01:03 djr
# Re-fixed omniidl (make exception _NP_duplicate() public).
#
# Revision 1.3.2.3 2000/03/09 15:22:42 djs
# Changing the protection status of an exception method, mirroring a change
# in omniidl3
#
# Revision 1.3.2.2 2000/03/07 18:07:33 djr
# Fixed user-exceptions when can't catch by base class.
#
# Revision 1.3.2.1 2000/03/03 14:29:17 djr
# Improvement to BOA skeletons (less generated code).
#
# Revision 1.3 2000/02/01 09:26:45 djs
# Tracking fixes in old compiler: powerpc-aix scoped identifier workarounds
#
# Revision 1.2 2000/01/19 11:23:29 djs
# Moved most C++ code to template file
#
# Revision 1.1 2000/01/18 18:05:53 djs
# Extracted most C++ from header/defs and put in a template file.
# General refactoring.
#
"""C++ templates for the .hh file"""
##
## File header
##
header = """\
// This file is generated by @program@- @library@. Do not edit.
#ifndef @guard_prefix@__@guard@_hh__
#define @guard_prefix@__@guard@_hh__
"""
footer = """\
#endif
"""
##
## Main file
##
main = """\
#ifndef __CORBA_H_EXTERNAL_GUARD__
#include <omniORB4/CORBA.h>
#endif
#ifndef USE_stub_in_nt_dll
# define USE_stub_in_nt_dll_NOT_DEFINED_@guard@
#endif
#ifndef USE_core_stub_in_nt_dll
# define USE_core_stub_in_nt_dll_NOT_DEFINED_@guard@
#endif
#ifndef USE_dyn_stub_in_nt_dll
# define USE_dyn_stub_in_nt_dll_NOT_DEFINED_@guard@
#endif
@sub_include_pre@
@cxx_direct_include@
@includes@
@sub_include_post@
#ifdef USE_stub_in_nt_dll
# ifndef USE_core_stub_in_nt_dll
# define USE_core_stub_in_nt_dll
# endif
# ifndef USE_dyn_stub_in_nt_dll
# define USE_dyn_stub_in_nt_dll
# endif
#endif
#ifdef _core_attr
# error "A local CPP macro _core_attr has already been defined."
#else
# ifdef USE_core_stub_in_nt_dll
# define _core_attr _OMNIORB_NTDLL_IMPORT
# else
# define _core_attr
# endif
#endif
#ifdef _dyn_attr
# error "A local CPP macro _dyn_attr has already been defined."
#else
# ifdef USE_dyn_stub_in_nt_dll
# define _dyn_attr _OMNIORB_NTDLL_IMPORT
# else
# define _dyn_attr
# endif
#endif
@forward_declarations@
@string_tcParser_declarations@
@defs@
@poa@
@obv@
@other_tie@
#undef _core_attr
#undef _dyn_attr
@operators@
@marshalling@
#ifdef USE_stub_in_nt_dll_NOT_DEFINED_@guard@
# undef USE_stub_in_nt_dll
# undef USE_stub_in_nt_dll_NOT_DEFINED_@guard@
#endif
#ifdef USE_core_stub_in_nt_dll_NOT_DEFINED_@guard@
# undef USE_core_stub_in_nt_dll
# undef USE_core_stub_in_nt_dll_NOT_DEFINED_@guard@
#endif
#ifdef USE_dyn_stub_in_nt_dll_NOT_DEFINED_@guard@
# undef USE_dyn_stub_in_nt_dll
# undef USE_dyn_stub_in_nt_dll_NOT_DEFINED_@guard@
#endif
#endif // __@guard@_hh__
"""
sub_include_pre = """\
#ifdef INCLUDED_stub_in_nt_dll
# ifdef USE_stub_in_nt_dll
# error "cannot use both INCLUDED_stub_in_nt_dll and USE_stub_in_nt_dll."
# else
# define USE_stub_in_nt_dll
# endif
# define INCLUDED_stub_in_nt_dll_DEFINED_@guard@
# undef INCLUDED_stub_in_nt_dll
#endif
"""
sub_include_post = """\
#ifdef INCLUDED_stub_in_nt_dll_DEFINED_@guard@
# undef USE_stub_in_nt_dll
# define INCLUDED_stub_in_nt_dll
# undef INCLUDED_stub_in_nt_dll_DEFINED_@guard@
#endif
"""
main_include = """\
#ifndef @guard_prefix@__@guardname@_EXTERNAL_GUARD__
#define @guard_prefix@__@guardname@_EXTERNAL_GUARD__
#include @filename@
#endif"""
##
## Modules
##
# name => C++ form of the module identifier
module_begin = """\
_CORBA_MODULE @name@
_CORBA_MODULE_BEG
"""
module_end = """\
_CORBA_MODULE_END
"""
POA_module_begin = """\
_CORBA_MODULE @POA_prefix@@name@
_CORBA_MODULE_BEG
"""
POA_module_end = """\
_CORBA_MODULE_END
"""
OBV_module_begin = """\
_CORBA_MODULE @OBV_prefix@@name@
_CORBA_MODULE_BEG
"""
OBV_module_end = """\
_CORBA_MODULE_END
"""
POA_interface = """\
class @POA_name@ :
public virtual @impl_scopedID@,
@inherits@
{
public:
virtual ~@POA_name@();
inline ::@scopedID@_ptr _this() {
return (::@scopedID@_ptr) _do_this(::@scopedID@::_PD_repoId);
}
};
"""
##
## Interfaces
##
interface_Helper = """\
#ifndef __@guard@__
#define __@guard@__
class @name@;
class _objref_@name@;
class _impl_@name@;
@class_sk_name@
typedef _objref_@name@* @name@_ptr;
typedef @name@_ptr @name@Ref;
class @name@_Helper {
public:
typedef @name@_ptr _ptr_type;
static _ptr_type _nil();
static _CORBA_Boolean is_nil(_ptr_type);
static void release(_ptr_type);
static void duplicate(_ptr_type);
static void marshalObjRef(_ptr_type, cdrStream&);
static _ptr_type unmarshalObjRef(cdrStream&);
};
typedef _CORBA_ObjRef_Var<_objref_@name@, @name@_Helper> @name@_var;
typedef _CORBA_ObjRef_OUT_arg<_objref_@name@,@name@_Helper > @name@_out;
#endif
"""
interface_type = """\
// interface @name@
class @name@ {
public:
// Declarations for this interface type.
typedef @name@_ptr _ptr_type;
typedef @name@_var _var_type;
static _ptr_type _duplicate(_ptr_type);
static _ptr_type _narrow(::CORBA::Object_ptr);
static _ptr_type _unchecked_narrow(::CORBA::Object_ptr);
@abstract_narrows@
static _ptr_type _nil();
static inline void _marshalObjRef(_ptr_type, cdrStream&);
static inline _ptr_type _unmarshalObjRef(cdrStream& s) {
omniObjRef* o = omniObjRef::_unMarshal(_PD_repoId,s);
if (o)
return (_ptr_type) o->_ptrToObjRef(_PD_repoId);
else
return _nil();
}
static _core_attr const char* _PD_repoId;
// Other IDL defined within this scope.
@Other_IDL@
};
"""
interface_abstract_narrows = """\
static _ptr_type _narrow(::CORBA::AbstractBase_ptr);
static _ptr_type _unchecked_narrow(::CORBA::AbstractBase_ptr);
"""
##
## Abstract Interfaces
##
abstract_interface_Helper = """\
#ifndef __@guard@__
#define __@guard@__
class @name@;
class _objref_@name@;
typedef @name@* @name@_ptr;
typedef @name@_ptr @name@Ref;
class @name@_Helper {
public:
typedef @name@_ptr _ptr_type;
static _ptr_type _nil();
static _CORBA_Boolean is_nil(_ptr_type);
static void release(_ptr_type);
static void duplicate(_ptr_type);
static void marshalObjRef(_ptr_type, cdrStream&);
static _ptr_type unmarshalObjRef(cdrStream&);
};
typedef _CORBA_ObjRef_Var<@name@, @name@_Helper> @name@_var;
typedef _CORBA_ObjRef_OUT_arg<@name@,@name@_Helper > @name@_out;
#endif
"""
abstract_interface_type = """\
// abstract interface @name@
class @name@ :
@inherits@
{
public:
// Declarations for this interface type.
typedef @name@_ptr _ptr_type;
typedef @name@_var _var_type;
static _ptr_type _duplicate(_ptr_type);
static _ptr_type _narrow(::CORBA::AbstractBase_ptr);
static _ptr_type _unchecked_narrow(::CORBA::AbstractBase_ptr);
static _ptr_type _nil();
static inline void _marshalObjRef(_ptr_type, cdrStream&);
static inline _ptr_type _unmarshalObjRef(cdrStream& s) {
_CORBA_Boolean b = s.unmarshalBoolean();
if (b) {
omniObjRef* o = omniObjRef::_unMarshal(_PD_repoId,s);
if (o)
return (_ptr_type) o->_ptrToObjRef(_PD_repoId);
else
return _nil();
}
else {
::CORBA::ValueBase* v = ::CORBA::ValueBase::_NP_unmarshal(s);
if (v)
return (_ptr_type) v->_ptrToValue(_PD_repoId);
else
return 0;
}
}
static _core_attr const char* _PD_repoId;
// Other IDL defined within this scope.
@Other_IDL@
// Operations declared in this abstract interface
@operations@
};
"""
##
## Local Interfaces
##
local_interface_Helper = """\
#ifndef __@guard@__
#define __@guard@__
class @name@;
typedef @name@* @name@_ptr;
typedef @name@_ptr @name@Ref;
class @name@_Helper {
public:
typedef @name@_ptr _ptr_type;
static _ptr_type _nil();
static _CORBA_Boolean is_nil(_ptr_type);
static void release(_ptr_type);
static void duplicate(_ptr_type);
static void marshalObjRef(_ptr_type, cdrStream&);
static _ptr_type unmarshalObjRef(cdrStream&);
};
typedef _CORBA_ObjRef_Var<@name@, @name@_Helper> @name@_var;
typedef _CORBA_ObjRef_OUT_arg<@name@,@name@_Helper > @name@_out;
#endif
"""
local_interface_type = """\
// local interface @name@
class @name@ :
@inherits@
{
public:
// Declarations for this interface type.
typedef @name@_ptr _ptr_type;
typedef @name@_var _var_type;
static _ptr_type _duplicate(_ptr_type);
static _ptr_type _narrow(::CORBA::Object_ptr);
static _ptr_type _unchecked_narrow(::CORBA::Object_ptr);
@abstract_narrows@
static _ptr_type _nil();
static inline void _marshalObjRef(_ptr_type, cdrStream& s) {
OMNIORB_THROW(MARSHAL, _OMNI_NS(MARSHAL_LocalObject),
(::CORBA::CompletionStatus)s.completion());
}
static inline _ptr_type _unmarshalObjRef(cdrStream& s) {
OMNIORB_THROW(MARSHAL, _OMNI_NS(MARSHAL_LocalObject),
(::CORBA::CompletionStatus)s.completion());
#ifdef NEED_DUMMY_RETURN
return 0;
#endif
}
static _core_attr const char* _PD_repoId;
// Other IDL defined within this scope.
@Other_IDL@
// Operations declared in this local interface
@operations@
private:
virtual void* _ptrToObjRef(const char*);
protected:
@name@();
virtual ~@name@();
};
class _nil_@name@ :
@nil_inherits@
public virtual @name@
{
public:
@nil_operations@
inline _nil_@name@() { _PR_setobj(0); }
protected:
virtual ~_nil_@name@();
};
"""
##
## Object reference
##
interface_objref = """\
class _objref_@name@ :
@inherits@
{
public:
@operations@
inline _objref_@name@() @init_shortcut@ { _PR_setobj(0); } // nil
_objref_@name@(omniIOR*, omniIdentity*);
protected:
virtual ~_objref_@name@();
@shortcut@
private:
virtual void* _ptrToObjRef(const char*);
_objref_@name@(const _objref_@name@&);
_objref_@name@& operator = (const _objref_@name@&);
// not implemented
friend class @name@;
};
"""
interface_shortcut = """\
virtual void _enableShortcut(omniServant*, const _CORBA_Boolean*);
_impl_@name@* _shortcut;
const _CORBA_Boolean* _invalid;\
"""
##
## Proxy Object Factory
##
interface_pof = """\
class _pof_@name@ : public _OMNI_NS(proxyObjectFactory) {
public:
inline _pof_@name@() : _OMNI_NS(proxyObjectFactory)(@name@::_PD_repoId) {}
virtual ~_pof_@name@();
virtual omniObjRef* newObjRef(omniIOR*,omniIdentity*);
virtual _CORBA_Boolean is_a(const char*) const;
};
"""
##
## Interface Impl class
##
interface_impl = """\
class _impl_@name@ :
@inherits@
{
public:
virtual ~_impl_@name@();
@operations@
public: // Really protected, workaround for xlC
virtual _CORBA_Boolean _dispatch(omniCallHandle&);
private:
virtual void* _ptrToInterface(const char*);
virtual const char* _mostDerivedRepoId();
@abstract@
};
"""
interface_impl_abstract = """\
virtual void _interface_is_abstract() = 0;"""
interface_impl_not_abstract = """\
virtual void _interface_is_abstract();"""
##
## Old BOA skeleton class
##
interface_sk = """\
class _sk_@name@ :
public virtual _impl_@name@,
@inherits@
{
public:
_sk_@name@() {}
_sk_@name@(const omniOrbBoaKey&);
virtual ~_sk_@name@();
inline @name@::_ptr_type _this() {
return (@name@::_ptr_type) omniOrbBoaServant::_this(@name@::_PD_repoId);
}
};
"""
##
## Objref marshal function
##
interface_marshal_forward = """\
inline void
@name@::_marshalObjRef(::@name@_ptr obj, cdrStream& s) {
omniObjRef::_marshal(obj->_PR_getobj(),s);
}
"""
abstract_interface_marshal_forward = """\
inline void
@name@::_marshalObjRef(::@name@_ptr obj, cdrStream& s) {
if (obj) {
::CORBA::ValueBase* v = obj->_NP_to_value();
if (v) {
s.marshalBoolean(0);
::CORBA::ValueBase::_NP_marshal(v,s);
return;
}
::CORBA::Object_ptr o = obj->_NP_to_object();
if (o) {
s.marshalBoolean(1);
omniObjRef::_marshal(o->_PR_getobj(),s);
return;
}
}
s.marshalBoolean(0);
::CORBA::ValueBase::_NP_marshal(0, s);
}
"""
##
## Typedefs
##
typedef_simple_to_array = """\
typedef @base@ @derived@;
typedef @base@_slice @derived@_slice;
typedef @base@_copyHelper @derived@_copyHelper;
typedef @base@_var @derived@_var;
typedef @base@_out @derived@_out;
typedef @base@_forany @derived@_forany;
@inline_qualifier@ @derived@_slice* @derived@_alloc() { return @base@_alloc(); }
@inline_qualifier@ @derived@_slice* @derived@_dup(const @derived@_slice* p) { return @base@_dup(p); }
@inline_qualifier@ void @derived@_copy( @derived@_slice* _to, const @derived@_slice* _from ) { @base@_copy(_to, _from); }
@inline_qualifier@ void @derived@_free( @derived@_slice* p) { @base@_free(p); }
"""
typedef_simple_string = """\
typedef char* @name@;
typedef ::CORBA::String_var @name@_var;
typedef ::CORBA::String_out @name@_out;
"""
typedef_simple_wstring = """\
typedef ::CORBA::WChar* @name@;
typedef ::CORBA::WString_var @name@_var;
typedef ::CORBA::WString_out @name@_out;
"""
typedef_simple_typecode = """\
typedef ::CORBA::TypeCode_ptr @name@_ptr;
typedef ::CORBA::TypeCode_var @name@_var;
"""
typedef_simple_any = """\
typedef ::CORBA::Any @name@;
typedef ::CORBA::Any_var @name@_var;
typedef ::CORBA::Any_out @name@_out;
"""
typedef_simple_fixed = """\
typedef _omni_Fixed<@digits@,@scale@> @name@;
typedef @name@& @name@_out;
"""
typedef_simple_basic = """\
typedef @base@ @derived@;
typedef @base@_out @derived@_out;
"""
typedef_simple_constructed = """\
typedef @base@ @name@;
typedef @base@_var @name@_var;
typedef @base@_out @name@_out;
"""
typedef_simple_objref = """\
typedef @base@ @name@;
typedef @base@_ptr @name@_ptr;
typedef @base@Ref @name@Ref;
@impl_base@
typedef @base@_Helper @name@_Helper;
@objref_base@
typedef @base@_var @name@_var;
typedef @base@_out @name@_out;
"""
typedef_enum_oper_friend = """\
// Need to declare <<= for elem type, as GCC expands templates early
#if defined(__GNUG__) && __GNUG__ == 2 && __GNUC_MINOR__ == 7
@friend@ inline void operator >>= (@element@, cdrStream&);
@friend@ inline void operator <<= (@element@&, cdrStream&);
#endif
"""
# Arrays
typedef_array = """\
typedef @type@ @name@@dims@;
typedef @type@ @name@_slice@taildims@;
@inline_qualifier@ @name@_slice* @name@_alloc() {
return new @name@_slice[@firstdim@];
}
@inline_qualifier@ @name@_slice* @name@_dup(const @name@_slice* _s) {
if (!_s) return 0;
@name@_slice* _data = @name@_alloc();
if (_data) {
@dup_loop@
}
return _data;
}
@inline_qualifier@ void @name@_copy(@name@_slice* _to, const @name@_slice* _from){
@copy_loop@
}
@inline_qualifier@ void @name@_free(@name@_slice* _s) {
delete [] _s;
}
"""
typedef_array_copyHelper = """\
class @name@_copyHelper {
public:
static inline @name@_slice* alloc() { return ::@fqname@_alloc(); }
static inline @name@_slice* dup(const @name@_slice* p) { return ::@fqname@_dup(p); }
static inline void free(@name@_slice* p) { ::@fqname@_free(p); }
};
typedef _CORBA_Array_@var_or_fix@_Var<@name@_copyHelper,@name@_slice> @name@_var;
typedef _CORBA_Array_@var_or_fix@_Forany<@name@_copyHelper,@name@_slice> @name@_forany;
"""
typedef_array_fix_out_type = """\
typedef @name@_slice* @name@_out;
"""
typedef_array_variable_out_type = """\
typedef _CORBA_Array_Variable_OUT_arg<@name@_slice,@name@_var > @name@_out;
"""
##
## Sequences
##
sequence_type = """\
class @name@_var;
class @name@ : public @derived@ {
public:
typedef @name@_var _var_type;
inline @name@() {}
inline @name@(const @name@& _s)
: @derived@(_s) {}
@bounds@
inline @name@& operator = (const @name@& _s) {
@derived@::operator=(_s);
return *this;
}
};
"""
sequence_forward_type = """\
class @name@_var;
class @name@ : public @derived@ {
public:
typedef @name@_var _var_type;
inline @name@() {}
@name@(const @name@& _s);
@name@& operator=(const @name@& _s);
@bounds@
virtual ~@name@();
@element@& operator[] (_CORBA_ULong _index);
const @element@& operator[] (_CORBA_ULong _index) const;
static @element@* allocbuf(_CORBA_ULong _nelems);
static void freebuf(@element@* _b);
void operator>>= (cdrStream &_s) const;
void operator<<= (cdrStream &_s);
protected:
void NP_copybuffer(_CORBA_ULong _newmax);
void NP_freebuf();
};
"""
sequence_unbounded_ctors = """\
inline @name@(_CORBA_ULong _max)
: @derived@(_max) {}
inline @name@(_CORBA_ULong _max, _CORBA_ULong _len, @element@* _val, _CORBA_Boolean _rel=0)
: @derived@(_max, _len, _val, _rel) {}
"""
sequence_bounded_ctors = """\
inline @name@(_CORBA_ULong _len, @element@* _val, _CORBA_Boolean _rel=0)
: @derived@(_len, _val, _rel) {}
"""
sequence_var_array_subscript = """\
inline @element@_slice* operator [] (_CORBA_ULong _s) {
return (@element@_slice*) ((_pd_seq->NP_data())[_s]);
}
"""
sequence_var_subscript = """\
inline @element@ operator [] (_CORBA_ULong _s) {
return (*_pd_seq)[_s];
}
"""
sequence_var = """\
class @name@_out;
class @name@_var {
public:
inline @name@_var() : _pd_seq(0) {}
inline @name@_var(@name@* _s) : _pd_seq(_s) {}
inline @name@_var(const @name@_var& _s) {
if( _s._pd_seq ) _pd_seq = new @name@(*_s._pd_seq);
else _pd_seq = 0;
}
inline ~@name@_var() { if( _pd_seq ) delete _pd_seq; }
inline @name@_var& operator = (@name@* _s) {
if( _pd_seq ) delete _pd_seq;
_pd_seq = _s;
return *this;
}
inline @name@_var& operator = (const @name@_var& _s) {
if( _s._pd_seq ) {
if( !_pd_seq ) _pd_seq = new @name@;
*_pd_seq = *_s._pd_seq;
} else if( _pd_seq ) {
delete _pd_seq;
_pd_seq = 0;
}
return *this;
}
@subscript_operator@
inline @name@* operator -> () { return _pd_seq; }
inline const @name@* operator -> () const { return _pd_seq; }
#if defined(__GNUG__)
inline operator @name@& () const { return *_pd_seq; }
#else
inline operator const @name@& () const { return *_pd_seq; }
inline operator @name@& () { return *_pd_seq; }
#endif
inline const @name@& in() const { return *_pd_seq; }
inline @name@& inout() { return *_pd_seq; }
inline @name@*& out() {
if( _pd_seq ) { delete _pd_seq; _pd_seq = 0; }
return _pd_seq;
}
inline @name@* _retn() { @name@* tmp = _pd_seq; _pd_seq = 0; return tmp; }
friend class @name@_out;
private:
@name@* _pd_seq;
};
"""
sequence_out_array_subscript = """\
inline @element@_slice* operator [] (_CORBA_ULong _i) {
return (@element@_slice*) ((_data->NP_data())[_i]);
}
"""
sequence_out_subscript = """\
inline @element@ operator [] (_CORBA_ULong _i) {
return (*_data)[_i];
}
"""
sequence_out = """\
class @name@_out {
public:
inline @name@_out(@name@*& _s) : _data(_s) { _data = 0; }
inline @name@_out(@name@_var& _s)
: _data(_s._pd_seq) { _s = (@name@*) 0; }
inline @name@_out(const @name@_out& _s) : _data(_s._data) {}
inline @name@_out& operator = (const @name@_out& _s) {
_data = _s._data;
return *this;
}
inline @name@_out& operator = (@name@* _s) {
_data = _s;
return *this;
}
inline operator @name@*&() { return _data; }
inline @name@*& ptr() { return _data; }
inline @name@* operator->() { return _data; }
@subscript_operator@
@name@*& _data;
private:
@name@_out();
@name@_out& operator=(const @name@_var&);
};
"""
##
## Structs
##
struct = """\
struct @name@ {
typedef _CORBA_ConstrType_@fix_or_var@_Var<@name@> _var_type;
@Other_IDL@
@members@
void operator>>= (cdrStream &) const;
void operator<<= (cdrStream &);
};
typedef @name@::_var_type @name@_var;
"""
struct_fix_out_type = """\
typedef @name@& @name@_out;
"""
struct_variable_out_type = """\
typedef _CORBA_ConstrType_Variable_OUT_arg< @name@,@name@_var > @name@_out;
"""
struct_array_declarator = """\
typedef @memtype@ @prefix@_@cxx_id@@dims@;
typedef @memtype@ _@cxx_id@_slice@tail_dims@;
"""
struct_nonarray_sequence = """\
typedef @memtype@ _@cxx_id@_seq;
_@cxx_id@_seq @cxx_id@;
"""
struct_normal_member = """\
@memtype@ @cxx_id@@dims@;
"""
struct_forward = """\
struct @name@;
"""
##
## Exceptions
##
exception = """\
class @name@ : public ::CORBA::UserException {
public:
@Other_IDL@
@members@
inline @name@() {
pd_insertToAnyFn = insertToAnyFn;
pd_insertToAnyFnNCP = insertToAnyFnNCP;
}
@name@(const @name@&);
@constructor@
@name@& operator=(const @name@&);
virtual ~@name@();
virtual void _raise() const;
static @name@* _downcast(::CORBA::Exception*);
static const @name@* _downcast(const ::CORBA::Exception*);
static inline @name@* _narrow(::CORBA::Exception* _e) {
return _downcast(_e);
}
@inline@void operator>>=(cdrStream&) const @body@
@inline@void operator<<=(cdrStream&) @body@
static _core_attr insertExceptionToAny insertToAnyFn;
static _core_attr insertExceptionToAnyNCP insertToAnyFnNCP;
virtual ::CORBA::Exception* _NP_duplicate() const;
static _core_attr const char* _PD_repoId;
static _core_attr const char* _PD_typeId;
private:
virtual const char* _NP_typeId() const;
virtual const char* _NP_repoId(int*) const;
virtual void _NP_marshal(cdrStream&) const;
};
"""
exception_array_declarator = """\
typedef @memtype@ @private_prefix@_@cxx_id@@dims@;
typedef @memtype@ _@cxx_id@_slice@tail_dims@;
"""
exception_member = """\
@memtype@ @cxx_id@@dims@;
"""
##
## Unions
##
union_ctor_nonexhaustive = """\
if ((_pd__default = _value._pd__default)) {
@default@
}
else {
switch(_value._pd__d) {
@cases@
}
}
_pd__d = _value._pd__d;
"""
union_ctor_exhaustive = """\
switch(_value._pd__d) {
@cases@
}
_pd__d = _value._pd__d;"""
union_ctor_case = """\
case @discrimvalue@: @name@(_value._pd_@name@); break;
"""
union_ctor_bool_default = """\
#ifndef HAS_Cplusplus_Bool
default: break;
#endif
"""
union_ctor_default = """\
default: break;
"""
union = """\
class @unionname@ {
public:
typedef _CORBA_ConstrType_@fixed@_Var<@unionname@> _var_type;
@Other_IDL@
@unionname@(): _pd__initialised(0) {
@default_constructor@
}
@unionname@(const @unionname@& _value) {
_pd__initialised = _value._pd__initialised;
@copy_constructor@
}
~@unionname@() {}
@unionname@& operator=(const @unionname@& _value) {
_pd__initialised = _value._pd__initialised;
@copy_constructor@
return *this;
}
@discrimtype@ _d() const { return _pd__d;}
void _d(@discrimtype@ _value){
@_d_body@
}
@implicit_default@
@members@
void operator>>= (cdrStream&) const;
void operator<<= (cdrStream&);
private:
@discrimtype@ _pd__d;
_CORBA_Boolean _pd__default;
_CORBA_Boolean _pd__initialised;
@union@
@outsideUnion@
};
typedef @unionname@::_var_type @unionname@_var;
"""
union_fix_out_type = """\
typedef @unionname@& @unionname@_out;
"""
union_variable_out_type = """\
typedef _CORBA_ConstrType_Variable_OUT_arg< @unionname@,@unionname@_var > @unionname@_out;
"""
union_union = """\
union {
@members@
};
"""
union_d_fn_body = """\
// illegal to set discriminator before making a member active
if (!_pd__initialised)
OMNIORB_THROW(BAD_PARAM,_OMNI_NS(BAD_PARAM_InvalidUnionDiscValue),::CORBA::COMPLETED_NO);
if (_value == _pd__d) return; // no change
@switch@
fail:
OMNIORB_THROW(BAD_PARAM,_OMNI_NS(BAD_PARAM_InvalidUnionDiscValue),::CORBA::COMPLETED_NO);
"""
union_constructor_implicit = """\
_default();
"""
union_constructor_default = """\
_pd__default = 1;
_pd__d = @default@;
"""
union_implicit_default = """\
void _default()
{
_pd__initialised = 1;
_pd__d = @arbitraryDefault@;
_pd__default = 1;
}
"""
union_proxy_float = """
#ifdef USING_PROXY_FLOAT
@type@ _pd_@name@@dims@;
#endif
"""
union_noproxy_float = """
#ifndef USING_PROXY_FLOAT
@type@ _pd_@name@@dims@;
#endif
"""
union_array_declarator = """\
typedef @memtype@ @prefix@_@name@@dims@;
typedef @memtype@ _@name@_slice@tail_dims@;
"""
union_array = """\
const @memtype@_slice *@name@ () const { return _pd_@name@; }
void @name@ (const @const_type@ _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
@loop@
}
"""
union_any = """\
const @type@ &@name@ () const { return _pd_@name@; }
@type@ &@name@ () { return _pd_@name@; }
void @name@ (const @type@& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
"""
union_typecode = """\
::CORBA::TypeCode_ptr @name@ () const { return _pd_@name@._ptr; }
void @name@(::CORBA::TypeCode_ptr _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = ::CORBA::TypeCode::_duplicate(_value);
}
void @name@(const ::CORBA::TypeCode_member& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
void @name@(const ::CORBA::TypeCode_var& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
"""
union_basic = """\
@type@ @name@ () const { return _pd_@name@; }
void @name@ (@type@ _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
"""
union_string = """\
const char * @name@ () const { return (const char*) _pd_@name@; }
void @name@(char* _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
void @name@(const char* _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
void @name@(const ::CORBA::String_var& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
void @name@(const ::CORBA::String_member& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
"""
union_wstring = """\
const ::CORBA::WChar * @name@ () const {
return (const ::CORBA::WChar*) _pd_@name@;
}
void @name@(::CORBA::WChar* _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
void @name@(const ::CORBA::WChar* _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
void @name@(const ::CORBA::WString_var& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
void @name@(const ::CORBA::WString_member& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
"""
union_objref = """\
@ptr_name@ @member@ () const { return _pd_@member@._ptr; }
void @member@(@ptr_name@ _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
@Helper_name@::duplicate(_value);
_pd_@member@ = _value;
}
void @member@(const @memtype@& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@member@ = _value;
}
void @member@(const @var_name@& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@member@ = _value;
}
"""
union_constructed = """\
const @type@ &@name@ () const { return _pd_@name@; }
@type@ &@name@ () { return _pd_@name@; }
void @name@ (const @type@& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@name@ = _value;
}
"""
union_sequence = """\
typedef @sequence_template@ _@member@_seq;
const _@member@_seq& @member@ () const { return _pd_@member@; }
_@member@_seq& @member@ () { return _pd_@member@; }
void @member@ (const _@member@_seq& _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
_pd_@member@ = _value;
}
"""
union_value = """\
@type@* @member@() const { return _pd_@member@.in(); }
void @member@(@type@* _value) {
_pd__initialised = 1;
_pd__d = @discrimvalue@;
_pd__default = @isDefault@;
::CORBA::add_ref(_value);
_pd_@member@ = _value;
}
"""
union_member = """\
@type@ _pd_@name@@dims@;
"""
union_forward = """\
class @name@;
"""
##
## Enum
##
enum = """\
enum @name@ { @memberlist@ /*, __max_@name@=0xffffffff */ };
typedef @name@& @name@_out;
"""
##
## Const
##
const_inclass_isinteger = """\
static _core_attr const @type@ @name@ _init_in_cldecl_( = @val@ );
"""
const_inclass_notinteger = """\
static _core_attr const @type@ @name@;
"""
const_outsideclass_isinteger = """\
_CORBA_@where@_VARINT const @type@ @name@ _init_in_decl_( = @val@ );
"""
const_outsideclass_notinteger = """\
_CORBA_@where@_VAR _core_attr const @type@ @name@;
"""
##
## Typecode_ptr
##
typecode = """\
@qualifier@ _dyn_attr const ::CORBA::TypeCode_ptr _tc_@name@;
"""
##
## Operators
##
any_struct = """\
extern void operator<<=(::CORBA::Any& _a, const @fqname@& _s);
extern void operator<<=(::CORBA::Any& _a, @fqname@* _sp);
extern _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _sp);
extern _CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& _sp);
"""
any_exception = """\
void operator<<=(::CORBA::Any& _a, const @fqname@& _s);
void operator<<=(::CORBA::Any& _a, const @fqname@* _sp);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& _sp);
"""
any_union = """\
void operator<<=(::CORBA::Any& _a, const @fqname@& _s);
void operator<<=(::CORBA::Any& _a, @fqname@* _sp);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& _sp);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _sp);
"""
any_enum = """\
void operator<<=(::CORBA::Any& _a, @name@ _s);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @name@& _s);
"""
any_interface = """\
void operator<<=(::CORBA::Any& _a, @fqname@_ptr _s);
void operator<<=(::CORBA::Any& _a, @fqname@_ptr* _s);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@_ptr& _s);
"""
any_array_declarator = """\
void operator<<=(::CORBA::Any& _a, const @fqname@_forany& _s);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@_forany& _s);
"""
any_sequence = """\
void operator<<=(::CORBA::Any& _a, const @fqname@& _s);
void operator<<=(::CORBA::Any& _a, @fqname@* _sp);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _sp);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, const @fqname@*& _sp);
"""
any_value = """\
void operator<<=(::CORBA::Any& _a, @fqname@* _s);
void operator<<=(::CORBA::Any& _a, @fqname@** _s);
_CORBA_Boolean operator>>=(const ::CORBA::Any& _a, @fqname@*& _s);
"""
enum_operators = """\
inline void operator >>=(@name@ _e, cdrStream& s) {
::operator>>=((::CORBA::ULong)_e, s);
}
inline void operator <<= (@name@& _e, cdrStream& s) {
::CORBA::ULong @private_prefix@_e;
::operator<<=(@private_prefix@_e,s);
if (@private_prefix@_e <= @last_item@) {
_e = (@name@) @private_prefix@_e;
}
else {
OMNIORB_THROW(MARSHAL,_OMNI_NS(MARSHAL_InvalidEnumValue),
(::CORBA::CompletionStatus)s.completion());
}
}
"""
##
## tie template
##
tie_template = """\
template <class _omniT>
class @tie_name@ : public virtual @inherits@
{
public:
@tie_name@(_omniT& t)
: pd_obj(&t), pd_poa(0), pd_rel(0) {}
@tie_name@(_omniT& t, ::PortableServer::POA_ptr p)
: pd_obj(&t), pd_poa(p), pd_rel(0) {}
@tie_name@(_omniT* t, _CORBA_Boolean r=1)
: pd_obj(t), pd_poa(0), pd_rel(r) {}
@tie_name@(_omniT* t, ::PortableServer::POA_ptr p,_CORBA_Boolean r=1)
: pd_obj(t), pd_poa(p), pd_rel(r) {}
~@tie_name@() {
if( pd_poa ) ::CORBA::release(pd_poa);
if( pd_rel ) delete pd_obj;
}
_omniT* _tied_object() { return pd_obj; }
void _tied_object(_omniT& t) {
if( pd_rel ) delete pd_obj;
pd_obj = &t;
pd_rel = 0;
}
void _tied_object(_omniT* t, _CORBA_Boolean r=1) {
if( pd_rel ) delete pd_obj;
pd_obj = t;
pd_rel = r;
}
_CORBA_Boolean _is_owner() { return pd_rel; }
void _is_owner(_CORBA_Boolean io) { pd_rel = io; }
::PortableServer::POA_ptr _default_POA() {
if( !pd_poa ) return ::PortableServer::POA::_the_root_poa();
else return ::PortableServer::POA::_duplicate(pd_poa);
}
@callables@
private:
_omniT* pd_obj;
::PortableServer::POA_ptr pd_poa;
_CORBA_Boolean pd_rel;
};
"""
tie_template_old = """\
template <class _omniT, _CORBA_Boolean release>
class @tie_name@ : public virtual @inherits@
{
public:
@tie_name@(_omniT& t)
: pd_obj(&t), pd_rel(release) {}
@tie_name@(_omniT* t)
: pd_obj(t), pd_rel(release) {}
~@tie_name@() {
if( pd_rel ) delete pd_obj;
}
@callables@
private:
_omniT* pd_obj;
_CORBA_Boolean pd_rel;
};
"""
##
## tc_string
##
tcstring = """\
#if !defined(___tc_string_@n@__) && !defined(DISABLE_Unnamed_Bounded_String_TC)
#define ___tc_string_@n@__
_CORBA_GLOBAL_VAR _dyn_attr const ::CORBA::TypeCode_ptr _tc_string_@n@;
#endif
"""
##
## tc_wstring
##
tcwstring = """\
#if !defined(___tc_wstring_@n@__) && !defined(DISABLE_Unnamed_Bounded_WString_TC)
#define ___tc_wstring_@n@__
_CORBA_GLOBAL_VAR _dyn_attr const ::CORBA::TypeCode_ptr _tc_wstring_@n@;
#endif
"""
|
ogata-lab/rtmsdk-mac
|
x86_64/lib/python2.7/site-packages/omniidl_be/cxx/header/template.py
|
Python
|
lgpl-2.1
| 39,776
|
## \file
## \ingroup tutorial_dataframe
## \notebook -draw
## \brief An example of complex analysis with RDataFrame: reconstructing the Higgs boson.
##
## This tutorial is a simplified but yet complex example of an analysis reconstructing the Higgs boson decaying to two Z
## bosons from events with four leptons. The data and simulated events are taken from CERN OpenData representing a
## subset of the data recorded in 2012 with the CMS detector at the LHC. The tutorials follows the Higgs to four leptons
## analysis published on CERN Open Data portal ([10.7483/OPENDATA.CMS.JKB8.RR42](http://opendata.cern.ch/record/5500)).
## The resulting plots show the invariant mass of the selected four lepton systems in different decay modes (four muons,
## four electrons and two of each kind) and in a combined plot indicating the decay of the Higgs boson with a mass of
## about 125 GeV.
##
## The following steps are performed for each sample with data and simulated events in order to reconstruct the Higgs
## boson from the selected muons and electrons:
## 1. Select interesting events with multiple cuts on event properties, e.g., number of leptons, kinematics of the
## leptons and quality of the tracks.
## 2. Reconstruct two Z bosons of which only one on the mass shell from the selected events and apply additional cuts on
## the reconstructed objects.
## 3. Reconstruct the Higgs boson from the remaining Z boson candidates and calculate its invariant mass.
##
## Another aim of this version of the tutorial is to show a way to blend C++ and Python code. All the functions that
## make computations on data to define new columns or filter existing ones in a precise way, better suited to be written
## in C++, have been moved to a header that is then declared to the ROOT C++ interpreter. The functions that instead
## create nodes of the computational graph (e.g. Filter, Define) remain inside the main Python script.
##
## The tutorial has the fast mode enabled by default, which reads the data from already skimmed
## datasets with a total size of only 51MB. If the fast mode is disabled, the tutorial runs over
## the full dataset with a size of 12GB.
##
## \macro_image
## \macro_code
## \macro_output
##
## \date July 2019
## \author Stefan Wunsch (KIT, CERN), Vincenzo Eduardo Padulano (UniMiB, CERN)
import ROOT
import os
# Enable multi-threading
ROOT.ROOT.EnableImplicitMT()
# Include necessary header
higgs_header_path = os.path.join(os.sep, str(ROOT.gROOT.GetTutorialDir()) + os.sep, "dataframe" + os.sep,
"df103_NanoAODHiggsAnalysis_python.h")
ROOT.gInterpreter.Declare('#include "{}"'.format(higgs_header_path))
# Python functions
def reco_higgs_to_2el2mu(df):
"""Reconstruct Higgs from two electrons and two muons"""
# Filter interesting events
df_base = selection_2el2mu(df)
# Compute masses of Z systems
df_z_mass = df_base.Define("Z_mass", "compute_z_masses_2el2mu(Electron_pt, Electron_eta, Electron_phi,"
" Electron_mass, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
# Cut on mass of Z candidates
df_z_cut = filter_z_candidates(df_z_mass)
# Reconstruct H mass
df_h_mass = df_z_cut.Define("H_mass", "compute_higgs_mass_2el2mu(Electron_pt, Electron_eta, Electron_phi,"
" Electron_mass, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
return df_h_mass
def selection_2el2mu(df):
"""Select interesting events with two electrons and two muons"""
df_ge2el2mu = df.Filter("nElectron>=2 && nMuon>=2", "At least two electrons and two muons")
df_eta = df_ge2el2mu.Filter("All(abs(Electron_eta)<2.5) && All(abs(Muon_eta)<2.4)", "Eta cuts")
df_pt = df_eta.Filter("pt_cuts(Muon_pt, Electron_pt)", "Pt cuts")
df_dr = df_pt.Filter("dr_cuts(Muon_eta, Muon_phi, Electron_eta, Electron_phi)", "Dr cuts")
df_iso = df_dr.Filter("All(abs(Electron_pfRelIso03_all)<0.40) && All(abs(Muon_pfRelIso04_all)<0.40)",
"Require good isolation")
df_el_ip3d = df_iso.Define("Electron_ip3d_el", "sqrt(Electron_dxy*Electron_dxy + Electron_dz*Electron_dz)")
df_el_sip3d = df_el_ip3d.Define("Electron_sip3d_el",
"Electron_ip3d_el/sqrt(Electron_dxyErr*Electron_dxyErr + "
"Electron_dzErr*Electron_dzErr)")
df_el_track = df_el_sip3d.Filter("All(Electron_sip3d_el<4) && All(abs(Electron_dxy)<0.5) &&"
" All(abs(Electron_dz)<1.0)",
"Electron track close to primary vertex with small uncertainty")
df_mu_ip3d = df_el_track.Define("Muon_ip3d_mu", "sqrt(Muon_dxy*Muon_dxy + Muon_dz*Muon_dz)")
df_mu_sip3d = df_mu_ip3d.Define("Muon_sip3d_mu",
"Muon_ip3d_mu/sqrt(Muon_dxyErr*Muon_dxyErr + Muon_dzErr*Muon_dzErr)")
df_mu_track = df_mu_sip3d.Filter("All(Muon_sip3d_mu<4) && All(abs(Muon_dxy)<0.5) && All(abs(Muon_dz)<1.0)",
"Muon track close to primary vertex with small uncertainty")
df_2p2n = df_mu_track.Filter("Sum(Electron_charge)==0 && Sum(Muon_charge)==0",
"Two opposite charged electron and muon pairs")
return df_2p2n
def reco_higgs_to_4mu(df):
"""Reconstruct Higgs from four muons"""
# Filter interesting events
df_base = selection_4mu(df)
# Reconstruct Z systems
df_z_idx = df_base.Define("Z_idx", "reco_zz_to_4l(Muon_pt, Muon_eta, Muon_phi, Muon_mass, Muon_charge)")
# Cut on distance between muons building Z systems
df_z_dr = df_z_idx.Filter("filter_z_dr(Z_idx, Muon_eta, Muon_phi)", "Delta R separation of muons building Z system")
# Compute masses of Z systems
df_z_mass = df_z_dr.Define("Z_mass", "compute_z_masses_4l(Z_idx, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
# Cut on mass of Z candidates
df_z_cut = filter_z_candidates(df_z_mass)
# Reconstruct H mass
df_h_mass = df_z_cut.Define("H_mass", "compute_higgs_mass_4l(Z_idx, Muon_pt, Muon_eta, Muon_phi, Muon_mass)")
return df_h_mass
def selection_4mu(df):
"""Select interesting events with four muons"""
df_ge4m = df.Filter("nMuon>=4", "At least four muons")
df_iso = df_ge4m.Filter("All(abs(Muon_pfRelIso04_all)<0.40)", "Require good isolation")
df_kin = df_iso.Filter("All(Muon_pt>5) && All(abs(Muon_eta)<2.4)", "Good muon kinematics")
df_ip3d = df_kin.Define("Muon_ip3d", "sqrt(Muon_dxy*Muon_dxy + Muon_dz*Muon_dz)")
df_sip3d = df_ip3d.Define("Muon_sip3d", "Muon_ip3d/sqrt(Muon_dxyErr*Muon_dxyErr + Muon_dzErr*Muon_dzErr)")
df_pv = df_sip3d.Filter("All(Muon_sip3d<4) && All(abs(Muon_dxy)<0.5) && All(abs(Muon_dz)<1.0)",
"Track close to primary vertex with small uncertainty")
df_2p2n = df_pv.Filter("nMuon==4 && Sum(Muon_charge==1)==2 && Sum(Muon_charge==-1)==2",
"Two positive and two negative muons")
return df_2p2n
def filter_z_candidates(df):
"""Apply selection on reconstructed Z candidates"""
df_z1_cut = df.Filter("Z_mass[0] > 40 && Z_mass[0] < 120", "Mass of first Z candidate in [40, 120]")
df_z2_cut = df_z1_cut.Filter("Z_mass[1] > 12 && Z_mass[1] < 120", "Mass of second Z candidate in [12, 120]")
return df_z2_cut
def reco_higgs_to_4el(df):
"""Reconstruct Higgs from four electrons"""
# Filter interesting events
df_base = selection_4el(df)
# Reconstruct Z systems
df_z_idx = df_base.Define("Z_idx",
"reco_zz_to_4l(Electron_pt, Electron_eta, Electron_phi, Electron_mass, Electron_charge)")
# Cut on distance between Electrons building Z systems
df_z_dr = df_z_idx.Filter("filter_z_dr(Z_idx, Electron_eta, Electron_phi)",
"Delta R separation of Electrons building Z system")
# Compute masses of Z systems
df_z_mass = df_z_dr.Define("Z_mass",
"compute_z_masses_4l(Z_idx, Electron_pt, Electron_eta, Electron_phi, Electron_mass)")
# Cut on mass of Z candidates
df_z_cut = filter_z_candidates(df_z_mass)
# Reconstruct H mass
df_h_mass = df_z_cut.Define("H_mass",
"compute_higgs_mass_4l(Z_idx, Electron_pt, Electron_eta, Electron_phi, Electron_mass)")
return df_h_mass
def selection_4el(df):
"""Select interesting events with four electrons"""
df_ge4el = df.Filter("nElectron>=4", "At least our electrons")
df_iso = df_ge4el.Filter("All(abs(Electron_pfRelIso03_all)<0.40)", "Require good isolation")
df_kin = df_iso.Filter("All(Electron_pt>7) && All(abs(Electron_eta)<2.5)", "Good Electron kinematics")
df_ip3d = df_kin.Define("Electron_ip3d", "sqrt(Electron_dxy*Electron_dxy + Electron_dz*Electron_dz)")
df_sip3d = df_ip3d.Define("Electron_sip3d",
"Electron_ip3d/sqrt(Electron_dxyErr*Electron_dxyErr + Electron_dzErr*Electron_dzErr)")
df_pv = df_sip3d.Filter("All(Electron_sip3d<4) && All(abs(Electron_dxy)<0.5) && All(abs(Electron_dz)<1.0)",
"Track close to primary vertex with small uncertainty")
df_2p2n = df_pv.Filter("nElectron==4 && Sum(Electron_charge==1)==2 && Sum(Electron_charge==-1)==2",
"Two positive and two negative electrons")
return df_2p2n
def plot(sig, bkg, data, x_label, filename):
"""
Plot invariant mass for signal and background processes from simulated
events overlay the measured data.
"""
# Canvas and general style options
ROOT.gStyle.SetOptStat(0)
ROOT.gStyle.SetTextFont(42)
d = ROOT.TCanvas("d", "", 800, 700)
# Make sure the canvas stays in the list of canvases after the macro execution
ROOT.SetOwnership(d, False)
d.SetLeftMargin(0.15)
# Get signal and background histograms and stack them to show Higgs signal
# on top of the background process
h_bkg = bkg
h_cmb = sig.Clone()
h_cmb.Add(h_bkg)
h_cmb.SetTitle("")
h_cmb.GetXaxis().SetTitle(x_label)
h_cmb.GetXaxis().SetTitleSize(0.04)
h_cmb.GetYaxis().SetTitle("N_{Events}")
h_cmb.GetYaxis().SetTitleSize(0.04)
h_cmb.SetLineColor(ROOT.kRed)
h_cmb.SetLineWidth(2)
h_cmb.SetMaximum(18)
h_bkg.SetLineWidth(2)
h_bkg.SetFillStyle(1001)
h_bkg.SetLineColor(ROOT.kBlack)
h_bkg.SetFillColor(ROOT.kAzure - 9)
# Get histogram of data points
h_data = data
h_data.SetLineWidth(1)
h_data.SetMarkerStyle(20)
h_data.SetMarkerSize(1.0)
h_data.SetMarkerColor(ROOT.kBlack)
h_data.SetLineColor(ROOT.kBlack)
# Draw histograms
h_cmb.DrawCopy("HIST")
h_bkg.DrawCopy("HIST SAME")
h_data.DrawCopy("PE1 SAME")
# Add legend
legend = ROOT.TLegend(0.62, 0.70, 0.82, 0.88)
legend.SetFillColor(0)
legend.SetBorderSize(0)
legend.SetTextSize(0.03)
legend.AddEntry(h_data, "Data", "PE1")
legend.AddEntry(h_bkg, "ZZ", "f")
legend.AddEntry(h_cmb, "m_{H} = 125 GeV", "f")
legend.Draw()
# Add header
cms_label = ROOT.TLatex()
cms_label.SetTextSize(0.04)
cms_label.DrawLatexNDC(0.16, 0.92, "#bf{CMS Open Data}")
header = ROOT.TLatex()
header.SetTextSize(0.03)
header.DrawLatexNDC(0.63, 0.92, "#sqrt{s} = 8 TeV, L_{int} = 11.6 fb^{-1}")
# Save plot
d.SaveAs(filename)
def df103_NanoAODHiggsAnalysis(run_fast = True):
# In fast mode, take samples from */cms_opendata_2012_nanoaod_skimmed/*, which has
# the preselections from the selection_* functions already applied.
path = "root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod/"
if run_fast: path = "root://eospublic.cern.ch//eos/root-eos/cms_opendata_2012_nanoaod_skimmed/"
# Create dataframes for signal, background and data samples
# Signal: Higgs -> 4 leptons
df_sig_4l = ROOT.RDataFrame("Events", path + "SMHiggsToZZTo4L.root")
# Background: ZZ -> 4 leptons
# Note that additional background processes from the original paper
# with minor contribution were left out for this
# tutorial.
df_bkg_4mu = ROOT.RDataFrame("Events", path + "ZZTo4mu.root")
df_bkg_4el = ROOT.RDataFrame("Events", path + "ZZTo4e.root")
df_bkg_2el2mu = ROOT.RDataFrame("Events", path + "ZZTo2e2mu.root")
# CMS data taken in 2012 (11.6 fb^-1 integrated luminosity)
df_data_doublemu = ROOT.RDataFrame("Events", (path + f for f in ["Run2012B_DoubleMuParked.root", "Run2012C_DoubleMuParked.root"]))
df_data_doubleel = ROOT.RDataFrame("Events", (path + f for f in ["Run2012B_DoubleElectron.root", "Run2012C_DoubleElectron.root"]))
# Number of bins for all histograms
nbins = 36
# Weights
luminosity = 11580.0 # Integrated luminosity of the data samples
xsec_ZZTo4mu = 0.077 # ZZ->4mu: Standard Model cross-section
nevt_ZZTo4mu = 1499064.0 # ZZ->4mu: Number of simulated events
xsec_ZZTo4el = 0.077 # ZZ->4el: Standard Model cross-section
nevt_ZZTo4el = 1499093.0 # ZZ->4el: Number of simulated events
xsec_ZZTo2el2mu = 0.18 # ZZ->2el2mu: Standard Model cross-section
nevt_ZZTo2el2mu = 1497445.0 # ZZ->2el2mu: Number of simulated events
xsec_SMHiggsToZZTo4L = 0.0065 # H->4l: Standard Model cross-section
nevt_SMHiggsToZZTo4L = 299973.0 # H->4l: Number of simulated events
scale_ZZTo4l = 1.386 # ZZ->4l: Scale factor for ZZ to four leptons
weight_sig_4mu = luminosity * xsec_SMHiggsToZZTo4L / nevt_SMHiggsToZZTo4L
weight_bkg_4mu = luminosity * xsec_ZZTo4mu * scale_ZZTo4l / nevt_ZZTo4mu
weight_sig_4el = luminosity * xsec_SMHiggsToZZTo4L / nevt_SMHiggsToZZTo4L
weight_bkg_4el = luminosity * xsec_ZZTo4el * scale_ZZTo4l / nevt_ZZTo4el
weight_sig_2el2mu = luminosity * xsec_SMHiggsToZZTo4L / nevt_SMHiggsToZZTo4L
weight_bkg_2el2mu = luminosity * xsec_ZZTo2el2mu * scale_ZZTo4l / nevt_ZZTo2el2mu
# Reconstruct Higgs to 4 muons
df_sig_4mu_reco = reco_higgs_to_4mu(df_sig_4l)
df_h_sig_4mu = df_sig_4mu_reco.Define("weight", "{}".format(weight_sig_4mu))\
.Histo1D(("h_sig_4mu", "", nbins, 70, 180), "H_mass", "weight")
df_bkg_4mu_reco = reco_higgs_to_4mu(df_bkg_4mu)
df_h_bkg_4mu = df_bkg_4mu_reco.Define("weight", "{}".format(weight_bkg_4mu))\
.Histo1D(("h_bkg_4mu", "", nbins, 70, 180), "H_mass", "weight")
df_data_4mu_reco = reco_higgs_to_4mu(df_data_doublemu)
df_h_data_4mu = df_data_4mu_reco.Define("weight", "1.0")\
.Histo1D(("h_data_4mu", "", nbins, 70, 180), "H_mass", "weight")
# Reconstruct Higgs to 4 electrons
df_sig_4el_reco = reco_higgs_to_4el(df_sig_4l)
df_h_sig_4el = df_sig_4el_reco.Define("weight", "{}".format(weight_sig_4el))\
.Histo1D(("h_sig_4el", "", nbins, 70, 180), "H_mass", "weight")
df_bkg_4el_reco = reco_higgs_to_4el(df_bkg_4el)
df_h_bkg_4el = df_bkg_4el_reco.Define("weight", "{}".format(weight_bkg_4el))\
.Histo1D(("h_bkg_4el", "", nbins, 70, 180), "H_mass", "weight")
df_data_4el_reco = reco_higgs_to_4el(df_data_doubleel)
df_h_data_4el = df_data_4el_reco.Define("weight", "1.0")\
.Histo1D(("h_data_4el", "", nbins, 70, 180), "H_mass", "weight")
# Reconstruct Higgs to 2 electrons and 2 muons
df_sig_2el2mu_reco = reco_higgs_to_2el2mu(df_sig_4l)
df_h_sig_2el2mu = df_sig_2el2mu_reco.Define("weight", "{}".format(weight_sig_2el2mu))\
.Histo1D(("h_sig_2el2mu", "", nbins, 70, 180), "H_mass", "weight")
df_bkg_2el2mu_reco = reco_higgs_to_2el2mu(df_bkg_2el2mu)
df_h_bkg_2el2mu = df_bkg_2el2mu_reco.Define("weight", "{}".format(weight_bkg_2el2mu))\
.Histo1D(("h_bkg_2el2mu", "", nbins, 70, 180), "H_mass", "weight")
df_data_2el2mu_reco = reco_higgs_to_2el2mu(df_data_doublemu)
df_h_data_2el2mu = df_data_2el2mu_reco.Define("weight", "1.0")\
.Histo1D(("h_data_2el2mu_doublemu", "", nbins, 70, 180), "H_mass", "weight")
# Trigger event loops and retrieve histograms
signal_4mu = df_h_sig_4mu.GetValue()
background_4mu = df_h_bkg_4mu.GetValue()
data_4mu = df_h_data_4mu.GetValue()
signal_4el = df_h_sig_4el.GetValue()
background_4el = df_h_bkg_4el.GetValue()
data_4el = df_h_data_4el.GetValue()
signal_2el2mu = df_h_sig_2el2mu.GetValue()
background_2el2mu = df_h_bkg_2el2mu.GetValue()
data_2el2mu = df_h_data_2el2mu.GetValue()
# Make plots
plot(signal_4mu, background_4mu, data_4mu, "m_{4#mu} (GeV)", "higgs_4mu.pdf")
plot(signal_4el, background_4el, data_4el, "m_{4e} (GeV)", "higgs_4el.pdf")
plot(signal_2el2mu, background_2el2mu, data_2el2mu, "m_{2e2#mu} (GeV)", "higgs_2el2mu.pdf")
# Combined plots
# If this was done before plotting the others, calling the `Add` function
# on the `signal_4mu` histogram would modify the underlying `TH1D` object.
# Thus, the histogram with the 4 muons reconstruction would be lost,
# instead resulting in the same plot as the aggregated histograms.
h_sig_4l = signal_4mu
h_sig_4l.Add(signal_4el)
h_sig_4l.Add(signal_2el2mu)
h_bkg_4l = background_4mu
h_bkg_4l.Add(background_4el)
h_bkg_4l.Add(background_2el2mu)
h_data_4l = data_4mu
h_data_4l.Add(data_4el)
h_data_4l.Add(data_2el2mu)
# Plot aggregated histograms
plot(h_sig_4l, h_bkg_4l, h_data_4l, "m_{4l} (GeV)", "higgs_4l.pdf")
if __name__ == "__main__":
run_fast = True
df103_NanoAODHiggsAnalysis(run_fast)
|
karies/root
|
tutorials/dataframe/df103_NanoAODHiggsAnalysis.py
|
Python
|
lgpl-2.1
| 17,701
|
import gettext
import __builtin__
import math
__builtin__._ = gettext.gettext
from shadowcraft.core import exceptions
from shadowcraft.calcs import armor_mitigation
from shadowcraft.objects import class_data
from shadowcraft.objects import talents
from shadowcraft.objects import procs
from shadowcraft.objects.procs import InvalidProcException
class DamageCalculator(object):
# This method holds the general interface for a damage calculator - the
# sorts of parameters and calculated values that will be need by many (or
# most) classes if they implement a damage calculator using this framework.
# Not saying that will happen, but I want to leave my options open.
# Any calculations that are specific to a particular class should go in
# calcs.<class>.<Class>DamageCalculator instead - for an example, see
# calcs.rogue.RogueDamageCalculator
TARGET_BASE_ARMOR_VALUES = {88:11977., 93:24835., 103:100000.}
AOE_TARGET_CAP = 20
# Override this in your class specfic subclass to list appropriate stats
# possible values are agi, str, spi, int, haste, crit, mastery
default_ep_stats = []
# normalize_ep_stat is the stat with value 1 EP, override in your subclass
normalize_ep_stat = None
def __init__(self, stats, talents, glyphs, buffs, race, settings=None, level=100, target_level=None, char_class='rogue'):
self.WOW_BUILD_TARGET = '6.1.0' # should reflect the game patch being targetted
self.SHADOWCRAFT_BUILD = '1.0' # <1 for beta builds, 1.00 is GM, >1 for any bug fixes, reset for each warcraft patch
self.tools = class_data.Util()
self.stats = stats
self.talents = talents
self.glyphs = glyphs
self.buffs = buffs
self.race = race
self.char_class = char_class
self.settings = settings
self.target_level = [target_level, level + 3][target_level is None] #assumes 3 levels higher if not explicit
#racials
if self.race.race_name == 'undead':
self.stats.procs.set_proc('touch_of_the_grave')
if self.race.race_name == 'goblin':
self.stats.procs.set_proc('rocket_barrage')
self.level_difference = max(self.target_level - level, 0)
self.base_one_hand_miss_rate = 0
self.base_parry_chance = .01 * self.level_difference
self.base_dodge_chance = 0
self.dw_miss_penalty = .17
self._set_constants_for_class()
self.level = level
self.recalculate_hit_constants()
self.base_block_chance = .03 + .015 * self.level_difference
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
if name == 'level':
self._set_constants_for_level()
def __getattr__(self, name):
# Any status we haven't assigned a value to, we don't have.
if name == 'calculating_ep':
return False
object.__getattribute__(self, name)
def _set_constants_for_level(self):
self.buffs.level = self.level
self.stats.level = self.level
self.race.level = self.level
self.stats.gear_buffs.level = self.level
# calculate and cache the level-dependent armor mitigation parameter
self.armor_mitigation_parameter = armor_mitigation.parameter(self.level)
# target level dependent constants
try:
self.target_base_armor = self.TARGET_BASE_ARMOR_VALUES[self.target_level]
except KeyError as e:
raise exceptions.InvalidInputException(_('There\'s no armor value for a target level {level}').format(level=str(e)))
self.crit_reduction = .01 * self.level_difference
def _set_constants_for_class(self):
# These factors are class-specific. Generaly those go in the class module,
# unless it's basic stuff like combat ratings or base stats that we can
# datamine for all classes/specs at once.
if self.talents.game_class != self.glyphs.game_class:
raise exceptions.InvalidInputException(_('You must specify the same class for your talents and glyphs'))
self.game_class = self.talents.game_class
def recalculate_hit_constants(self):
self.base_dw_miss_rate = self.base_one_hand_miss_rate + self.dw_miss_penalty
def get_adv_param(self, type, default_val, min_bound=-10000, max_bound=10000, ignore_bounds=False):
if type in self.settings.adv_params and not ignore_bounds:
return max( min(float(self.settings.adv_params[type]), max_bound), min_bound )
elif type in self.settings.adv_params:
return self.settings.adv_params[type]
else:
return default_val
raise exceptions.InvalidInputException(_('Improperly defined parameter type: '+type))
def add_exported_data(self, damage_breakdown):
#used explicitly to highjack data outputs to export additional data.
if self.get_version_number:
damage_breakdown['version_' + self.WOW_BUILD_TARGET + '_' + self.SHADOWCRAFT_BUILD] = [.0, 0]
def set_rppm_uptime(self, proc):
#http://iam.yellingontheinternet.com/2013/04/12/theorycraft-201-advanced-rppm/
haste = 1.
if proc.haste_scales:
haste *= self.stats.get_haste_multiplier_from_rating(self.base_stats['haste']) * self.true_haste_mod
if proc.att_spd_scales:
haste *= 1.4
#The 1.1307 is a value that increases the proc rate due to bad luck prevention. It /should/ be constant among all rppm proc styles
if not proc.icd:
if proc.max_stacks <= 1:
proc.uptime = 1.1307 * (1 - math.e ** (-1 * haste * proc.get_rppm_proc_rate() * proc.duration / 60))
else:
lambd = haste * proc.get_rppm_proc_rate() * proc.duration / 60
e_lambda = math.e ** lambd
e_minus_lambda = math.e ** (-1 * lambd)
proc.uptime = 1.1307 * (e_lambda - 1) * (1 - ((1 - e_minus_lambda) ** proc.max_stacks))
else:
mean_proc_time = 60. / (haste * proc.get_rppm_proc_rate()) + proc.icd - min(proc.icd, 10)
proc.uptime = 1.1307 * proc.duration / mean_proc_time
def set_uptime(self, proc, attacks_per_second, crit_rates):
if proc.is_real_ppm():
self.set_rppm_uptime(proc)
else:
procs_per_second = self.get_procs_per_second(proc, attacks_per_second, crit_rates)
if proc.icd:
proc.uptime = proc.duration / (proc.icd + 1. / procs_per_second)
else:
if procs_per_second >= 1:
self.set_uptime_for_ramping_proc(proc, procs_per_second)
else:
# See http://elitistjerks.com/f31/t20747-advanced_rogue_mechanics_discussion/#post621369
# for the derivation of this formula.
q = 1 - procs_per_second
Q = q ** proc.duration
if Q < .0001:
self.set_uptime_for_ramping_proc(proc, procs_per_second)
else:
P = 1 - Q
proc.uptime = P * (1 - P ** proc.max_stacks) / Q
def average_damage_breakdowns(self, aps_dict, denom=180):
final_breakdown = {}
#key: phase name
#number: place in tuple... tuple = (phase_length, dps_breakdown)
#entry: DPS skill_name
#denom: total duration (to divide phase duration by it)
for key in aps_dict:
for entry in aps_dict[key][1]:
if entry in final_breakdown:
final_breakdown[entry] += aps_dict[key][1][entry] * (aps_dict[key][0]/denom)
else:
final_breakdown[entry] = aps_dict[key][1][entry] * (aps_dict[key][0]/denom)
return final_breakdown
def ep_helper(self, stat):
setattr(self.stats, stat, getattr(self.stats, stat) + 1.)
dps = self.get_dps()
setattr(self.stats, stat, getattr(self.stats, stat) - 1.)
return dps
def get_ep(self, ep_stats=None, normalize_ep_stat=None, baseline_dps=None):
if not normalize_ep_stat:
normalize_ep_stat = self.get_adv_param('normalize_stat', self.settings.default_ep_stat, ignore_bounds=True)
if not ep_stats:
ep_stats = self.default_ep_stats
if baseline_dps == None:
baseline_dps = self.get_dps()
if normalize_ep_stat == 'dps':
normalize_dps_difference = 1.
else:
normalize_dps = self.ep_helper(normalize_ep_stat)
normalize_dps_difference = normalize_dps - baseline_dps
if normalize_dps_difference == 0:
normalize_dps_difference = 1
ep_values = {}
for stat in ep_stats:
ep_values[stat] = 1.0
if normalize_ep_stat != stat:
dps = self.ep_helper(stat)
ep_values[stat] = abs(dps - baseline_dps) / normalize_dps_difference
return ep_values
def get_weapon_ep(self, speed_list=None, dps=False, enchants=False, normalize_ep_stat=None):
if not normalize_ep_stat:
normalize_ep_stat = self.normalize_ep_stat
weapons = ('mh', 'oh')
if speed_list is not None or dps:
baseline_dps = self.get_dps()
normalize_dps = self.ep_helper(normalize_ep_stat)
for hand in weapons:
ep_values = {}
# Weapon dps EP
if dps:
getattr(self.stats, hand).weapon_dps += 1.
new_dps = self.get_dps()
ep = abs(new_dps - baseline_dps) / (normalize_dps - baseline_dps)
ep_values[hand + '_dps'] = ep
getattr(self.stats, hand).weapon_dps -= 1.
# Enchant EP
if enchants:
old_enchant = None
for enchant in getattr(self.stats, hand).allowed_melee_enchants:
if getattr(getattr(self.stats, hand), enchant):
old_enchant = enchant
getattr(self.stats, hand).del_enchant()
no_enchant_dps = self.get_dps()
no_enchant_normalize_dps = self.ep_helper(normalize_ep_stat)
for enchant in getattr(self.stats, hand).allowed_melee_enchants:
getattr(self.stats, hand).set_enchant(enchant)
new_dps = self.get_dps()
if new_dps != no_enchant_dps:
ep = abs(new_dps - no_enchant_dps) / (no_enchant_normalize_dps - no_enchant_dps)
ep_values[hand + '_' + enchant] = ep
getattr(self.stats, hand).set_enchant(old_enchant)
# Weapon speed EP
if speed_list is not None:
old_speed = getattr(self.stats, hand).speed
for speed in speed_list:
getattr(self.stats, hand).speed = speed
new_dps = self.get_dps()
ep = (new_dps - baseline_dps) / (normalize_dps - baseline_dps)
ep_values[hand + '_' + str(speed)] = ep
getattr(self.stats, hand).speed = old_speed
if hand == 'mh':
mh_ep_values = ep_values
elif hand == 'oh':
oh_ep_values = ep_values
return mh_ep_values, oh_ep_values
def get_weapon_type_ep(self, normalize_ep_stat=None):
if not normalize_ep_stat:
normalize_ep_stat = self.normalize_ep_stat
weapons = ('mh', 'oh')
baseline_dps = self.get_dps()
normalize_dps = self.ep_helper(normalize_ep_stat)
mh_ep_values = {}
oh_ep_values = {}
for hand in weapons:
ep_values = {}
old_type = getattr(self.stats, hand).type
for wtype in ('dagger', 'one-hander'):
getattr(self.stats, hand).type = wtype
new_dps = self.get_dps()
ep = (new_dps - baseline_dps) / (normalize_dps - baseline_dps)
ep_values[hand + '_type_' + wtype] = ep
getattr(self.stats, hand).type = old_type
if hand == 'mh':
mh_ep_values = ep_values
elif hand == 'oh':
oh_ep_values = ep_values
return mh_ep_values, oh_ep_values
def get_weapon_type_modifier_helper(self, setups=None):
# Use this method if you want to test different weapon setups. It will
# return one value per setup including the current one. It takes setups
# like this one:
# (
# {'hand':'mh', 'type':mh_type, 'speed':mh_speed},
# {'hand':'oh', 'type':oh_type, 'speed':oh_speed}
# )
modifiers = {}
weapons = ('mh', 'oh')
baseline_setup = []
for hand in weapons:
weapon = getattr(self.stats, hand)
baseline_setup.append((hand, weapon.speed, weapon.type))
modifiers[tuple(baseline_setup)] = 1
if not setups:
return modifiers
baseline_dps = self.get_dps()
for setup in setups:
current_setup = []
assert len(setup) == 2
for hand in setup:
if hand is not None:
weapon = getattr(self.stats, hand['hand'])
weapon.speed = hand['speed']
weapon.type = hand['type']
current_setup.append((hand['hand'], hand['speed'], hand['type']))
try:
new_dps = self.get_dps()
if new_dps != baseline_dps:
modifiers[tuple(current_setup)] = new_dps / baseline_dps
except InputNotModeledException:
modifiers[tuple(current_setup)] = _('not allowed')
for hand in baseline_setup:
hand_name, speed, type = hand
weapon = getattr(self.stats, hand_name)
weapon.speed = speed
weapon.type = type
return modifiers
def get_oh_weapon_modifier(self, setups, format=True):
# Override this in your modeler to pass default oh weapons to test.
modifiers = self.get_weapon_type_modifier_helper(setups)
if not format:
return modifiers
formatted_mods = {}
for setup in modifiers:
for hand in setup:
if hand[0] == 'mh':
continue
formatted_mods['_'.join((hand[0], str(hand[1]), hand[2]))] = modifiers[setup]
return formatted_mods
def get_dw_weapon_modifier(self, setups, format=True):
# Override this in your modeler to pass default dw setups to test.
modifiers = self.get_weapon_type_modifier_helper(setups)
pass
def get_2h_weapon_modifier(self, setups, format=True):
# Override this in your modeler to pass default 2h setups to test.
modifiers = self.get_weapon_type_modifier_helper(setups)
pass
def get_other_ep(self, list, normalize_ep_stat=None):
if not normalize_ep_stat:
normalize_ep_stat = self.normalize_ep_stat
# This method computes ep for every other buff/proc not covered by
# get_ep or get_weapon_ep. Weapon enchants, being tied to the
# weapons they are on, are computed by get_weapon_ep.
ep_values = {}
baseline_dps = self.get_dps()
if normalize_ep_stat == 'dps':
normalize_dps_difference = 1.
else:
normalize_dps = self.ep_helper(normalize_ep_stat)
normalize_dps_difference = normalize_dps - baseline_dps
procs_list = []
gear_buffs_list = []
for i in list:
if i in self.stats.procs.allowed_procs:
procs_list.append(i)
elif i in self.stats.gear_buffs.allowed_buffs:
gear_buffs_list.append(i)
else:
ep_values[i] = _('not allowed')
for i in gear_buffs_list:
# Note that activated abilites like trinkets, potions, or
# engineering gizmos are handled as gear buffs by the engine.
setattr(self.stats.gear_buffs, i, not getattr(self.stats.gear_buffs, i))
new_dps = self.get_dps()
ep_values[i] = abs(new_dps - baseline_dps) / (normalize_dps_difference)
setattr(self.stats.gear_buffs, i, not getattr(self.stats.gear_buffs, i))
for i in procs_list:
try:
if getattr(self.stats.procs, i):
delattr(self.stats.procs, i)
else:
self.stats.procs.set_proc(i)
new_dps = self.get_dps()
ep_values[i] = abs(new_dps - baseline_dps) / (normalize_dps_difference)
if getattr(self.stats.procs, i):
delattr(self.stats.procs, i)
else:
self.stats.procs.set_proc(i)
except InvalidProcException:
# Data for these procs is not complete/correct
ep_values[i] = _('not supported')
delattr(self.stats.procs, i)
return ep_values
def get_upgrades_ep(self, _list, normalize_ep_stat=None):
if not normalize_ep_stat:
normalize_ep_stat = self.normalize_ep_stat
# This method computes ep for every other buff/proc not covered by
# get_ep or get_weapon_ep. Weapon enchants, being tied to the
# weapons they are on, are computed by get_weapon_ep.
active_procs_cache = []
procs_list = []
ep_values = {}
for i in _list:
if i in self.stats.procs.allowed_procs:
procs_list.append( (i, _list[i]) )
if getattr(self.stats.procs, i):
active_procs_cache.append((i, getattr(self.stats.procs, i).item_level))
delattr(self.stats.procs, i)
else:
ep_values[i] = _('not allowed')
baseline_dps = self.get_dps()
normalize_dps = self.ep_helper(normalize_ep_stat)
for i in procs_list:
proc_name, item_levels = i
ep_values[proc_name] = {}
try:
if getattr(self.stats.procs, proc_name):
old_proc = getattr(self.stats.procs, proc_name)
delattr(self.stats.procs, proc_name)
base_dps = self.get_dps()
base_normalize_dps = self.ep_helper(normalize_ep_stat)
else:
old_proc = False
base_dps = baseline_dps
base_normalize_dps = normalize_dps
self.stats.procs.set_proc(proc_name)
proc = getattr(self.stats.procs, proc_name)
for group in item_levels:
if not isinstance(group, (list,tuple)):
group = group,
for l in group:
proc.item_level = l
proc.update_proc_value() # after setting item_level re-set the proc value
new_dps = self.get_dps()
if new_dps != base_dps:
ep = abs(new_dps - base_dps) / (base_normalize_dps - base_dps)
ep_values[proc_name][l] = ep
if old_proc:
self.stats.procs.set_proc(proc_name)
else:
delattr(self.stats.procs, proc_name)
except InvalidProcException:
# Data for these procs is not complete/correct
ep_values[i].append(_('not supported'))
delattr(self.stats.procs, proc_name)
for proc in active_procs_cache:
self.stats.procs.set_proc(proc[0])
getattr(self.stats.procs, proc[0]).item_level = proc[1]
return ep_values
# this function is in comparison to get_upgrades_ep a lot faster but not 100% accurate
# the error is around 1% which is accurate enough for the ranking in Shadowcraft-UI
def get_upgrades_ep_fast(self, _list, normalize_ep_stat=None):
if not normalize_ep_stat:
normalize_ep_stat = self.normalize_ep_stat
# This method computes ep for every other buff/proc not covered by
# get_ep or get_weapon_ep. Weapon enchants, being tied to the
# weapons they are on, are computed by get_weapon_ep.
active_procs_cache = []
procs_list = []
ep_values = {}
for i in _list:
if i in self.stats.procs.allowed_procs:
procs_list.append( (i, _list[i]) )
if getattr(self.stats.procs, i):
active_procs_cache.append((i, getattr(self.stats.procs, i).item_level))
delattr(self.stats.procs, i)
else:
ep_values[i] = _('not allowed')
baseline_dps = self.get_dps()
normalize_dps = self.ep_helper(normalize_ep_stat)
for i in procs_list:
proc_name, item_levels = i
ep_values[proc_name] = {}
try:
if getattr(self.stats.procs, proc_name):
old_proc = getattr(self.stats.procs, proc_name)
delattr(self.stats.procs, proc_name)
base_dps = self.get_dps()
base_normalize_dps = self.ep_helper(normalize_ep_stat)
else:
old_proc = False
base_dps = baseline_dps
base_normalize_dps = normalize_dps
self.stats.procs.set_proc(proc_name)
proc = getattr(self.stats.procs, proc_name)
for group in item_levels:
if not isinstance(group, (list,tuple)):
group = group,
if proc.scaling:
proc.item_level = group[0]
proc.update_proc_value() # after setting item_level re-set the proc value
item_level = proc.item_level
if proc.proc_name == 'Rune of Re-Origination':
scale_factor = 1/(1.15**((528-item_level)/15.0)) * proc.base_ppm
else:
scale_factor = self.tools.get_random_prop_point(item_level)
new_dps = self.get_dps()
if new_dps != base_dps:
for l in group:
ep = abs(new_dps - base_dps) / (base_normalize_dps - base_dps)
if l > proc.item_level:
if proc.proc_name == 'Rune of Re-Origination':
upgraded_scale_factor = 1/(1.15**((528-(l))/15.0)) * proc.base_ppm
else:
upgraded_scale_factor = self.tools.get_random_prop_point(l)
ep *= float(upgraded_scale_factor) / float(scale_factor)
ep_values[proc_name][l] = ep
if old_proc:
self.stats.procs.set_proc(proc_name)
else:
delattr(self.stats.procs, proc_name)
except InvalidProcException:
# Data for these procs is not complete/correct
ep_values[i].append(_('not supported'))
delattr(self.stats.procs, i)
for proc in active_procs_cache:
self.stats.procs.set_proc(proc[0])
getattr(self.stats.procs, proc[0]).item_level = proc[1]
return ep_values
def get_glyphs_ranking(self, list=None):
glyphs = []
glyphs_ranking = {}
baseline_dps = self.get_dps()
if list == None:
glyphs = self.glyphs.allowed_glyphs
else:
glyphs = list
for i in glyphs:
setattr(self.glyphs, i, not getattr(self.glyphs, i))
try:
new_dps = self.get_dps()
if new_dps != baseline_dps:
glyphs_ranking[i] = abs(new_dps - baseline_dps)
except:
glyphs_ranking[i] = _('not implemented')
setattr(self.glyphs, i, not getattr(self.glyphs, i))
return glyphs_ranking
def get_talents_ranking(self, list=None):
talents_ranking = {}
baseline_dps = self.get_dps()
talent_list = []
if list is None:
talent_list = self.talents.get_allowed_talents_for_level()
else:
talent_list = list
for talent in talent_list:
setattr(self.talents, talent, not getattr(self.talents, talent))
try:
new_dps = self.get_dps()
if new_dps != baseline_dps:
talents_ranking[talent] = abs(new_dps - baseline_dps)
except:
talents_ranking[talent] = _('not implemented')
setattr(self.talents, talent, not getattr(self.talents, talent))
return talents_ranking
def get_engine_info(self):
data = {
'wow_build_target': self.WOW_BUILD_TARGET,
'shadowcraft_build': self.SHADOWCRAFT_BUILD
}
return data
def get_dps(self):
# Overwrite this function with your calculations/simulations/whatever;
# this is what callers will (initially) be looking at.
pass
#def get_all_activated_stat_boosts(self):
# racial_boosts = self.race.get_racial_stat_boosts()
# gear_boosts = self.stats.gear_buffs.get_all_activated_boosts()
# return racial_boosts + gear_boosts
def armor_mitigation_multiplier(self, armor):
return armor_mitigation.multiplier(armor, cached_parameter=self.armor_mitigation_parameter)
def max_level_armor_multiplier(self):
return 3610.0 / (3610.0 + 1938.0)
def get_trinket_cd_reducer(self):
trinket_cd_reducer_value = .0
proc = getattr(self.stats.procs, 'assurance_of_consequence')
if proc and proc.scaling:
trinket_cd_reducer_value = 0.2532840073 / 100 * self.tools.get_random_prop_point(proc.item_level)
if self.level == 100:
trinket_cd_reducer_value *= 23./110.
return 1 / (1 + trinket_cd_reducer_value)
return 1
def armor_mitigate(self, damage, armor):
# Pass in raw physical damage and armor value, get armor-mitigated
# damage value.
return damage * self.armor_mitigation_multiplier(armor)
def melee_hit_chance(self, base_miss_chance, dodgeable, parryable, weapon_type, blockable=False, bonus_hit=0):
miss_chance = base_miss_chance
# Expertise represented as the reduced chance to be dodged, not true "Expertise".
if dodgeable:
dodge_chance = self.base_dodge_chance
else:
dodge_chance = 0
if parryable:
# Expertise will negate dodge and spell miss, *then* parry
parry_expertise = max(expertise - self.base_dodge_chance, 0)
parry_chance = max(self.base_parry_chance - parry_expertise, 0)
else:
parry_chance = 0
block_chance = self.base_block_chance * blockable
return (1 - (miss_chance + dodge_chance + parry_chance)) * (1 - block_chance)
def melee_spells_hit_chance(self, bonus_hit=0):
hit_chance = self.melee_hit_chance(self.base_one_hand_miss_rate, dodgeable=False, parryable=False, weapon_type=None)
return hit_chance
def one_hand_melee_hit_chance(self, dodgeable=False, parryable=False, weapon=None, bonus_hit=0):
# Most attacks by DPS aren't parryable due to positional negation. But
# if you ever want to attacking from the front, you can just set that
# to True.
if weapon == None:
weapon = self.stats.mh
hit_chance = self.melee_hit_chance(self.base_one_hand_miss_rate, dodgeable, parryable, weapon.type)
return hit_chance
def off_hand_melee_hit_chance(self, dodgeable=False, parryable=False, weapon=None, bonus_hit=0):
# Most attacks by DPS aren't parryable due to positional negation. But
# if you ever want to attacking from the front, you can just set that
# to True.
if weapon == None:
weapon = self.stats.oh
hit_chance = self.melee_hit_chance(self.base_one_hand_miss_rate, dodgeable, parryable, weapon.type)
return hit_chance
def dual_wield_mh_hit_chance(self, dodgeable=False, parryable=False, dw_miss=None):
# Most attacks by DPS aren't parryable due to positional negation. But
# if you ever want to attacking from the front, you can just set that
# to True.
hit_chance = self.dual_wield_hit_chance(dodgeable, parryable, self.stats.mh.type, dw_miss=dw_miss)
return hit_chance
def dual_wield_oh_hit_chance(self, dodgeable=False, parryable=False, dw_miss=None):
# Most attacks by DPS aren't parryable due to positional negation. But
# if you ever want to attacking from the front, you can just set that
# to True.
hit_chance = self.dual_wield_hit_chance(dodgeable, parryable, self.stats.oh.type, dw_miss=dw_miss)
return hit_chance
def dual_wield_hit_chance(self, dodgeable, parryable, weapon_type, dw_miss=None):
if not dw_miss:
dw_miss = self.base_dw_miss_rate
hit_chance = self.melee_hit_chance(dw_miss, dodgeable, parryable, weapon_type)
return hit_chance
def buff_melee_crit(self):
return self.buffs.buff_all_crit()
def crit_damage_modifiers(self, crit_damage_bonus_modifier=1):
# The obscure formulae for the different crit enhancers can be found here
# http://elitistjerks.com/f31/t13300-shaman_relentless_earthstorm_ele/#post404567
base_modifier = 2
crit_damage_modifier = self.stats.gear_buffs.metagem_crit_multiplier()
if self.race.might_of_the_mountain:
crit_damage_modifier *= 1.02 #2x base becomes 2.04x with MotM
total_modifier = 1 + (base_modifier * crit_damage_modifier - 1) * crit_damage_bonus_modifier
return total_modifier
def target_armor(self, armor=None):
# Passes base armor reduced by armor debuffs or overridden armor
if armor is None:
armor = self.target_base_armor
return armor #* self.buffs.armor_reduction_multiplier()
def raid_settings_modifiers(self, attack_kind, armor=None, affect_resil=True):
# This function wraps spell, bleed and physical debuffs from raid
# along with all-damage buff and armor reduction. It should be called
# from every damage dealing formula. Armor can be overridden if needed.
if attack_kind not in ('physical', 'spell', 'bleed'):
raise exceptions.InvalidInputException(_('Attacks must be categorized as physical, spell or bleed'))
elif attack_kind == 'spell':
return self.buffs.spell_damage_multiplier()
elif attack_kind == 'bleed':
return self.buffs.bleed_damage_multiplier()
elif attack_kind == 'physical':
armor_override = self.target_armor(armor)
return self.buffs.physical_damage_multiplier() * self.armor_mitigation_multiplier(armor_override)
|
Pathal/ShadowCraft-Engine
|
shadowcraft/calcs/__init__.py
|
Python
|
lgpl-3.0
| 31,557
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UI_RigDisplay.ui'
#
# Created: Wed Mar 21 21:43:33 2018
# by: pyside-uic 0.2.14 running on PySide 1.2.0
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(236, 179)
self.verticalLayout = QtGui.QVBoxLayout(Form)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.ChangeJointdrawStyle = QtGui.QPushButton(Form)
self.ChangeJointdrawStyle.setMaximumSize(QtCore.QSize(100, 30))
self.ChangeJointdrawStyle.setObjectName("ChangeJointdrawStyle")
self.horizontalLayout.addWidget(self.ChangeJointdrawStyle)
self.verticalLayout.addLayout(self.horizontalLayout)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.ChangeJointdrawStyle.setText(QtGui.QApplication.translate("Form", "Joint DrawStyle", None, QtGui.QApplication.UnicodeUTF8))
|
rendermotion/RMPY
|
Tools/QT4/ui/FormRigDisplay.py
|
Python
|
lgpl-3.0
| 1,331
|
""" Implementaiton of a population for maintaining a GA population and
proposing structures to pair. """
from random import randrange, random
from math import tanh, sqrt, exp
from operator import itemgetter
import numpy as np
from ase.db.core import now
def count_looks_like(a, all_cand, comp):
"""Utility method for counting occurences."""
n = 0
for b in all_cand:
if a.info['confid'] == b.info['confid']:
continue
if comp.looks_like(a, b):
n += 1
return n
class Population(object):
"""Population class which maintains the current population
and proposes which candidates to pair together.
Parameters:
data_connection: DataConnection object
Bla bla bla.
population_size: int
The number of candidates in the population.
comparator: Comparator object
this will tell if two configurations are equal.
Default compare atoms objects directly.
logfile: str
Text file that contains information about the population
The format is::
timestamp: generation(if available): id1,id2,id3...
Using this file greatly speeds up convergence checks.
Default None meaning that no file is written.
use_extinct: boolean
Set this to True if mass extinction and the extinct key
are going to be used. Default is False.
"""
def __init__(self, data_connection, population_size,
comparator=None, logfile=None, use_extinct=False):
self.dc = data_connection
self.pop_size = population_size
if comparator is None:
from ase.ga.standard_comparators import AtomsComparator
comparator = AtomsComparator()
self.comparator = comparator
self.logfile = logfile
self.use_extinct = use_extinct
self.pop = []
self.pairs = None
self.all_cand = None
self.__initialize_pop__()
def __initialize_pop__(self):
""" Private method that initalizes the population when
the population is created. """
# Get all relaxed candidates from the database
ue = self.use_extinct
all_cand = self.dc.get_all_relaxed_candidates(use_extinct=ue)
all_cand.sort(key=lambda x: x.get_raw_score(), reverse=True)
# all_cand.sort(key=lambda x: x.get_potential_energy())
# Fill up the population with the self.pop_size most stable
# unique candidates.
i = 0
while i < len(all_cand) and len(self.pop) < self.pop_size:
c = all_cand[i]
i += 1
eq = False
for a in self.pop:
if self.comparator.looks_like(a, c):
eq = True
break
if not eq:
self.pop.append(c)
for a in self.pop:
a.info['looks_like'] = count_looks_like(a, all_cand,
self.comparator)
self.all_cand = all_cand
self.__calc_participation__()
def __calc_participation__(self):
""" Determines, from the database, how many times each
candidate has been used to generate new candidates. """
(participation, pairs) = self.dc.get_participation_in_pairing()
for a in self.pop:
if a.info['confid'] in participation.keys():
a.info['n_paired'] = participation[a.info['confid']]
else:
a.info['n_paired'] = 0
self.pairs = pairs
def update(self, new_cand=None):
""" New candidates can be added to the database
after the population object has been created.
This method extracts these new candidates from the
database and includes them in the population. """
if len(self.pop) == 0:
self.__initialize_pop__()
if new_cand is None:
ue = self.use_extinct
new_cand = self.dc.get_all_relaxed_candidates(only_new=True,
use_extinct=ue)
for a in new_cand:
self.__add_candidate__(a)
self.all_cand.append(a)
self.__calc_participation__()
self._write_log()
def get_current_population(self):
""" Returns a copy of the current population. """
self.update()
return [a.copy() for a in self.pop]
def get_population_after_generation(self, gen):
""" Returns a copy of the population as it where
after generation gen"""
if self.logfile is not None:
f = open(self.logfile, 'r')
gens = {}
for l in f:
_, no, popul = l.split(':')
gens[int(no)] = [int(i) for i in popul.split(',')]
f.close()
return [c.copy() for c in self.all_cand[::-1]
if c.info['relax_id'] in gens[gen]]
all_candidates = [c for c in self.all_cand
if c.info['key_value_pairs']['generation'] <= gen]
cands = [all_candidates[0]]
for b in all_candidates:
if b not in cands:
for a in cands:
if self.comparator.looks_like(a, b):
break
else:
cands.append(b)
pop = cands[:self.pop_size]
return [a.copy() for a in pop]
def __add_candidate__(self, a):
""" Adds a single candidate to the population. """
# check if the structure is too low in raw score
if a.get_raw_score() < self.pop[-1].get_raw_score() \
and len(self.pop) == self.pop_size:
return
# check if the new candidate should
# replace a similar structure in the population
for (i, b) in enumerate(self.pop):
if self.comparator.looks_like(a, b):
if b.get_raw_score() < a.get_raw_score():
del self.pop[i]
a.info['looks_like'] = count_looks_like(a,
self.all_cand,
self.comparator)
self.pop.append(a)
self.pop.sort(key=lambda x: x.get_raw_score(),
reverse=True)
return
# the new candidate needs to be added, so remove the highest
# energy one
if len(self.pop) == self.pop_size:
del self.pop[-1]
# add the new candidate
a.info['looks_like'] = count_looks_like(a,
self.all_cand,
self.comparator)
self.pop.append(a)
self.pop.sort(key=lambda x: x.get_raw_score(), reverse=True)
def __get_fitness__(self, indecies, with_history=True):
"""Calculates the fitness using the formula from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
Sign change on the fitness compared to the formulation in the
abovementioned paper due to maximizing raw_score instead of
minimizing energy. (Set raw_score=-energy to optimize the energy)
"""
scores = [x.get_raw_score() for x in self.pop]
min_s = min(scores)
max_s = max(scores)
T = min_s - max_s
if isinstance(indecies, int):
indecies = [indecies]
f = [0.5 * (1. - tanh(2. * (scores[i] - max_s) / T - 1.))
for i in indecies]
if with_history:
M = [float(self.pop[i].info['n_paired']) for i in indecies]
L = [float(self.pop[i].info['looks_like']) for i in indecies]
f = [f[i] * 1. / sqrt(1. + M[i]) * 1. / sqrt(1. + L[i])
for i in range(len(f))]
return f
def get_two_candidates(self, with_history=True):
""" Returns two candidates for pairing employing the
fitness criteria from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
and the roulete wheel selection scheme described in
R.L. Johnston Dalton Transactions,
Vol. 22, No. 22. (2003), pp. 4193-4207
"""
if len(self.pop) < 2:
self.update()
if len(self.pop) < 2:
return None
fit = self.__get_fitness__(range(len(self.pop)), with_history)
fmax = max(fit)
c1 = self.pop[0]
c2 = self.pop[0]
used_before = False
while c1.info['confid'] == c2.info['confid'] and not used_before:
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c1 = self.pop[t]
nnf = False
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c2 = self.pop[t]
nnf = False
c1id = c1.info['confid']
c2id = c2.info['confid']
used_before = (min([c1id, c2id]), max([c1id, c2id])) in self.pairs
return (c1.copy(), c2.copy())
def get_one_candidate(self, with_history=True):
"""Returns one candidate for mutation employing the
fitness criteria from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
and the roulete wheel selection scheme described in
R.L. Johnston Dalton Transactions,
Vol. 22, No. 22. (2003), pp. 4193-4207
"""
if len(self.pop) < 1:
self.update()
if len(self.pop) < 1:
return None
fit = self.__get_fitness__(range(len(self.pop)), with_history)
fmax = max(fit)
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c1 = self.pop[t]
nnf = False
return c1.copy()
def _write_log(self):
"""Writes the population to a logfile.
The format is::
timestamp: generation(if available): id1,id2,id3..."""
if self.logfile is not None:
ids = [str(a.info['relax_id']) for a in self.pop]
if ids != []:
try:
gen_nums = [c.info['key_value_pairs']['generation']
for c in self.all_cand]
max_gen = max(gen_nums)
except KeyError:
max_gen = ' '
f = open(self.logfile, 'a')
f.write('{time}: {gen}: {pop}\n'.format(time=now(),
pop=','.join(ids),
gen=max_gen))
f.close()
def is_uniform(self, func, min_std, pop=None):
"""Tests whether the current population is uniform or diverse.
Returns True if uniform, False otherwise.
Parameters:
func: function
that takes one argument an atoms object and returns a value that
will be used for testing against the rest of the population.
min_std: int or float
The minimum standard deviation, if the population has a lower
std dev it is uniform.
pop: list, optional
use this list of Atoms objects instead of the current population.
"""
if pop is None:
pop = self.pop
vals = [func(a) for a in pop]
stddev = np.std(vals)
if stddev < min_std:
return True
return False
def mass_extinction(self, ids):
"""Kills every candidate in the database with gaid in the
supplied list of ids. Typically used on the main part of the current
population if the diversity is to small.
Parameters:
ids: list
list of ids of candidates to be killed.
"""
for confid in ids:
self.dc.kill_candidate(confid)
self.pop = []
class RandomPopulation(Population):
def __init__(self, data_connection, population_size,
comparator=None, logfile=None, exclude_used_pairs=False,
bad_candidates=0, use_extinct=False):
self.exclude_used_pairs = exclude_used_pairs
self.bad_candidates = bad_candidates
Population.__init__(self, data_connection, population_size,
comparator, logfile, use_extinct)
def __initialize_pop__(self):
""" Private method that initalizes the population when
the population is created. """
# Get all relaxed candidates from the database
ue = self.use_extinct
all_cand = self.dc.get_all_relaxed_candidates(use_extinct=ue)
all_cand.sort(key=lambda x: x.get_raw_score(), reverse=True)
# all_cand.sort(key=lambda x: x.get_potential_energy())
if len(all_cand) > 0:
# Fill up the population with the self.pop_size most stable
# unique candidates.
ratings = []
best_raw = all_cand[0].get_raw_score()
i = 0
while i < len(all_cand):
c = all_cand[i]
i += 1
eq = False
for a in self.pop:
if self.comparator.looks_like(a, c):
eq = True
break
if not eq:
if len(self.pop) < self.pop_size - self.bad_candidates:
self.pop.append(c)
else:
exp_fact = exp(c.get_raw_score() / best_raw)
ratings.append([c, (exp_fact - 1) * random()])
ratings.sort(key=itemgetter(1), reverse=True)
for i in range(self.bad_candidates):
self.pop.append(ratings[i][0])
for a in self.pop:
a.info['looks_like'] = count_looks_like(a, all_cand,
self.comparator)
self.all_cand = all_cand
self.__calc_participation__()
def update(self):
""" The update method in Population will add to the end of
the population, that can't be used here since we might have
bad candidates that need to stay in the population, therefore
just recalc the population every time. """
self.pop = []
self.__initialize_pop__()
self._write_log()
def get_one_candidate(self):
"""Returns one candidates at random."""
if len(self.pop) < 1:
self.update()
if len(self.pop) < 1:
return None
t = randrange(0, len(self.pop), 1)
c = self.pop[t]
return c.copy()
def get_two_candidates(self):
"""Returns two candidates at random."""
if len(self.pop) < 2:
self.update()
if len(self.pop) < 2:
return None
c1 = self.pop[0]
c2 = self.pop[0]
used_before = False
while c1.info['confid'] == c2.info['confid'] and not used_before:
t = randrange(0, len(self.pop), 1)
c1 = self.pop[t]
t = randrange(0, len(self.pop), 1)
c2 = self.pop[t]
c1id = c1.info['confid']
c2id = c2.info['confid']
used_before = (tuple(sorted([c1id, c2id])) in self.pairs
and self.exclude_used_pairs)
return (c1.copy(), c2.copy())
|
suttond/MODOI
|
ase/ga/population.py
|
Python
|
lgpl-3.0
| 15,749
|
"""This module provides graph theory functionality."""
from heapq import heapify, heappop, heappush
def dijkstra(nodes, edges, startNode, directed):
"""Finds the length between each node in the graph and the startNode.
Arguments:
nodes - the set of nodes in the graph.
edges - the set of edges in the graph. Each edge should be a 3-tuple
containing the source and destination nodes, as well as the
(non-negative) weight of that edge (in that order).
startNode - the starting node for the search.
directed - boolean, should the graph be treated as directed,
or instead indirected?
Returns: a dict whose keys are nodes and whose values are the smallest cost
to get from startNode to that particular node.
E.g.
# example: from wikipedia
nodes = ["1", "2", "3", "4", "5", "6"]
edges = [("1", "2", 7), ("1", "3", 9), ("1", "6", 14), ("2", "3", 10),
("2", "4", 15), ("3", "4", 11), ("3", "6", 2), ("4", "5", 6),
("5", "6", 9)]
d = dijkstra(set(nodes), set(edges), "1", True)
print d
"""
#construct a dict holding each nodes' neighbours and the cost to them
neighbours = dict([(node, []) for node in nodes])
for edge in edges:
neighbours[edge[0]].append((edge[1], edge[2]))
if not directed:
neighbours[edge[1]].append((edge[0], edge[2]))
#to every node assign a distance (starting with +inf and zero for startNode)
distance = dict([(node, float("inf")) for node in nodes])
distance[startNode] = 0
#mark every node as unvisited
visited = dict([(node, False) for node in nodes])
#main part of algorithm
unvisitedQ = [(0, startNode)]
unvisitedQSet = set([startNode])
while len(unvisitedQ) != 0:
currentNode = heapq.heappop(unvisitedQ)[1]
unvisitedQSet.remove(currentNode)
for (node, edgeWeight) in neighbours[currentNode]:
if not visited[node]:
if distance[currentNode] + edgeWeight < distance[node]:
distance[node] = distance[currentNode] + edgeWeight
if node not in unvisitedQSet:
heapq.heappush(unvisitedQ, (distance[node], node))
unvisitedQSet.add(node)
visited[currentNode] = True
return distance
def prim(adj_list, start_v):
"""Finds a minimal spanning tree given a graph's adjacency list. The list
should be a dictionary whose keys are vertices. The values should be lists
of 2-tuples. Each 2-tuple should contain a 'to vertex' and a weight.
Returned is a list of edges in the minimal spanning tree, each a 3-tuple
containing the 'to vertex', 'from vertex', and weight.
E.g. from wikipedia:
a = {'A': [('B', 7), ('D', 5)],
'B': [('A', 7), ('C', 8), ('D', 9), ('E', 7)],
'C': [('B', 8), ('E', 5)],
'D': [('A', 5), ('B', 9), ('E', 15), ('F', 6)],
'E': [('B', 7), ('C', 5), ('D', 15), ('F', 8), ('G', 9)],
'F': [('D', 6), ('E', 8), ('G', 11)],
'G': [('E', 9), ('F', 11)]}
for from_v, to_v, weight in prim(a, 'A'):
print from_v, to_v, weight
"""
n = len(adj_list)
v_new = [start_v]
e_new = []
q = [(weight, start_v, to_v) for to_v, weight in adj_list[start_v]]
heapify(q)
while len(v_new) != n:
while True:
weight, from_v, to_v = heappop(q)
if from_v not in v_new or to_v not in v_new:
break
v_new.append(to_v)
e_new.append((from_v, to_v, weight))
for next_to_v, weight in adj_list[to_v]:
if next_to_v not in v_new:
heappush(q, (weight, to_v, next_to_v))
return e_new
|
peterstace/project-euler
|
OLD_PY_CODE/project_euler_old_old/Resources/graph_theory.py
|
Python
|
unlicense
| 3,807
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing `LinearOperator` and sub-classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import itertools
import numpy as np
import six
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.platform import test
class OperatorShapesInfo(object):
"""Object encoding expected shape for a test.
Encodes the expected shape of a matrix for a test. Also
allows additional metadata for the test harness.
"""
def __init__(self, shape, **kwargs):
self.shape = shape
self.__dict__.update(kwargs)
class CheckTapeSafeSkipOptions(object):
# Skip checking this particular method.
DETERMINANT = "determinant"
DIAG_PART = "diag_part"
LOG_ABS_DETERMINANT = "log_abs_determinant"
TRACE = "trace"
@six.add_metaclass(abc.ABCMeta) # pylint: disable=no-init
class LinearOperatorDerivedClassTest(test.TestCase):
"""Tests for derived classes.
Subclasses should implement every abstractmethod, and this will enable all
test methods to work.
"""
# Absolute/relative tolerance for tests.
_atol = {
dtypes.float16: 1e-3,
dtypes.float32: 1e-6,
dtypes.float64: 1e-12,
dtypes.complex64: 1e-6,
dtypes.complex128: 1e-12
}
_rtol = {
dtypes.float16: 1e-3,
dtypes.float32: 1e-6,
dtypes.float64: 1e-12,
dtypes.complex64: 1e-6,
dtypes.complex128: 1e-12
}
def assertAC(self, x, y):
"""Derived classes can set _atol, _rtol to get different tolerance."""
dtype = dtypes.as_dtype(x.dtype)
atol = self._atol[dtype]
rtol = self._rtol[dtype]
self.assertAllClose(x, y, atol=atol, rtol=rtol)
@staticmethod
def adjoint_options():
return [False, True]
@staticmethod
def adjoint_arg_options():
return [False, True]
@staticmethod
def dtypes_to_test():
# TODO(langmore) Test tf.float16 once tf.linalg.solve works in 16bit.
return [dtypes.float32, dtypes.float64, dtypes.complex64, dtypes.complex128]
@staticmethod
def use_placeholder_options():
return [False, True]
@staticmethod
def operator_shapes_infos():
"""Returns list of OperatorShapesInfo, encapsulating the shape to test."""
raise NotImplementedError("operator_shapes_infos has not been implemented.")
@abc.abstractmethod
def operator_and_matrix(
self, shapes_info, dtype, use_placeholder,
ensure_self_adjoint_and_pd=False):
"""Build a batch matrix and an Operator that should have similar behavior.
Every operator acts like a (batch) matrix. This method returns both
together, and is used by tests.
Args:
shapes_info: `OperatorShapesInfo`, encoding shape information about the
operator.
dtype: Numpy dtype. Data type of returned array/operator.
use_placeholder: Python bool. If True, initialize the operator with a
placeholder of undefined shape and correct dtype.
ensure_self_adjoint_and_pd: If `True`,
construct this operator to be Hermitian Positive Definite, as well
as ensuring the hints `is_positive_definite` and `is_self_adjoint`
are set.
This is useful for testing methods such as `cholesky`.
Returns:
operator: `LinearOperator` subclass instance.
mat: `Tensor` representing operator.
"""
# Create a matrix as a numpy array with desired shape/dtype.
# Create a LinearOperator that should have the same behavior as the matrix.
raise NotImplementedError("Not implemented yet.")
@abc.abstractmethod
def make_rhs(self, operator, adjoint, with_batch=True):
"""Make a rhs appropriate for calling operator.solve(rhs).
Args:
operator: A `LinearOperator`
adjoint: Python `bool`. If `True`, we are making a 'rhs' value for the
adjoint operator.
with_batch: Python `bool`. If `True`, create `rhs` with the same batch
shape as operator, and otherwise create a matrix without any batch
shape.
Returns:
A `Tensor`
"""
raise NotImplementedError("make_rhs is not defined.")
@abc.abstractmethod
def make_x(self, operator, adjoint, with_batch=True):
"""Make an 'x' appropriate for calling operator.matmul(x).
Args:
operator: A `LinearOperator`
adjoint: Python `bool`. If `True`, we are making an 'x' value for the
adjoint operator.
with_batch: Python `bool`. If `True`, create `x` with the same batch shape
as operator, and otherwise create a matrix without any batch shape.
Returns:
A `Tensor`
"""
raise NotImplementedError("make_x is not defined.")
@staticmethod
def skip_these_tests():
"""List of test names to skip."""
# Subclasses should over-ride if they want to skip some tests.
# To skip "test_foo", add "foo" to this list.
return []
def assertRaisesError(self, msg):
"""assertRaisesRegexp or OpError, depending on context.executing_eagerly."""
if context.executing_eagerly():
return self.assertRaisesRegexp(Exception, msg)
return self.assertRaisesOpError(msg)
def check_tape_safe(self, operator, skip_options=None):
"""Check gradients are not None w.r.t. operator.variables.
Meant to be called from the derived class.
This ensures grads are not w.r.t every variable in operator.variables. If
more fine-grained testing is needed, a custom test should be written.
Args:
operator: LinearOperator. Exact checks done will depend on hints.
skip_options: Optional list of CheckTapeSafeSkipOptions.
Makes this test skip particular checks.
"""
skip_options = skip_options or []
if not operator.variables:
raise AssertionError("`operator.variables` was empty")
def _assert_not_none(iterable):
for item in iterable:
self.assertIsNotNone(item)
# Tape tests that can be run on every operator below.
with backprop.GradientTape() as tape:
_assert_not_none(tape.gradient(operator.to_dense(), operator.variables))
with backprop.GradientTape() as tape:
_assert_not_none(
tape.gradient(operator.adjoint().to_dense(), operator.variables))
x = math_ops.cast(
array_ops.ones(shape=operator.H.shape_tensor()[:-1]), operator.dtype)
with backprop.GradientTape() as tape:
_assert_not_none(tape.gradient(operator.matvec(x), operator.variables))
# Tests for square, but possibly non-singular operators below.
if not operator.is_square:
return
for option in [
CheckTapeSafeSkipOptions.DETERMINANT,
CheckTapeSafeSkipOptions.LOG_ABS_DETERMINANT,
CheckTapeSafeSkipOptions.DIAG_PART,
CheckTapeSafeSkipOptions.TRACE,
]:
with backprop.GradientTape() as tape:
if option not in skip_options:
_assert_not_none(
tape.gradient(getattr(operator, option)(), operator.variables))
# Tests for non-singular operators below.
if operator.is_non_singular is False: # pylint: disable=g-bool-id-comparison
return
with backprop.GradientTape() as tape:
_assert_not_none(
tape.gradient(operator.inverse().to_dense(), operator.variables))
with backprop.GradientTape() as tape:
_assert_not_none(tape.gradient(operator.solvevec(x), operator.variables))
# Tests for SPD operators below.
if not (operator.is_self_adjoint and operator.is_positive_definite):
return
with backprop.GradientTape() as tape:
_assert_not_none(
tape.gradient(operator.cholesky().to_dense(), operator.variables))
# pylint:disable=missing-docstring
def _test_to_dense(use_placeholder, shapes_info, dtype):
def test_to_dense(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_dense = operator.to_dense()
if not use_placeholder:
self.assertAllEqual(shapes_info.shape, op_dense.shape)
op_dense_v, mat_v = sess.run([op_dense, mat])
self.assertAC(op_dense_v, mat_v)
return test_to_dense
def _test_det(use_placeholder, shapes_info, dtype):
def test_det(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_det = operator.determinant()
if not use_placeholder:
self.assertAllEqual(shapes_info.shape[:-2], op_det.shape)
op_det_v, mat_det_v = sess.run(
[op_det, linalg_ops.matrix_determinant(mat)])
self.assertAC(op_det_v, mat_det_v)
return test_det
def _test_log_abs_det(use_placeholder, shapes_info, dtype):
def test_log_abs_det(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_log_abs_det = operator.log_abs_determinant()
_, mat_log_abs_det = linalg.slogdet(mat)
if not use_placeholder:
self.assertAllEqual(
shapes_info.shape[:-2], op_log_abs_det.shape)
op_log_abs_det_v, mat_log_abs_det_v = sess.run(
[op_log_abs_det, mat_log_abs_det])
self.assertAC(op_log_abs_det_v, mat_log_abs_det_v)
return test_log_abs_det
def _test_matmul_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
with_batch):
# If batch dimensions are omitted, but there are
# no batch dimensions for the linear operator, then
# skip the test case. This is already checked with
# with_batch=True.
if not with_batch and len(shapes_info.shape) <= 2:
return
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
x = self.make_x(
operator, adjoint=adjoint, with_batch=with_batch)
# If adjoint_arg, compute A X^H^H = A X.
if adjoint_arg:
op_matmul = operator.matmul(
linalg.adjoint(x),
adjoint=adjoint,
adjoint_arg=adjoint_arg)
else:
op_matmul = operator.matmul(x, adjoint=adjoint)
mat_matmul = math_ops.matmul(mat, x, adjoint_a=adjoint)
if not use_placeholder:
self.assertAllEqual(op_matmul.shape,
mat_matmul.shape)
op_matmul_v, mat_matmul_v = sess.run(
[op_matmul, mat_matmul])
self.assertAC(op_matmul_v, mat_matmul_v)
def _test_matmul(
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg):
def test_matmul(self):
_test_matmul_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
with_batch=True)
return test_matmul
def _test_matmul_with_broadcast(
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg):
def test_matmul_with_broadcast(self):
_test_matmul_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
with_batch=True)
return test_matmul_with_broadcast
def _test_adjoint(use_placeholder, shapes_info, dtype):
def test_adjoint(self):
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_adjoint = operator.adjoint().to_dense()
op_adjoint_h = operator.H.to_dense()
mat_adjoint = linalg.adjoint(mat)
op_adjoint_v, op_adjoint_h_v, mat_adjoint_v = sess.run(
[op_adjoint, op_adjoint_h, mat_adjoint])
self.assertAC(mat_adjoint_v, op_adjoint_v)
self.assertAC(mat_adjoint_v, op_adjoint_h_v)
return test_adjoint
def _test_cholesky(use_placeholder, shapes_info, dtype):
def test_cholesky(self):
with self.test_session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder,
ensure_self_adjoint_and_pd=True)
op_chol = operator.cholesky().to_dense()
mat_chol = linalg_ops.cholesky(mat)
op_chol_v, mat_chol_v = sess.run([op_chol, mat_chol])
self.assertAC(mat_chol_v, op_chol_v)
return test_cholesky
def _test_solve_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
with_batch):
# If batch dimensions are omitted, but there are
# no batch dimensions for the linear operator, then
# skip the test case. This is already checked with
# with_batch=True.
if not with_batch and len(shapes_info.shape) <= 2:
return
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
rhs = self.make_rhs(
operator, adjoint=adjoint, with_batch=with_batch)
# If adjoint_arg, solve A X = (rhs^H)^H = rhs.
if adjoint_arg:
op_solve = operator.solve(
linalg.adjoint(rhs),
adjoint=adjoint,
adjoint_arg=adjoint_arg)
else:
op_solve = operator.solve(
rhs, adjoint=adjoint, adjoint_arg=adjoint_arg)
mat_solve = linear_operator_util.matrix_solve_with_broadcast(
mat, rhs, adjoint=adjoint)
if not use_placeholder:
self.assertAllEqual(op_solve.shape,
mat_solve.shape)
op_solve_v, mat_solve_v = sess.run([op_solve, mat_solve])
self.assertAC(op_solve_v, mat_solve_v)
def _test_solve(
use_placeholder, shapes_info, dtype, adjoint, adjoint_arg):
def test_solve(self):
_test_solve_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
with_batch=True)
return test_solve
def _test_solve_with_broadcast(
use_placeholder, shapes_info, dtype, adjoint, adjoint_arg):
def test_solve_with_broadcast(self):
_test_solve_base(
self,
use_placeholder,
shapes_info,
dtype,
adjoint,
adjoint_arg,
with_batch=False)
return test_solve_with_broadcast
def _test_inverse(use_placeholder, shapes_info, dtype):
def test_inverse(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_inverse_v, mat_inverse_v = sess.run([
operator.inverse().to_dense(), linalg.inv(mat)])
self.assertAC(op_inverse_v, mat_inverse_v)
return test_inverse
def _test_trace(use_placeholder, shapes_info, dtype):
def test_trace(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_trace = operator.trace()
mat_trace = math_ops.trace(mat)
if not use_placeholder:
self.assertAllEqual(op_trace.shape, mat_trace.shape)
op_trace_v, mat_trace_v = sess.run([op_trace, mat_trace])
self.assertAC(op_trace_v, mat_trace_v)
return test_trace
def _test_add_to_tensor(use_placeholder, shapes_info, dtype):
def test_add_to_tensor(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_plus_2mat = operator.add_to_tensor(2 * mat)
if not use_placeholder:
self.assertAllEqual(shapes_info.shape, op_plus_2mat.shape)
op_plus_2mat_v, mat_v = sess.run([op_plus_2mat, mat])
self.assertAC(op_plus_2mat_v, 3 * mat_v)
return test_add_to_tensor
def _test_diag_part(use_placeholder, shapes_info, dtype):
def test_diag_part(self):
with self.session(graph=ops.Graph()) as sess:
sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED
operator, mat = self.operator_and_matrix(
shapes_info, dtype, use_placeholder=use_placeholder)
op_diag_part = operator.diag_part()
mat_diag_part = array_ops.matrix_diag_part(mat)
if not use_placeholder:
self.assertAllEqual(mat_diag_part.shape,
op_diag_part.shape)
op_diag_part_, mat_diag_part_ = sess.run(
[op_diag_part, mat_diag_part])
self.assertAC(op_diag_part_, mat_diag_part_)
return test_diag_part
# pylint:enable=missing-docstring
def add_tests(test_cls):
"""Add tests for LinearOperator methods."""
test_name_dict = {
"add_to_tensor": _test_add_to_tensor,
"cholesky": _test_cholesky,
"det": _test_det,
"diag_part": _test_diag_part,
"inverse": _test_inverse,
"log_abs_det": _test_log_abs_det,
"matmul": _test_matmul,
"matmul_with_broadcast": _test_matmul_with_broadcast,
"solve": _test_solve,
"solve_with_broadcast": _test_solve_with_broadcast,
"to_dense": _test_to_dense,
"trace": _test_trace,
}
tests_with_adjoint_args = [
"matmul",
"matmul_with_broadcast",
"solve",
"solve_with_broadcast",
]
for name, test_template_fn in test_name_dict.items():
if name in test_cls.skip_these_tests():
continue
for dtype, use_placeholder, shape_info in itertools.product(
test_cls.dtypes_to_test(),
test_cls.use_placeholder_options(),
test_cls.operator_shapes_infos()):
base_test_name = "_".join([
"test", name, "_shape={},dtype={},use_placeholder={}".format(
shape_info.shape, dtype, use_placeholder)])
if name in tests_with_adjoint_args:
for adjoint in test_cls.adjoint_options():
for adjoint_arg in test_cls.adjoint_arg_options():
test_name = base_test_name + ",adjoint={},adjoint_arg={}".format(
adjoint, adjoint_arg)
if hasattr(test_cls, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(
test_cls,
test_name,
test_util.run_deprecated_v1(test_template_fn(
use_placeholder,
shape_info,
dtype,
adjoint,
adjoint_arg)))
else:
if hasattr(test_cls, base_test_name):
raise RuntimeError("Test %s defined more than once" % base_test_name)
setattr(
test_cls,
base_test_name,
test_util.run_deprecated_v1(test_template_fn(
use_placeholder, shape_info, dtype)))
@six.add_metaclass(abc.ABCMeta)
class SquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
"""Base test class appropriate for square operators.
Sub-classes must still define all abstractmethods from
LinearOperatorDerivedClassTest that are not defined here.
"""
@staticmethod
def operator_shapes_infos():
shapes_info = OperatorShapesInfo
# non-batch operators (n, n) and batch operators.
return [
shapes_info((0, 0)),
shapes_info((1, 1)),
shapes_info((1, 3, 3)),
shapes_info((3, 4, 4)),
shapes_info((2, 1, 4, 4))]
def make_rhs(self, operator, adjoint, with_batch=True):
# This operator is square, so rhs and x will have same shape.
# adjoint value makes no difference because the operator shape doesn't
# change since it is square, but be pedantic.
return self.make_x(operator, adjoint=not adjoint, with_batch=with_batch)
def make_x(self, operator, adjoint, with_batch=True):
# Value of adjoint makes no difference because the operator is square.
# Return the number of systems to solve, R, equal to 1 or 2.
r = self._get_num_systems(operator)
# If operator.shape = [B1,...,Bb, N, N] this returns a random matrix of
# shape [B1,...,Bb, N, R], R = 1 or 2.
if operator.shape.is_fully_defined():
batch_shape = operator.batch_shape.as_list()
n = operator.domain_dimension.value
if with_batch:
x_shape = batch_shape + [n, r]
else:
x_shape = [n, r]
else:
batch_shape = operator.batch_shape_tensor()
n = operator.domain_dimension_tensor()
if with_batch:
x_shape = array_ops.concat((batch_shape, [n, r]), 0)
else:
x_shape = [n, r]
return random_normal(x_shape, dtype=operator.dtype)
def _get_num_systems(self, operator):
"""Get some number, either 1 or 2, depending on operator."""
if operator.tensor_rank is None or operator.tensor_rank % 2:
return 1
else:
return 2
@six.add_metaclass(abc.ABCMeta)
class NonSquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest):
"""Base test class appropriate for generic rectangular operators.
Square shapes are never tested by this class, so if you want to test your
operator with a square shape, create two test classes, the other subclassing
SquareLinearOperatorFullMatrixTest.
Sub-classes must still define all abstractmethods from
LinearOperatorDerivedClassTest that are not defined here.
"""
@staticmethod
def skip_these_tests():
"""List of test names to skip."""
return [
"cholesky",
"inverse",
"solve",
"solve_with_broadcast",
"det",
"log_abs_det"
]
@staticmethod
def operator_shapes_infos():
shapes_info = OperatorShapesInfo
# non-batch operators (n, n) and batch operators.
return [
shapes_info((2, 1)),
shapes_info((1, 2)),
shapes_info((1, 3, 2)),
shapes_info((3, 3, 4)),
shapes_info((2, 1, 2, 4))]
def make_rhs(self, operator, adjoint, with_batch=True):
# TODO(langmore) Add once we're testing solve_ls.
raise NotImplementedError(
"make_rhs not implemented because we don't test solve")
def make_x(self, operator, adjoint, with_batch=True):
# Return the number of systems for the argument 'x' for .matmul(x)
r = self._get_num_systems(operator)
# If operator.shape = [B1,...,Bb, M, N] this returns a random matrix of
# shape [B1,...,Bb, N, R], R = 1 or 2.
if operator.shape.is_fully_defined():
batch_shape = operator.batch_shape.as_list()
if adjoint:
n = operator.range_dimension.value
else:
n = operator.domain_dimension.value
if with_batch:
x_shape = batch_shape + [n, r]
else:
x_shape = [n, r]
else:
batch_shape = operator.batch_shape_tensor()
if adjoint:
n = operator.range_dimension_tensor()
else:
n = operator.domain_dimension_tensor()
if with_batch:
x_shape = array_ops.concat((batch_shape, [n, r]), 0)
else:
x_shape = [n, r]
return random_normal(x_shape, dtype=operator.dtype)
def _get_num_systems(self, operator):
"""Get some number, either 1 or 2, depending on operator."""
if operator.tensor_rank is None or operator.tensor_rank % 2:
return 1
else:
return 2
def random_positive_definite_matrix(shape, dtype, force_well_conditioned=False):
"""[batch] positive definite matrix.
Args:
shape: `TensorShape` or Python list. Shape of the returned matrix.
dtype: `TensorFlow` `dtype` or Python dtype.
force_well_conditioned: Python bool. If `True`, returned matrix has
eigenvalues with modulus in `(1, 4)`. Otherwise, eigenvalues are
chi-squared random variables.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
if not tensor_util.is_tensor(shape):
shape = tensor_shape.TensorShape(shape)
# Matrix must be square.
shape.dims[-1].assert_is_compatible_with(shape.dims[-2])
with ops.name_scope("random_positive_definite_matrix"):
tril = random_tril_matrix(
shape, dtype, force_well_conditioned=force_well_conditioned)
return math_ops.matmul(tril, tril, adjoint_b=True)
def random_tril_matrix(shape,
dtype,
force_well_conditioned=False,
remove_upper=True):
"""[batch] lower triangular matrix.
Args:
shape: `TensorShape` or Python `list`. Shape of the returned matrix.
dtype: `TensorFlow` `dtype` or Python dtype
force_well_conditioned: Python `bool`. If `True`, returned matrix will have
eigenvalues with modulus in `(1, 2)`. Otherwise, eigenvalues are unit
normal random variables.
remove_upper: Python `bool`.
If `True`, zero out the strictly upper triangle.
If `False`, the lower triangle of returned matrix will have desired
properties, but will not have the strictly upper triangle zero'd out.
Returns:
`Tensor` with desired shape and dtype.
"""
with ops.name_scope("random_tril_matrix"):
# Totally random matrix. Has no nice properties.
tril = random_normal(shape, dtype=dtype)
if remove_upper:
tril = array_ops.matrix_band_part(tril, -1, 0)
# Create a diagonal with entries having modulus in [1, 2].
if force_well_conditioned:
maxval = ops.convert_to_tensor(np.sqrt(2.), dtype=dtype.real_dtype)
diag = random_sign_uniform(
shape[:-1], dtype=dtype, minval=1., maxval=maxval)
tril = array_ops.matrix_set_diag(tril, diag)
return tril
def random_normal(shape, mean=0.0, stddev=1.0, dtype=dtypes.float32, seed=None):
"""Tensor with (possibly complex) Gaussian entries.
Samples are distributed like
```
N(mean, stddev^2), if dtype is real,
X + iY, where X, Y ~ N(mean, stddev^2) if dtype is complex.
```
Args:
shape: `TensorShape` or Python list. Shape of the returned tensor.
mean: `Tensor` giving mean of normal to sample from.
stddev: `Tensor` giving stdev of normal to sample from.
dtype: `TensorFlow` `dtype` or numpy dtype
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
with ops.name_scope("random_normal"):
samples = random_ops.random_normal(
shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed)
if dtype.is_complex:
if seed is not None:
seed += 1234
more_samples = random_ops.random_normal(
shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed)
samples = math_ops.complex(samples, more_samples)
return samples
def random_uniform(shape,
minval=None,
maxval=None,
dtype=dtypes.float32,
seed=None):
"""Tensor with (possibly complex) Uniform entries.
Samples are distributed like
```
Uniform[minval, maxval], if dtype is real,
X + iY, where X, Y ~ Uniform[minval, maxval], if dtype is complex.
```
Args:
shape: `TensorShape` or Python list. Shape of the returned tensor.
minval: `0-D` `Tensor` giving the minimum values.
maxval: `0-D` `Tensor` giving the maximum values.
dtype: `TensorFlow` `dtype` or Python dtype
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
with ops.name_scope("random_uniform"):
samples = random_ops.random_uniform(
shape, dtype=dtype.real_dtype, minval=minval, maxval=maxval, seed=seed)
if dtype.is_complex:
if seed is not None:
seed += 12345
more_samples = random_ops.random_uniform(
shape,
dtype=dtype.real_dtype,
minval=minval,
maxval=maxval,
seed=seed)
samples = math_ops.complex(samples, more_samples)
return samples
def random_sign_uniform(shape,
minval=None,
maxval=None,
dtype=dtypes.float32,
seed=None):
"""Tensor with (possibly complex) random entries from a "sign Uniform".
Letting `Z` be a random variable equal to `-1` and `1` with equal probability,
Samples from this `Op` are distributed like
```
Z * X, where X ~ Uniform[minval, maxval], if dtype is real,
Z * (X + iY), where X, Y ~ Uniform[minval, maxval], if dtype is complex.
```
Args:
shape: `TensorShape` or Python list. Shape of the returned tensor.
minval: `0-D` `Tensor` giving the minimum values.
maxval: `0-D` `Tensor` giving the maximum values.
dtype: `TensorFlow` `dtype` or Python dtype
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
"""
dtype = dtypes.as_dtype(dtype)
with ops.name_scope("random_sign_uniform"):
unsigned_samples = random_uniform(
shape, minval=minval, maxval=maxval, dtype=dtype, seed=seed)
if seed is not None:
seed += 12
signs = math_ops.sign(
random_ops.random_uniform(shape, minval=-1., maxval=1., seed=seed))
return unsigned_samples * math_ops.cast(signs, unsigned_samples.dtype)
def random_normal_correlated_columns(shape,
mean=0.0,
stddev=1.0,
dtype=dtypes.float32,
eps=1e-4,
seed=None):
"""Batch matrix with (possibly complex) Gaussian entries and correlated cols.
Returns random batch matrix `A` with specified element-wise `mean`, `stddev`,
living close to an embedded hyperplane.
Suppose `shape[-2:] = (M, N)`.
If `M < N`, `A` is a random `M x N` [batch] matrix with iid Gaussian entries.
If `M >= N`, then the colums of `A` will be made almost dependent as follows:
```
L = random normal N x N-1 matrix, mean = 0, stddev = 1 / sqrt(N - 1)
B = random normal M x N-1 matrix, mean = 0, stddev = stddev.
G = (L B^H)^H, a random normal M x N matrix, living on N-1 dim hyperplane
E = a random normal M x N matrix, mean = 0, stddev = eps
mu = a constant M x N matrix, equal to the argument "mean"
A = G + E + mu
```
Args:
shape: Python list of integers.
Shape of the returned tensor. Must be at least length two.
mean: `Tensor` giving mean of normal to sample from.
stddev: `Tensor` giving stdev of normal to sample from.
dtype: `TensorFlow` `dtype` or numpy dtype
eps: Distance each column is perturbed from the low-dimensional subspace.
seed: Python integer seed for the RNG.
Returns:
`Tensor` with desired shape and dtype.
Raises:
ValueError: If `shape` is not at least length 2.
"""
dtype = dtypes.as_dtype(dtype)
if len(shape) < 2:
raise ValueError(
"Argument shape must be at least length 2. Found: %s" % shape)
# Shape is the final shape, e.g. [..., M, N]
shape = list(shape)
batch_shape = shape[:-2]
m, n = shape[-2:]
# If there is only one column, "they" are by definition correlated.
if n < 2 or n < m:
return random_normal(
shape, mean=mean, stddev=stddev, dtype=dtype, seed=seed)
# Shape of the matrix with only n - 1 columns that we will embed in higher
# dimensional space.
smaller_shape = batch_shape + [m, n - 1]
# Shape of the embedding matrix, mapping batch matrices
# from [..., N-1, M] to [..., N, M]
embedding_mat_shape = batch_shape + [n, n - 1]
# This stddev for the embedding_mat ensures final result has correct stddev.
stddev_mat = 1 / np.sqrt(n - 1)
with ops.name_scope("random_normal_correlated_columns"):
smaller_mat = random_normal(
smaller_shape, mean=0.0, stddev=stddev_mat, dtype=dtype, seed=seed)
if seed is not None:
seed += 1287
embedding_mat = random_normal(embedding_mat_shape, dtype=dtype, seed=seed)
embedded_t = math_ops.matmul(embedding_mat, smaller_mat, transpose_b=True)
embedded = array_ops.matrix_transpose(embedded_t)
mean_mat = array_ops.ones_like(embedded) * mean
return embedded + random_normal(shape, stddev=eps, dtype=dtype) + mean_mat
|
DavidNorman/tensorflow
|
tensorflow/python/ops/linalg/linear_operator_test_util.py
|
Python
|
apache-2.0
| 33,683
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2017 ZTE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from unittest import mock
from watcher.common import clients
from watcher.common import utils
from watcher.decision_engine.strategy import strategies
from watcher.tests.decision_engine.strategy.strategies.test_base \
import TestBaseStrategy
class TestSavingEnergy(TestBaseStrategy):
def setUp(self):
super(TestSavingEnergy, self).setUp()
mock_node1_dict = {
'uuid': '922d4762-0bc5-4b30-9cb9-48ab644dd861'}
mock_node2_dict = {
'uuid': '922d4762-0bc5-4b30-9cb9-48ab644dd862'}
mock_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.fake_nodes = [mock_node1, mock_node2]
p_ironic = mock.patch.object(
clients.OpenStackClients, 'ironic')
self.m_ironic = p_ironic.start()
self.addCleanup(p_ironic.stop)
p_nova = mock.patch.object(
clients.OpenStackClients, 'nova')
self.m_nova = p_nova.start()
self.addCleanup(p_nova.stop)
self.m_ironic.node.list.return_value = self.fake_nodes
self.m_c_model.return_value = self.fake_c_cluster.generate_scenario_1()
self.strategy = strategies.SavingEnergy(
config=mock.Mock())
self.strategy.input_parameters = utils.Struct()
self.strategy.input_parameters.update(
{'free_used_percent': 10.0,
'min_free_hosts_num': 1})
self.strategy.free_used_percent = 10.0
self.strategy.min_free_hosts_num = 1
self.strategy._ironic_client = self.m_ironic
self.strategy._nova_client = self.m_nova
def test_get_hosts_pool_with_vms_node_pool(self):
mock_node1_dict = {
'extra': {'compute_node_id': 1},
'power_state': 'power on'}
mock_node2_dict = {
'extra': {'compute_node_id': 2},
'power_state': 'power off'}
mock_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
mock_hyper1 = mock.Mock()
mock_hyper2 = mock.Mock()
mock_hyper1.to_dict.return_value = {
'running_vms': 2, 'service': {'host': 'hostname_0'}, 'state': 'up'}
mock_hyper2.to_dict.return_value = {
'running_vms': 2, 'service': {'host': 'hostname_1'}, 'state': 'up'}
self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
self.strategy.get_hosts_pool()
self.assertEqual(len(self.strategy.with_vms_node_pool), 2)
self.assertEqual(len(self.strategy.free_poweron_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweroff_node_pool), 0)
def test_get_hosts_pool_free_poweron_node_pool(self):
mock_node1_dict = {
'extra': {'compute_node_id': 1},
'power_state': 'power on'}
mock_node2_dict = {
'extra': {'compute_node_id': 2},
'power_state': 'power on'}
mock_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
mock_hyper1 = mock.Mock()
mock_hyper2 = mock.Mock()
mock_hyper1.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_0'}, 'state': 'up'}
mock_hyper2.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_1'}, 'state': 'up'}
self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
self.strategy.get_hosts_pool()
self.assertEqual(len(self.strategy.with_vms_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweron_node_pool), 2)
self.assertEqual(len(self.strategy.free_poweroff_node_pool), 0)
def test_get_hosts_pool_free_poweroff_node_pool(self):
mock_node1_dict = {
'extra': {'compute_node_id': 1},
'power_state': 'power off'}
mock_node2_dict = {
'extra': {'compute_node_id': 2},
'power_state': 'power off'}
mock_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
mock_hyper1 = mock.Mock()
mock_hyper2 = mock.Mock()
mock_hyper1.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_0'}, 'state': 'up'}
mock_hyper2.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_1'}, 'state': 'up'}
self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
self.strategy.get_hosts_pool()
self.assertEqual(len(self.strategy.with_vms_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweron_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweroff_node_pool), 2)
def test_get_hosts_pool_with_node_out_model(self):
mock_node1_dict = {
'extra': {'compute_node_id': 1},
'power_state': 'power off'}
mock_node2_dict = {
'extra': {'compute_node_id': 2},
'power_state': 'power off'}
mock_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
mock_hyper1 = mock.Mock()
mock_hyper2 = mock.Mock()
mock_hyper1.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_0'},
'state': 'up'}
mock_hyper2.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_10'},
'state': 'up'}
self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
self.strategy.get_hosts_pool()
self.assertEqual(len(self.strategy.with_vms_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweron_node_pool), 0)
self.assertEqual(len(self.strategy.free_poweroff_node_pool), 1)
def test_save_energy_poweron(self):
self.strategy.free_poweroff_node_pool = [
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd861'),
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd862')
]
self.strategy.save_energy()
self.assertEqual(len(self.strategy.solution.actions), 1)
action = self.strategy.solution.actions[0]
self.assertEqual(action.get('input_parameters').get('state'), 'on')
def test_save_energy_poweroff(self):
self.strategy.free_poweron_node_pool = [
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd861'),
mock.Mock(uuid='922d4762-0bc5-4b30-9cb9-48ab644dd862')
]
self.strategy.save_energy()
self.assertEqual(len(self.strategy.solution.actions), 1)
action = self.strategy.solution.actions[0]
self.assertEqual(action.get('input_parameters').get('state'), 'off')
def test_execute(self):
mock_node1_dict = {
'extra': {'compute_node_id': 1},
'power_state': 'power on'}
mock_node2_dict = {
'extra': {'compute_node_id': 2},
'power_state': 'power on'}
mock_node1 = mock.Mock(**mock_node1_dict)
mock_node2 = mock.Mock(**mock_node2_dict)
self.m_ironic.node.get.side_effect = [mock_node1, mock_node2]
mock_hyper1 = mock.Mock()
mock_hyper2 = mock.Mock()
mock_hyper1.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_0'}, 'state': 'up'}
mock_hyper2.to_dict.return_value = {
'running_vms': 0, 'service': {'host': 'hostname_1'}, 'state': 'up'}
self.m_nova.hypervisors.get.side_effect = [mock_hyper1, mock_hyper2]
model = self.fake_c_cluster.generate_scenario_1()
self.m_c_model.return_value = model
solution = self.strategy.execute()
self.assertEqual(len(solution.actions), 1)
|
openstack/watcher
|
watcher/tests/decision_engine/strategy/strategies/test_saving_energy.py
|
Python
|
apache-2.0
| 8,589
|
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
General functions for running the unit tests via pytest.
'''
import itertools
import numpy as np
import pytest
from neon.backends import gen_backend
def pytest_addoption(parser):
'''
Add a --all option to run the full range of
parameters for tests generated using the
pytest test generators
'''
parser.addoption("--all", action="store_true",
help="run all tests")
return
@pytest.fixture(scope='session')
def data():
path_to_data = '~/nervana/data/'
return path_to_data
@pytest.fixture(scope='module', params=['gpu', 'cpu'])
def backend_default(request):
'''
Fixture to setup the backend before running a
test. Also registers the teardown function to clean
up the backend after a test is done. This had module
scope, so this will be run once for each test in a
given test file (module).
This fixture is parameterized to run both the cpu and
gpu backends for every test
'''
be = gen_backend(backend=request.param,
default_dtype=np.float32,
batch_size=128,
rng_seed=0)
# add a cleanup call - will run after all
# test in module are done
def cleanup():
be = request.getfuncargvalue('backend_default')
del be
request.addfinalizer(cleanup)
# tests using this fixture can
# access the backend object from
# backend or use the NervanaObject.be global
return be
@pytest.fixture(scope='module')
def backend_cpu64(request):
'''
Fixture that returns a cpu backend using 64 bit dtype.
For use in tests like gradient checking whihch need higher
precision
'''
be = gen_backend(backend='cpu',
default_dtype=np.float64,
batch_size=128,
rng_seed=0)
# add a cleanup call - will run after all
# test in module are done
def cleanup():
be = request.getfuncargvalue('backend_cpu64')
del be
request.addfinalizer(cleanup)
# tests using this fixture can
# access the backend object from
# backend or use the NervanaObject.be global
return be
def idfunc(vals):
'''
Prnit out a human readable format for the
parameterized tests
'''
dtype = str(vals[1])
dtype = dtype.split("numpy.")[1].strip("'>")
return vals[0] + '_' + dtype
gpu_cpu_32_16 = itertools.product(['gpu','cpu'], [np.float16, np.float32])
@pytest.fixture(scope='module', params=list(gpu_cpu_32_16),
ids=idfunc)
def backend_tests(request):
'''
Fixture that returns a cpu and gpu backes for 16, and 32 bit
For use in tests like gradient checking whihch need higher
precision
'''
be = gen_backend(backend=request.param[0],
default_dtype=request.param[1],
batch_size=128,
rng_seed=0)
# add a cleanup call - will run after all
# test in module are done
def cleanup():
be = request.getfuncargvalue('backend_tests')
del be
request.addfinalizer(cleanup)
# tests using this fixture can
# access the backend object from
# backend or use the NervanaObject.be global
return be
|
jfsantos/neon
|
conftest.py
|
Python
|
apache-2.0
| 3,983
|
#! /usr/bin/env python
# Copyright 2014 Uri Laserson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import optparse
import vdj
parser = optparse.OptionParser()
(options, args) = parser.parse_args()
if len(args) == 2:
inhandle = open(args[0],'r')
outhandle = open(args[1],'w')
elif len(args) == 1:
inhandle = open(args[0],'r')
outhandle = sys.stdout
elif len(args) == 0:
inhandle = sys.stdin
outhandle = sys.stdout
else:
raise Exception, "Wrong number of arguments."
for chain in vdj.parse_imgt(inhandle):
# print >>outhandle, chain.format('fasta') # causes chain.description output instead of chain.id
print >>outhandle, ">%s\n%s" % (chain.id,chain.seq)
|
churchlab/vdj
|
bin/imgt2fasta.py
|
Python
|
apache-2.0
| 1,206
|
from __future__ import unicode_literals
from datetime import datetime
import json
import boto.cloudformation
from moto.core import BaseBackend
from .parsing import ResourceMap, OutputMap
from .utils import generate_stack_id
from .exceptions import ValidationError
class FakeStack(object):
def __init__(self, stack_id, name, template, parameters, region_name, notification_arns=None, tags=None):
self.stack_id = stack_id
self.name = name
self.template = template
self.template_dict = json.loads(self.template)
self.parameters = parameters
self.region_name = region_name
self.notification_arns = notification_arns if notification_arns else []
self.tags = tags if tags else {}
self.events = []
self._add_stack_event("CREATE_IN_PROGRESS", resource_status_reason="User Initiated")
self.description = self.template_dict.get('Description')
self.resource_map = self._create_resource_map()
self.output_map = self._create_output_map()
self._add_stack_event("CREATE_COMPLETE")
self.status = 'CREATE_COMPLETE'
def _create_resource_map(self):
resource_map = ResourceMap(self.stack_id, self.name, self.parameters, self.tags, self.region_name, self.template_dict)
resource_map.create()
return resource_map
def _create_output_map(self):
output_map = OutputMap(self.resource_map, self.template_dict)
output_map.create()
return output_map
def _add_stack_event(self, resource_status, resource_status_reason=None, resource_properties=None):
self.events.append(FakeEvent(
stack_id=self.stack_id,
stack_name=self.name,
logical_resource_id=self.name,
physical_resource_id=self.stack_id,
resource_type="AWS::CloudFormation::Stack",
resource_status=resource_status,
resource_status_reason=resource_status_reason,
resource_properties=resource_properties,
))
def _add_resource_event(self, logical_resource_id, resource_status, resource_status_reason=None, resource_properties=None):
# not used yet... feel free to help yourself
resource = self.resource_map[logical_resource_id]
self.events.append(FakeEvent(
stack_id=self.stack_id,
stack_name=self.name,
logical_resource_id=logical_resource_id,
physical_resource_id=resource.physical_resource_id,
resource_type=resource.type,
resource_status=resource_status,
resource_status_reason=resource_status_reason,
resource_properties=resource_properties,
))
@property
def stack_parameters(self):
return self.resource_map.resolved_parameters
@property
def stack_resources(self):
return self.resource_map.values()
@property
def stack_outputs(self):
return self.output_map.values()
def update(self, template):
self._add_stack_event("UPDATE_IN_PROGRESS", resource_status_reason="User Initiated")
self.template = template
self.resource_map.update(json.loads(template))
self.output_map = self._create_output_map()
self._add_stack_event("UPDATE_COMPLETE")
self.status = "UPDATE_COMPLETE"
def delete(self):
self._add_stack_event("DELETE_IN_PROGRESS", resource_status_reason="User Initiated")
self.resource_map.delete()
self._add_stack_event("DELETE_COMPLETE")
self.status = "DELETE_COMPLETE"
class FakeEvent(object):
def __init__(self, stack_id, stack_name, logical_resource_id, physical_resource_id, resource_type, resource_status, resource_status_reason=None, resource_properties=None):
self.stack_id = stack_id
self.stack_name = stack_name
self.logical_resource_id = logical_resource_id
self.physical_resource_id = physical_resource_id
self.resource_type = resource_type
self.resource_status = resource_status
self.resource_status_reason = resource_status_reason
self.resource_properties = resource_properties
self.timestamp = datetime.utcnow()
class CloudFormationBackend(BaseBackend):
def __init__(self):
self.stacks = {}
self.deleted_stacks = {}
def create_stack(self, name, template, parameters, region_name, notification_arns=None, tags=None):
stack_id = generate_stack_id(name)
new_stack = FakeStack(
stack_id=stack_id,
name=name,
template=template,
parameters=parameters,
region_name=region_name,
notification_arns=notification_arns,
tags=tags,
)
self.stacks[stack_id] = new_stack
return new_stack
def describe_stacks(self, name_or_stack_id):
stacks = self.stacks.values()
if name_or_stack_id:
for stack in stacks:
if stack.name == name_or_stack_id or stack.stack_id == name_or_stack_id:
return [stack]
if self.deleted_stacks:
deleted_stacks = self.deleted_stacks.values()
for stack in deleted_stacks:
if stack.stack_id == name_or_stack_id:
return [stack]
raise ValidationError(name_or_stack_id)
else:
return stacks
def list_stacks(self):
return self.stacks.values()
def get_stack(self, name_or_stack_id):
all_stacks = dict(self.deleted_stacks, **self.stacks)
if name_or_stack_id in all_stacks:
# Lookup by stack id - deleted stacks incldued
return all_stacks[name_or_stack_id]
else:
# Lookup by stack name - undeleted stacks only
for stack in self.stacks.values():
if stack.name == name_or_stack_id:
return stack
def update_stack(self, name, template):
stack = self.get_stack(name)
stack.update(template)
return stack
def list_stack_resources(self, stack_name_or_id):
stack = self.get_stack(stack_name_or_id)
return stack.stack_resources
def delete_stack(self, name_or_stack_id):
if name_or_stack_id in self.stacks:
# Delete by stack id
stack = self.stacks.pop(name_or_stack_id, None)
stack.delete()
self.deleted_stacks[stack.stack_id] = stack
return self.stacks.pop(name_or_stack_id, None)
else:
# Delete by stack name
for stack in list(self.stacks.values()):
if stack.name == name_or_stack_id:
self.delete_stack(stack.stack_id)
cloudformation_backends = {}
for region in boto.cloudformation.regions():
cloudformation_backends[region.name] = CloudFormationBackend()
|
tootedom/moto
|
moto/cloudformation/models.py
|
Python
|
apache-2.0
| 6,905
|
from flask import Flask
from flask import make_response
from flask import request
from flask import render_template
from flask import redirect
from flask import url_for
import logging
from logging.handlers import RotatingFileHandler
app = Flask(__name__)
@app.route('/')
def index():
app.logger.info('index')
username = request.cookies.get('username')
if (username == None):
return redirect(url_for('login'))
else:
return render_template('index.html', username=username)
@app.route('/login', methods=['GET','POST'])
def login():
app.logger.info('login')
if request.method == 'POST':
if validate_credentials(request.form['username'], request.form['password']):
resp = make_response(redirect(url_for('index')))
resp.set_cookie('username', request.form['username'])
return resp
else:
return render_template('login.html', error='Invalid username or password')
else:
return render_template('login.html')
@app.route('/logout')
def logout():
app.logger.info('logout')
resp = make_response(redirect(url_for('index')))
resp.set_cookie('username', '', expires=0)
return resp
def validate_credentials(username, password):
return username == password
if __name__ == '__main__':
handler = RotatingFileHandler('todo.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
app.run()
|
CoderDojoSG/todo
|
todo1/application.py
|
Python
|
apache-2.0
| 1,472
|
__author__ = 'Arunkumar Eli'
__email__ = "elrarun@gmail.com"
from selenium.webdriver.common.by import By
class DigitalOceanLocators(object):
ACCESS_KEY_INPUT = (By.ID, 'accessKey')
SECRET_KEY_INPUT = (By.ID, 'secretKey')
NEXT_BTN = (By.CSS_SELECTOR, "button.btn.btn-primary")
AVAILABILITY_ZONE = (By.XPATH, "//section[3]/div/div/span")
ZONE_SELECT = (By.ID, "selectedZone")
VPC_RADIO_BTN = (By.XPATH, "//div[3]/div[2]/div/label")
SUBNET_RADIO_BTN = (By.XPATH, "//div[2]/label")
SECURITY_GROUP = (By.XPATH, "///section[5]/div/div/span")
INSTANCE = (By.XPATH, "//section[7]/div/div/span")
ACCOUNT_ACCESS = (By.XPATH, "//section/div/div/span")
STD_RADIO_BTN=(By.XPATH,"//section[5]/div[1]/div[2]/div[2]/div[1]/label/input")
CUSTOM_RADIO_BTN=(By.XPATH,"//section[5]/div[1]/div[2]/div[2]/div[2]/label/input")
SET_INSTANCE_OPTION_BTN = (By.XPATH, "//div[2]/button")
SLIDE_BAR_CLICK_3 = (By.XPATH, "//div[2]/div[3]/div")
HOST_NAME_INPUT = (By.ID, "prefix")
HOST_DESC_INPUT = (By.ID, "description")
HOST_INSTANCE_TYPE_SELECT = (By.ID, "instanceType")
HOST_MEM_SIZE_INPUT = (By.ID, "rootSize")
HOST_CREATE_BTN = (By.XPATH, "//div[2]/button")
|
aruneli/rancher-test
|
ui-selenium-tests/locators/RackspaceLocators.py
|
Python
|
apache-2.0
| 1,212
|
import argparse
import json
import os
import shutil
from .model import train_and_evaluate
if __name__ == "__main__":
parser = argparse.ArgumentParser()
# File arguments
parser.add_argument(
"--train_file_pattern",
help="GCS location to read training data.",
required=True
)
parser.add_argument(
"--eval_file_pattern",
help="GCS location to read evaluation data.",
required=True
)
parser.add_argument(
"--output_dir",
help="GCS location to write checkpoints and export models.",
required=True
)
parser.add_argument(
"--job-dir",
help="This model ignores this field, but it is required by gcloud.",
default="junk"
)
# Sequence shape hyperparameters
parser.add_argument(
"--seq_len",
help="Number of timesteps to include in each example.",
type=int,
default=30
)
# Training parameters
parser.add_argument(
"--train_batch_size",
help="Number of examples in training batch.",
type=int,
default=32
)
parser.add_argument(
"--eval_batch_size",
help="Number of examples in evaluation batch.",
type=int,
default=32
)
parser.add_argument(
"--train_steps",
help="Number of batches to train.",
type=int,
default=1024
)
parser.add_argument(
"--learning_rate",
help="How quickly or slowly we train our model by scaling the gradient.",
type=float,
default=0.1
)
parser.add_argument(
"--start_delay_secs",
help="Number of seconds to wait before first evaluation.",
type=int,
default=60
)
parser.add_argument(
"--throttle_secs",
help="Number of seconds to wait between evaluations.",
type=int,
default=120
)
## LSTM hyperparameters
parser.add_argument(
"--lstm_hidden_units",
help="Hidden layer sizes to use for LSTM.",
type=str,
default="64,32,16"
)
# Parse all arguments
args = parser.parse_args()
arguments = args.__dict__
# Unused args provided by service
arguments.pop("job_dir", None)
arguments.pop("job-dir", None)
# Fix list arguments
arguments["lstm_hidden_units"] = [
int(x) for x in arguments["lstm_hidden_units"].split(",")]
# Append trial_id to path if we are doing hptuning
# This code can be removed if you are not using hyperparameter tuning
arguments["output_dir"] = os.path.join(
arguments["output_dir"],
json.loads(
os.environ.get("TF_CONFIG", "{}")
).get("task", {}).get("trial", "")
)
# Run the model
shutil.rmtree(path=arguments["output_dir"], ignore_errors=True) # start fresh each time
train_and_evaluate(arguments)
|
GoogleCloudPlatform/training-data-analyst
|
blogs/gcp_forecasting/tf_module/trainer/task.py
|
Python
|
apache-2.0
| 2,729
|
import sys
import pickle
##########################################################
# usage
# pypy find_2g.py xid_train.p ../../data/train
# xid_train.p is a list like ['loIP1tiwELF9YNZQjSUO',''....] to specify
# the order of samples in traing data
# ../../data/train is the path of original train data
##########################################################
xid_name=sys.argv[1]
data_path=sys.argv[2]
xid=pickle.load(open(xid_name)) #xid_train.p or xid_test.p
newc=pickle.load(open('newc.p'))
cmd2g={}
for i in newc:
for j in newc:
cmd2g[(i,j)]=0
print newc
for c,f in enumerate(xid):#(files[len(files)/10*a1:len(files)/10*a2]):
count={}
for i in cmd2g:
count[i]=0
fo=open(data_path+'/'+f+'.asm')
tot=0
a=-1
b=-1
for line in fo:
xx=line.split()
for x in xx:
if x in newc:
a=b
b=x
if (a,b) in cmd2g:
count[(a,b)]+=1
tot+=1
# print (b,a)
fo.close()
if c%10==0:
print c*1.0/len(xid),tot
for i in cmd2g:
cmd2g[i]=count[i]+cmd2g[i]
del count
import pickle
cmd2gx={}
for i in cmd2g:
if cmd2g[i]>10:
cmd2gx[i]=cmd2g[i]
print len(cmd2gx)
pickle.dump(cmd2gx,open('cmd2g.p','w'))
|
bikash/kaggleCompetition
|
microsoft malware/Malware_Say_No_To_Overfitting/kaggle_Microsoft_malware_small/find_2g.py
|
Python
|
apache-2.0
| 1,327
|
from django import forms
from .widgets import MarkdownxWidget
class MarkdownxFormField(forms.CharField):
"""
Used in FormFields as a Markdown enabled replacement for ``CharField``.
"""
def __init__(self, *args, **kwargs):
"""
Arguments are similar to Django's default ``CharField``.
See Django's `documentations on CharField`_ for additional information.
.. _docs on Charfield: https://docs.djangoproject.com/en/dev/ref/models/fields/#django.db.models.CharField
"""
super(MarkdownxFormField, self).__init__(*args, **kwargs)
if issubclass(self.widget.__class__, forms.widgets.MultiWidget):
is_markdownx_widget = any(
issubclass(item.__class__, MarkdownxWidget)
for item in getattr(self.widget, 'widgets', list())
)
if not is_markdownx_widget:
self.widget = MarkdownxWidget()
elif not issubclass(self.widget.__class__, MarkdownxWidget):
self.widget = MarkdownxWidget()
|
wuga214/Django-Wuga
|
env/lib/python2.7/site-packages/markdownx/fields.py
|
Python
|
apache-2.0
| 1,070
|
# Copyright 2012 Grid Dynamics
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import base64
import contextlib
import functools
import os
import shutil
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import fileutils
from oslo_utils import strutils
from oslo_utils import units
import six
import nova.conf
from nova import exception
from nova.i18n import _
from nova.i18n import _LE, _LI, _LW
from nova import image
from nova import keymgr
from nova import utils
from nova.virt.disk import api as disk
from nova.virt.image import model as imgmodel
from nova.virt import images
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt.storage import dmcrypt
from nova.virt.libvirt.storage import lvm
from nova.virt.libvirt.storage import rbd_utils
from nova.virt.libvirt import utils as libvirt_utils
__imagebackend_opts = [
cfg.StrOpt('images_type',
default='default',
choices=('raw', 'qcow2', 'lvm', 'rbd', 'ploop', 'default'),
help='VM Images format. If default is specified, then'
' use_cow_images flag is used instead of this one.'),
cfg.StrOpt('images_volume_group',
help='LVM Volume Group that is used for VM images, when you'
' specify images_type=lvm.'),
cfg.BoolOpt('sparse_logical_volumes',
default=False,
help='Create sparse logical volumes (with virtualsize)'
' if this flag is set to True.'),
cfg.StrOpt('images_rbd_pool',
default='rbd',
help='The RADOS pool in which rbd volumes are stored'),
cfg.StrOpt('images_rbd_ceph_conf',
default='', # default determined by librados
help='Path to the ceph configuration file to use'),
cfg.StrOpt('hw_disk_discard',
choices=('ignore', 'unmap'),
help='Discard option for nova managed disks. Need'
' Libvirt(1.0.6) Qemu1.5 (raw format) Qemu1.6(qcow2'
' format)'),
]
CONF = nova.conf.CONF
CONF.register_opts(__imagebackend_opts, 'libvirt')
CONF.import_opt('rbd_user', 'nova.virt.libvirt.volume.net', group='libvirt')
CONF.import_opt('rbd_secret_uuid', 'nova.virt.libvirt.volume.net',
group='libvirt')
LOG = logging.getLogger(__name__)
IMAGE_API = image.API()
@six.add_metaclass(abc.ABCMeta)
class Image(object):
SUPPORTS_CLONE = False
def __init__(self, source_type, driver_format, is_block_dev=False):
"""Image initialization.
:source_type: block or file
:driver_format: raw or qcow2
:is_block_dev:
"""
if (CONF.ephemeral_storage_encryption.enabled and
not self._supports_encryption()):
raise exception.NovaException(_('Incompatible settings: '
'ephemeral storage encryption is supported '
'only for LVM images.'))
self.source_type = source_type
self.driver_format = driver_format
self.driver_io = None
self.discard_mode = CONF.libvirt.hw_disk_discard
self.is_block_dev = is_block_dev
self.preallocate = False
# NOTE(dripton): We store lines of json (path, disk_format) in this
# file, for some image types, to prevent attacks based on changing the
# disk_format.
self.disk_info_path = None
# NOTE(mikal): We need a lock directory which is shared along with
# instance files, to cover the scenario where multiple compute nodes
# are trying to create a base file at the same time
self.lock_path = os.path.join(CONF.instances_path, 'locks')
def _supports_encryption(self):
"""Used to test that the backend supports encryption.
Override in the subclass if backend supports encryption.
"""
return False
@abc.abstractmethod
def create_image(self, prepare_template, base, size, *args, **kwargs):
"""Create image from template.
Contains specific behavior for each image type.
:prepare_template: function, that creates template.
Should accept `target` argument.
:base: Template name
:size: Size of created image in bytes
"""
pass
@abc.abstractmethod
def resize_image(self, size):
"""Resize image to size (in bytes).
:size: Desired size of image in bytes
"""
pass
def libvirt_info(self, disk_bus, disk_dev, device_type, cache_mode,
extra_specs, hypervisor_version):
"""Get `LibvirtConfigGuestDisk` filled for this image.
:disk_dev: Disk bus device name
:disk_bus: Disk bus type
:device_type: Device type for this image.
:cache_mode: Caching mode for this image
:extra_specs: Instance type extra specs dict.
:hypervisor_version: the hypervisor version
"""
info = vconfig.LibvirtConfigGuestDisk()
info.source_type = self.source_type
info.source_device = device_type
info.target_bus = disk_bus
info.target_dev = disk_dev
info.driver_cache = cache_mode
info.driver_discard = self.discard_mode
info.driver_io = self.driver_io
info.driver_format = self.driver_format
driver_name = libvirt_utils.pick_disk_driver_name(hypervisor_version,
self.is_block_dev)
info.driver_name = driver_name
info.source_path = self.path
self.disk_qos(info, extra_specs)
return info
def disk_qos(self, info, extra_specs):
tune_items = ['disk_read_bytes_sec', 'disk_read_iops_sec',
'disk_write_bytes_sec', 'disk_write_iops_sec',
'disk_total_bytes_sec', 'disk_total_iops_sec']
for key, value in six.iteritems(extra_specs):
scope = key.split(':')
if len(scope) > 1 and scope[0] == 'quota':
if scope[1] in tune_items:
setattr(info, scope[1], value)
def libvirt_fs_info(self, target, driver_type=None):
"""Get `LibvirtConfigGuestFilesys` filled for this image.
:target: target directory inside a container.
:driver_type: filesystem driver type, can be loop
nbd or ploop.
"""
info = vconfig.LibvirtConfigGuestFilesys()
info.target_dir = target
if self.is_block_dev:
info.source_type = "block"
info.source_dev = self.path
else:
info.source_type = "file"
info.source_file = self.path
info.driver_format = self.driver_format
if driver_type:
info.driver_type = driver_type
else:
if self.driver_format == "raw":
info.driver_type = "loop"
else:
info.driver_type = "nbd"
return info
def check_image_exists(self):
return os.path.exists(self.path)
def cache(self, fetch_func, filename, size=None, *args, **kwargs):
"""Creates image from template.
Ensures that template and image not already exists.
Ensures that base directory exists.
Synchronizes on template fetching.
:fetch_func: Function that creates the base image
Should accept `target` argument.
:filename: Name of the file in the image directory
:size: Size of created image in bytes (optional)
"""
@utils.synchronized(filename, external=True, lock_path=self.lock_path)
def fetch_func_sync(target, *args, **kwargs):
# The image may have been fetched while a subsequent
# call was waiting to obtain the lock.
if not os.path.exists(target):
fetch_func(target=target, *args, **kwargs)
base_dir = os.path.join(CONF.instances_path,
CONF.image_cache_subdirectory_name)
if not os.path.exists(base_dir):
fileutils.ensure_tree(base_dir)
base = os.path.join(base_dir, filename)
if not self.check_image_exists() or not os.path.exists(base):
self.create_image(fetch_func_sync, base, size,
*args, **kwargs)
if size:
if size > self.get_disk_size(base):
self.resize_image(size)
if (self.preallocate and self._can_fallocate() and
os.access(self.path, os.W_OK)):
utils.execute('fallocate', '-n', '-l', size, self.path)
def _can_fallocate(self):
"""Check once per class, whether fallocate(1) is available,
and that the instances directory supports fallocate(2).
"""
can_fallocate = getattr(self.__class__, 'can_fallocate', None)
if can_fallocate is None:
test_path = self.path + '.fallocate_test'
_out, err = utils.trycmd('fallocate', '-l', '1', test_path)
fileutils.delete_if_exists(test_path)
can_fallocate = not err
self.__class__.can_fallocate = can_fallocate
if not can_fallocate:
LOG.warning(_LW('Unable to preallocate image at path: '
'%(path)s'), {'path': self.path})
return can_fallocate
def verify_base_size(self, base, size, base_size=0):
"""Check that the base image is not larger than size.
Since images can't be generally shrunk, enforce this
constraint taking account of virtual image size.
"""
# Note(pbrady): The size and min_disk parameters of a glance
# image are checked against the instance size before the image
# is even downloaded from glance, but currently min_disk is
# adjustable and doesn't currently account for virtual disk size,
# so we need this extra check here.
# NOTE(cfb): Having a flavor that sets the root size to 0 and having
# nova effectively ignore that size and use the size of the
# image is considered a feature at this time, not a bug.
if size is None:
return
if size and not base_size:
base_size = self.get_disk_size(base)
if size < base_size:
msg = _LE('%(base)s virtual size %(base_size)s '
'larger than flavor root disk size %(size)s')
LOG.error(msg % {'base': base,
'base_size': base_size,
'size': size})
raise exception.FlavorDiskSmallerThanImage(
flavor_size=size, image_size=base_size)
def get_disk_size(self, name):
return disk.get_disk_size(name)
def snapshot_extract(self, target, out_format):
raise NotImplementedError()
def _get_driver_format(self):
return self.driver_format
def resolve_driver_format(self):
"""Return the driver format for self.path.
First checks self.disk_info_path for an entry.
If it's not there, calls self._get_driver_format(), and then
stores the result in self.disk_info_path
See https://bugs.launchpad.net/nova/+bug/1221190
"""
def _dict_from_line(line):
if not line:
return {}
try:
return jsonutils.loads(line)
except (TypeError, ValueError) as e:
msg = (_("Could not load line %(line)s, got error "
"%(error)s") %
{'line': line, 'error': e})
raise exception.InvalidDiskInfo(reason=msg)
@utils.synchronized(self.disk_info_path, external=False,
lock_path=self.lock_path)
def write_to_disk_info_file():
# Use os.open to create it without group or world write permission.
fd = os.open(self.disk_info_path, os.O_RDONLY | os.O_CREAT, 0o644)
with os.fdopen(fd, "r") as disk_info_file:
line = disk_info_file.read().rstrip()
dct = _dict_from_line(line)
if self.path in dct:
msg = _("Attempted overwrite of an existing value.")
raise exception.InvalidDiskInfo(reason=msg)
dct.update({self.path: driver_format})
tmp_path = self.disk_info_path + ".tmp"
fd = os.open(tmp_path, os.O_WRONLY | os.O_CREAT, 0o644)
with os.fdopen(fd, "w") as tmp_file:
tmp_file.write('%s\n' % jsonutils.dumps(dct))
os.rename(tmp_path, self.disk_info_path)
try:
if (self.disk_info_path is not None and
os.path.exists(self.disk_info_path)):
with open(self.disk_info_path) as disk_info_file:
line = disk_info_file.read().rstrip()
dct = _dict_from_line(line)
for path, driver_format in six.iteritems(dct):
if path == self.path:
return driver_format
driver_format = self._get_driver_format()
if self.disk_info_path is not None:
fileutils.ensure_tree(os.path.dirname(self.disk_info_path))
write_to_disk_info_file()
except OSError as e:
raise exception.DiskInfoReadWriteFail(reason=six.text_type(e))
return driver_format
@staticmethod
def is_shared_block_storage():
"""True if the backend puts images on a shared block storage."""
return False
@staticmethod
def is_file_in_instance_path():
"""True if the backend stores images in files under instance path."""
return False
def clone(self, context, image_id_or_uri):
"""Clone an image.
Note that clone operation is backend-dependent. The backend may ask
the image API for a list of image "locations" and select one or more
of those locations to clone an image from.
:param image_id_or_uri: The ID or URI of an image to clone.
:raises: exception.ImageUnacceptable if it cannot be cloned
"""
reason = _('clone() is not implemented')
raise exception.ImageUnacceptable(image_id=image_id_or_uri,
reason=reason)
def direct_snapshot(self, context, snapshot_name, image_format, image_id,
base_image_id):
"""Prepare a snapshot for direct reference from glance
:raises: exception.ImageUnacceptable if it cannot be
referenced directly in the specified image format
:returns: URL to be given to glance
"""
raise NotImplementedError(_('direct_snapshot() is not implemented'))
def cleanup_direct_snapshot(self, location, also_destroy_volume=False,
ignore_errors=False):
"""Performs any cleanup actions required after calling
direct_snapshot(), for graceful exception handling and the like.
This should be a no-op on any backend where it is not implemented.
"""
pass
def _get_lock_name(self, base):
"""Get an image's name of a base file."""
return os.path.split(base)[-1]
def get_model(self, connection):
"""Get the image information model
:returns: an instance of nova.virt.image.model.Image
"""
raise NotImplementedError()
def import_file(self, instance, local_file, remote_name):
"""Import an image from local storage into this backend.
Import a local file into the store used by this image type. Note that
this is a noop for stores using local disk (the local file is
considered "in the store").
If the image already exists it will be overridden by the new file
:param local_file: path to the file to import
:param remote_name: the name for the file in the store
"""
# NOTE(mikal): this is a noop for now for all stores except RBD, but
# we should talk about if we want this functionality for everything.
pass
def create_snap(self, name):
"""Create a snapshot on the image. A noop on backends that don't
support snapshots.
:param name: name of the snapshot
"""
pass
def remove_snap(self, name, ignore_errors=False):
"""Remove a snapshot on the image. A noop on backends that don't
support snapshots.
:param name: name of the snapshot
:param ignore_errors: don't log errors if the snapshot does not exist
"""
pass
def rollback_to_snap(self, name):
"""Rollback the image to the named snapshot. A noop on backends that
don't support snapshots.
:param name: name of the snapshot
"""
pass
class Raw(Image):
def __init__(self, instance=None, disk_name=None, path=None):
self.disk_name = disk_name
super(Raw, self).__init__("file", "raw", is_block_dev=False)
self.path = (path or
os.path.join(libvirt_utils.get_instance_path(instance),
disk_name))
self.preallocate = (
strutils.to_slug(CONF.preallocate_images) == 'space')
if self.preallocate:
self.driver_io = "native"
self.disk_info_path = os.path.join(os.path.dirname(self.path),
'disk.info')
self.correct_format()
def _get_driver_format(self):
try:
data = images.qemu_img_info(self.path)
return data.file_format
except exception.InvalidDiskInfo as e:
LOG.info(_LI('Failed to get image info from path %(path)s; '
'error: %(error)s'),
{'path': self.path,
'error': e})
return 'raw'
def _supports_encryption(self):
# NOTE(dgenin): Kernel, ramdisk and disk.config are fetched using
# the Raw backend regardless of which backend is configured for
# ephemeral storage. Encryption for the Raw backend is not yet
# implemented so this loophole is necessary to allow other
# backends already supporting encryption to function. This can
# be removed once encryption for Raw is implemented.
if self.disk_name not in ['kernel', 'ramdisk', 'disk.config']:
return False
else:
return True
def correct_format(self):
if os.path.exists(self.path):
self.driver_format = self.resolve_driver_format()
def create_image(self, prepare_template, base, size, *args, **kwargs):
filename = self._get_lock_name(base)
@utils.synchronized(filename, external=True, lock_path=self.lock_path)
def copy_raw_image(base, target, size):
libvirt_utils.copy_image(base, target)
if size:
# class Raw is misnamed, format may not be 'raw' in all cases
image = imgmodel.LocalFileImage(target,
self.driver_format)
disk.extend(image, size)
generating = 'image_id' not in kwargs
if generating:
if not self.check_image_exists():
# Generating image in place
prepare_template(target=self.path, *args, **kwargs)
else:
if not os.path.exists(base):
prepare_template(target=base, max_size=size, *args, **kwargs)
# NOTE(mikal): Update the mtime of the base file so the image
# cache manager knows it is in use.
libvirt_utils.update_mtime(base)
self.verify_base_size(base, size)
if not os.path.exists(self.path):
with fileutils.remove_path_on_error(self.path):
copy_raw_image(base, self.path, size)
self.correct_format()
def resize_image(self, size):
image = imgmodel.LocalFileImage(self.path, self.driver_format)
disk.extend(image, size)
def snapshot_extract(self, target, out_format):
images.convert_image(self.path, target, self.driver_format, out_format)
@staticmethod
def is_file_in_instance_path():
return True
def get_model(self, connection):
return imgmodel.LocalFileImage(self.path,
imgmodel.FORMAT_RAW)
class Qcow2(Image):
def __init__(self, instance=None, disk_name=None, path=None):
super(Qcow2, self).__init__("file", "qcow2", is_block_dev=False)
self.path = (path or
os.path.join(libvirt_utils.get_instance_path(instance),
disk_name))
self.preallocate = (
strutils.to_slug(CONF.preallocate_images) == 'space')
if self.preallocate:
self.driver_io = "native"
self.disk_info_path = os.path.join(os.path.dirname(self.path),
'disk.info')
self.resolve_driver_format()
def create_image(self, prepare_template, base, size, *args, **kwargs):
filename = self._get_lock_name(base)
@utils.synchronized(filename, external=True, lock_path=self.lock_path)
def copy_qcow2_image(base, target, size):
# TODO(pbrady): Consider copying the cow image here
# with preallocation=metadata set for performance reasons.
# This would be keyed on a 'preallocate_images' setting.
libvirt_utils.create_cow_image(base, target)
if size:
image = imgmodel.LocalFileImage(target, imgmodel.FORMAT_QCOW2)
disk.extend(image, size)
# Download the unmodified base image unless we already have a copy.
if not os.path.exists(base):
prepare_template(target=base, max_size=size, *args, **kwargs)
# NOTE(ankit): Update the mtime of the base file so the image
# cache manager knows it is in use.
libvirt_utils.update_mtime(base)
self.verify_base_size(base, size)
legacy_backing_size = None
legacy_base = base
# Determine whether an existing qcow2 disk uses a legacy backing by
# actually looking at the image itself and parsing the output of the
# backing file it expects to be using.
if os.path.exists(self.path):
backing_path = libvirt_utils.get_disk_backing_file(self.path)
if backing_path is not None:
backing_file = os.path.basename(backing_path)
backing_parts = backing_file.rpartition('_')
if backing_file != backing_parts[-1] and \
backing_parts[-1].isdigit():
legacy_backing_size = int(backing_parts[-1])
legacy_base += '_%d' % legacy_backing_size
legacy_backing_size *= units.Gi
# Create the legacy backing file if necessary.
if legacy_backing_size:
if not os.path.exists(legacy_base):
with fileutils.remove_path_on_error(legacy_base):
libvirt_utils.copy_image(base, legacy_base)
image = imgmodel.LocalFileImage(legacy_base,
imgmodel.FORMAT_QCOW2)
disk.extend(image, legacy_backing_size)
if not os.path.exists(self.path):
with fileutils.remove_path_on_error(self.path):
copy_qcow2_image(base, self.path, size)
def resize_image(self, size):
image = imgmodel.LocalFileImage(self.path, imgmodel.FORMAT_QCOW2)
disk.extend(image, size)
def snapshot_extract(self, target, out_format):
libvirt_utils.extract_snapshot(self.path, 'qcow2',
target,
out_format)
@staticmethod
def is_file_in_instance_path():
return True
def get_model(self, connection):
return imgmodel.LocalFileImage(self.path,
imgmodel.FORMAT_QCOW2)
class Lvm(Image):
@staticmethod
def escape(filename):
return filename.replace('_', '__')
def __init__(self, instance=None, disk_name=None, path=None):
super(Lvm, self).__init__("block", "raw", is_block_dev=True)
self.ephemeral_key_uuid = instance.get('ephemeral_key_uuid')
if self.ephemeral_key_uuid is not None:
self.key_manager = keymgr.API()
else:
self.key_manager = None
if path:
self.path = path
if self.ephemeral_key_uuid is None:
info = lvm.volume_info(path)
self.vg = info['VG']
self.lv = info['LV']
else:
self.vg = CONF.libvirt.images_volume_group
else:
if not CONF.libvirt.images_volume_group:
raise RuntimeError(_('You should specify'
' images_volume_group'
' flag to use LVM images.'))
self.vg = CONF.libvirt.images_volume_group
self.lv = '%s_%s' % (instance.uuid,
self.escape(disk_name))
if self.ephemeral_key_uuid is None:
self.path = os.path.join('/dev', self.vg, self.lv)
else:
self.lv_path = os.path.join('/dev', self.vg, self.lv)
self.path = '/dev/mapper/' + dmcrypt.volume_name(self.lv)
# TODO(pbrady): possibly deprecate libvirt.sparse_logical_volumes
# for the more general preallocate_images
self.sparse = CONF.libvirt.sparse_logical_volumes
self.preallocate = not self.sparse
if not self.sparse:
self.driver_io = "native"
def _supports_encryption(self):
return True
def _can_fallocate(self):
return False
def create_image(self, prepare_template, base, size, *args, **kwargs):
def encrypt_lvm_image():
dmcrypt.create_volume(self.path.rpartition('/')[2],
self.lv_path,
CONF.ephemeral_storage_encryption.cipher,
CONF.ephemeral_storage_encryption.key_size,
key)
filename = self._get_lock_name(base)
@utils.synchronized(filename, external=True, lock_path=self.lock_path)
def create_lvm_image(base, size):
base_size = disk.get_disk_size(base)
self.verify_base_size(base, size, base_size=base_size)
resize = size > base_size
size = size if resize else base_size
lvm.create_volume(self.vg, self.lv,
size, sparse=self.sparse)
if self.ephemeral_key_uuid is not None:
encrypt_lvm_image()
# NOTE: by calling convert_image_unsafe here we're
# telling qemu-img convert to do format detection on the input,
# because we don't know what the format is. For example,
# we might have downloaded a qcow2 image, or created an
# ephemeral filesystem locally, we just don't know here. Having
# audited this, all current sources have been sanity checked,
# either because they're locally generated, or because they have
# come from images.fetch_to_raw. However, this is major code smell.
images.convert_image_unsafe(base, self.path, self.driver_format,
run_as_root=True)
if resize:
disk.resize2fs(self.path, run_as_root=True)
generated = 'ephemeral_size' in kwargs
if self.ephemeral_key_uuid is not None:
if 'context' in kwargs:
try:
# NOTE(dgenin): Key manager corresponding to the
# specific backend catches and reraises an
# an exception if key retrieval fails.
key = self.key_manager.get_key(kwargs['context'],
self.ephemeral_key_uuid).get_encoded()
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to retrieve ephemeral encryption"
" key"))
else:
raise exception.NovaException(
_("Instance disk to be encrypted but no context provided"))
# Generate images with specified size right on volume
if generated and size:
lvm.create_volume(self.vg, self.lv,
size, sparse=self.sparse)
with self.remove_volume_on_error(self.path):
if self.ephemeral_key_uuid is not None:
encrypt_lvm_image()
prepare_template(target=self.path, *args, **kwargs)
else:
if not os.path.exists(base):
prepare_template(target=base, max_size=size, *args, **kwargs)
with self.remove_volume_on_error(self.path):
create_lvm_image(base, size)
# NOTE(nic): Resizing the image is already handled in create_image(),
# and migrate/resize is not supported with LVM yet, so this is a no-op
def resize_image(self, size):
pass
@contextlib.contextmanager
def remove_volume_on_error(self, path):
try:
yield
except Exception:
with excutils.save_and_reraise_exception():
if self.ephemeral_key_uuid is None:
lvm.remove_volumes([path])
else:
dmcrypt.delete_volume(path.rpartition('/')[2])
lvm.remove_volumes([self.lv_path])
def snapshot_extract(self, target, out_format):
images.convert_image(self.path, target, self.driver_format,
out_format, run_as_root=True)
def get_model(self, connection):
return imgmodel.LocalBlockImage(self.path)
class Rbd(Image):
SUPPORTS_CLONE = True
def __init__(self, instance=None, disk_name=None, path=None, **kwargs):
super(Rbd, self).__init__("block", "rbd", is_block_dev=False)
if path:
try:
self.rbd_name = path.split('/')[1]
except IndexError:
raise exception.InvalidDevicePath(path=path)
else:
self.rbd_name = '%s_%s' % (instance.uuid, disk_name)
if not CONF.libvirt.images_rbd_pool:
raise RuntimeError(_('You should specify'
' images_rbd_pool'
' flag to use rbd images.'))
self.pool = CONF.libvirt.images_rbd_pool
self.discard_mode = CONF.libvirt.hw_disk_discard
self.rbd_user = CONF.libvirt.rbd_user
self.ceph_conf = CONF.libvirt.images_rbd_ceph_conf
self.driver = rbd_utils.RBDDriver(
pool=self.pool,
ceph_conf=self.ceph_conf,
rbd_user=self.rbd_user)
self.path = 'rbd:%s/%s' % (self.pool, self.rbd_name)
if self.rbd_user:
self.path += ':id=' + self.rbd_user
if self.ceph_conf:
self.path += ':conf=' + self.ceph_conf
def libvirt_info(self, disk_bus, disk_dev, device_type, cache_mode,
extra_specs, hypervisor_version):
"""Get `LibvirtConfigGuestDisk` filled for this image.
:disk_dev: Disk bus device name
:disk_bus: Disk bus type
:device_type: Device type for this image.
:cache_mode: Caching mode for this image
:extra_specs: Instance type extra specs dict.
"""
info = vconfig.LibvirtConfigGuestDisk()
hosts, ports = self.driver.get_mon_addrs()
info.source_device = device_type
info.driver_format = 'raw'
info.driver_cache = cache_mode
info.driver_discard = self.discard_mode
info.target_bus = disk_bus
info.target_dev = disk_dev
info.source_type = 'network'
info.source_protocol = 'rbd'
info.source_name = '%s/%s' % (self.pool, self.rbd_name)
info.source_hosts = hosts
info.source_ports = ports
auth_enabled = (CONF.libvirt.rbd_user is not None)
if CONF.libvirt.rbd_secret_uuid:
info.auth_secret_uuid = CONF.libvirt.rbd_secret_uuid
auth_enabled = True # Force authentication locally
if CONF.libvirt.rbd_user:
info.auth_username = CONF.libvirt.rbd_user
if auth_enabled:
info.auth_secret_type = 'ceph'
info.auth_secret_uuid = CONF.libvirt.rbd_secret_uuid
self.disk_qos(info, extra_specs)
return info
def _can_fallocate(self):
return False
def check_image_exists(self):
return self.driver.exists(self.rbd_name)
def get_disk_size(self, name):
"""Returns the size of the virtual disk in bytes.
The name argument is ignored since this backend already knows
its name, and callers may pass a non-existent local file path.
"""
return self.driver.size(self.rbd_name)
def create_image(self, prepare_template, base, size, *args, **kwargs):
if not self.check_image_exists():
prepare_template(target=base, max_size=size, *args, **kwargs)
# prepare_template() may have cloned the image into a new rbd
# image already instead of downloading it locally
if not self.check_image_exists():
self.driver.import_image(base, self.rbd_name)
self.verify_base_size(base, size)
if size and size > self.get_disk_size(self.rbd_name):
self.driver.resize(self.rbd_name, size)
def resize_image(self, size):
self.driver.resize(self.rbd_name, size)
def snapshot_extract(self, target, out_format):
images.convert_image(self.path, target, 'raw', out_format)
@staticmethod
def is_shared_block_storage():
return True
def clone(self, context, image_id_or_uri):
image_meta = IMAGE_API.get(context, image_id_or_uri,
include_locations=True)
locations = image_meta['locations']
LOG.debug('Image locations are: %(locs)s' % {'locs': locations})
if image_meta.get('disk_format') not in ['raw', 'iso']:
reason = _('Image is not raw format')
raise exception.ImageUnacceptable(image_id=image_id_or_uri,
reason=reason)
for location in locations:
if self.driver.is_cloneable(location, image_meta):
return self.driver.clone(location, self.rbd_name)
reason = _('No image locations are accessible')
raise exception.ImageUnacceptable(image_id=image_id_or_uri,
reason=reason)
def get_model(self, connection):
secret = None
if CONF.libvirt.rbd_secret_uuid:
secretobj = connection.secretLookupByUUIDString(
CONF.libvirt.rbd_secret_uuid)
secret = base64.b64encode(secretobj.value())
hosts, ports = self.driver.get_mon_addrs()
servers = [str(':'.join(k)) for k in zip(hosts, ports)]
return imgmodel.RBDImage(self.rbd_name,
self.pool,
self.rbd_user,
secret,
servers)
def import_file(self, instance, local_file, remote_name):
name = '%s_%s' % (instance.uuid, remote_name)
if self.check_image_exists():
self.driver.remove_image(name)
self.driver.import_image(local_file, name)
def create_snap(self, name):
return self.driver.create_snap(self.rbd_name, name)
def remove_snap(self, name, ignore_errors=False):
return self.driver.remove_snap(self.rbd_name, name, ignore_errors)
def rollback_to_snap(self, name):
return self.driver.rollback_to_snap(self.rbd_name, name)
def _get_parent_pool(self, context, base_image_id, fsid):
parent_pool = None
try:
# The easy way -- the image is an RBD clone, so use the parent
# images' storage pool
parent_pool, _im, _snap = self.driver.parent_info(self.rbd_name)
except exception.ImageUnacceptable:
# The hard way -- the image is itself a parent, so ask Glance
# where it came from
LOG.debug('No parent info for %s; asking the Image API where its '
'store is', base_image_id)
try:
image_meta = IMAGE_API.get(context, base_image_id,
include_locations=True)
except Exception as e:
LOG.debug('Unable to get image %(image_id)s; error: %(error)s',
{'image_id': base_image_id, 'error': e})
image_meta = {}
# Find the first location that is in the same RBD cluster
for location in image_meta.get('locations', []):
try:
parent_fsid, parent_pool, _im, _snap = \
self.driver.parse_url(location['url'])
if parent_fsid == fsid:
break
else:
parent_pool = None
except exception.ImageUnacceptable:
continue
if not parent_pool:
raise exception.ImageUnacceptable(
_('Cannot determine the parent storage pool for %s; '
'cannot determine where to store images') %
base_image_id)
return parent_pool
def direct_snapshot(self, context, snapshot_name, image_format,
image_id, base_image_id):
"""Creates an RBD snapshot directly.
"""
fsid = self.driver.get_fsid()
# NOTE(nic): Nova has zero comprehension of how Glance's image store
# is configured, but we can infer what storage pool Glance is using
# by looking at the parent image. If using authx, write access should
# be enabled on that pool for the Nova user
parent_pool = self._get_parent_pool(context, base_image_id, fsid)
# Snapshot the disk and clone it into Glance's storage pool. librbd
# requires that snapshots be set to "protected" in order to clone them
self.driver.create_snap(self.rbd_name, snapshot_name, protect=True)
location = {'url': 'rbd://%(fsid)s/%(pool)s/%(image)s/%(snap)s' %
dict(fsid=fsid,
pool=self.pool,
image=self.rbd_name,
snap=snapshot_name)}
try:
self.driver.clone(location, image_id, dest_pool=parent_pool)
# Flatten the image, which detaches it from the source snapshot
self.driver.flatten(image_id, pool=parent_pool)
finally:
# all done with the source snapshot, clean it up
self.cleanup_direct_snapshot(location)
# Glance makes a protected snapshot called 'snap' on uploaded
# images and hands it out, so we'll do that too. The name of
# the snapshot doesn't really matter, this just uses what the
# glance-store rbd backend sets (which is not configurable).
self.driver.create_snap(image_id, 'snap', pool=parent_pool,
protect=True)
return ('rbd://%(fsid)s/%(pool)s/%(image)s/snap' %
dict(fsid=fsid, pool=parent_pool, image=image_id))
def cleanup_direct_snapshot(self, location, also_destroy_volume=False,
ignore_errors=False):
"""Unprotects and destroys the name snapshot.
With also_destroy_volume=True, it will also cleanup/destroy the parent
volume. This is useful for cleaning up when the target volume fails
to snapshot properly.
"""
if location:
_fsid, _pool, _im, _snap = self.driver.parse_url(location['url'])
self.driver.remove_snap(_im, _snap, pool=_pool, force=True,
ignore_errors=ignore_errors)
if also_destroy_volume:
self.driver.destroy_volume(_im, pool=_pool)
class Ploop(Image):
def __init__(self, instance=None, disk_name=None, path=None):
super(Ploop, self).__init__("file", "ploop", is_block_dev=False)
self.path = (path or
os.path.join(libvirt_utils.get_instance_path(instance),
disk_name))
self.resolve_driver_format()
def create_image(self, prepare_template, base, size, *args, **kwargs):
filename = os.path.split(base)[-1]
@utils.synchronized(filename, external=True, lock_path=self.lock_path)
def create_ploop_image(base, target, size):
image_path = os.path.join(target, "root.hds")
libvirt_utils.copy_image(base, image_path)
utils.execute('ploop', 'restore-descriptor', '-f', self.pcs_format,
target, image_path)
if size:
dd_path = os.path.join(self.path, "DiskDescriptor.xml")
utils.execute('ploop', 'grow', '-s', '%dK' % (size >> 10),
dd_path, run_as_root=True)
if not os.path.exists(self.path):
if CONF.force_raw_images:
self.pcs_format = "raw"
else:
image_meta = IMAGE_API.get(kwargs["context"],
kwargs["image_id"])
format = image_meta.get("disk_format")
if format == "ploop":
self.pcs_format = "expanded"
elif format == "raw":
self.pcs_format = "raw"
else:
reason = _("PCS doesn't support images in %s format."
" You should either set force_raw_images=True"
" in config or upload an image in ploop"
" or raw format.") % format
raise exception.ImageUnacceptable(
image_id=kwargs["image_id"],
reason=reason)
if not os.path.exists(base):
prepare_template(target=base, max_size=size, *args, **kwargs)
self.verify_base_size(base, size)
if os.path.exists(self.path):
return
fileutils.ensure_tree(self.path)
remove_func = functools.partial(fileutils.delete_if_exists,
remove=shutil.rmtree)
with fileutils.remove_path_on_error(self.path, remove=remove_func):
create_ploop_image(base, self.path, size)
def resize_image(self, size):
dd_path = os.path.join(self.path, "DiskDescriptor.xml")
utils.execute('ploop', 'grow', '-s', '%dK' % (size >> 10), dd_path,
run_as_root=True)
def snapshot_extract(self, target, out_format):
img_path = os.path.join(self.path, "root.hds")
libvirt_utils.extract_snapshot(img_path,
'parallels',
target,
out_format)
class Backend(object):
def __init__(self, use_cow):
self.BACKEND = {
'raw': Raw,
'qcow2': Qcow2,
'lvm': Lvm,
'rbd': Rbd,
'ploop': Ploop,
'default': Qcow2 if use_cow else Raw
}
def backend(self, image_type=None):
if not image_type:
image_type = CONF.libvirt.images_type
image = self.BACKEND.get(image_type)
if not image:
raise RuntimeError(_('Unknown image_type=%s') % image_type)
return image
def image(self, instance, disk_name, image_type=None):
"""Constructs image for selected backend
:instance: Instance name.
:name: Image name.
:image_type: Image type.
Optional, is CONF.libvirt.images_type by default.
"""
backend = self.backend(image_type)
return backend(instance=instance, disk_name=disk_name)
def snapshot(self, instance, disk_path, image_type=None):
"""Returns snapshot for given image
:path: path to image
:image_type: type of image
"""
backend = self.backend(image_type)
return backend(instance=instance, path=disk_path)
|
cernops/nova
|
nova/virt/libvirt/imagebackend.py
|
Python
|
apache-2.0
| 45,765
|
#!/usr/bin/env vpython
# Copyright 2020 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import logging
import sys
import unittest
import test_env
test_env.setup_test_env()
from proto import realms_config_pb2
from realms import permissions
from test_support import test_case
class BuilderTest(test_case.TestCase):
def setUp(self):
super(BuilderTest, self).setUp()
self.builder = permissions.Builder('rev')
self.permission = self.builder.permission
self.include = self.builder.include
self.role = self.builder.role
def check(self, perms=None, roles=None):
db = self.builder.finish()
self.assertEquals(db.revision, 'rev')
if perms is not None:
self.assertEquals(sorted(db.permissions), perms)
if roles is not None:
self.assertEquals(
db.roles,
{n: permissions.Role(n, perms) for n, perms in roles.items()})
def test_empty(self):
self.check([], {})
def test_permissions_only(self):
self.permission('luci.dev.p1')
self.permission('luci.dev.p2')
self.permission('luci.dev.p1') # redeclaration is ok
self.check(perms=['luci.dev.p1', 'luci.dev.p2'])
def test_bad_permission_name(self):
with self.assertRaises(ValueError):
self.permission('luci.dev')
with self.assertRaises(ValueError):
self.permission('luci.dev.something.something')
def test_simple_role(self):
self.role('role/dev.a', [
self.permission('luci.dev.p1'),
self.permission('luci.dev.p2'),
])
self.check(
perms=['luci.dev.p1', 'luci.dev.p2'],
roles={'role/dev.a': ('luci.dev.p1', 'luci.dev.p2')})
def test_complex_role(self):
self.role('role/dev.a', [
self.permission('luci.dev.p1'),
self.permission('luci.dev.p2'),
])
self.role('role/dev.b', [
self.permission('luci.dev.p2'),
self.permission('luci.dev.p3'),
self.include('role/dev.a'),
])
self.check(
perms=['luci.dev.p1', 'luci.dev.p2', 'luci.dev.p3'],
roles={
'role/dev.a': ('luci.dev.p1', 'luci.dev.p2'),
'role/dev.b': ('luci.dev.p1', 'luci.dev.p2', 'luci.dev.p3'),
})
def test_role_redeclaration(self):
self.role('role/dev.a', [])
with self.assertRaises(ValueError):
self.role('role/dev.a', [])
def test_bad_role_name(self):
with self.assertRaises(ValueError):
self.role('zzz/role', [])
def test_referencing_undeclared_role(self):
with self.assertRaises(ValueError):
self.include('role/zzz')
def test_non_idempotent_perm(self):
self.permission('luci.dev.p1')
self.permission('luci.dev.p1')
with self.assertRaises(ValueError):
self.permission('luci.dev.p1', internal=True)
class HardcodedDBTest(test_case.TestCase):
def test_can_be_built(self):
db = permissions.db()
for b in db.implicit_root_bindings('proj'):
self.assertIsInstance(b, realms_config_pb2.Binding)
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.FATAL)
unittest.main()
|
luci/luci-py
|
appengine/auth_service/realms/permissions_test.py
|
Python
|
apache-2.0
| 3,254
|
import matplotlib.pyplot as plt
import numpy as np
import sklearn
from sklearn import svm, datasets
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import average_precision_score
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import label_binarize
from sklearn.multiclass import OneVsRestClassifier
import pdb
from sklearn import ensemble
from sklearn import neighbors
from sklearn import tree
#import data
data_path = "joined_matrix_split.txt"
mat = np.loadtxt(data_path)
features = mat[50000:60000, 0:40]
features = sklearn.preprocessing.scale(features, axis=1)
output_raw = mat[50000:60000, -1]
output = sklearn.preprocessing.binarize(output_raw)
# Split into training and test
random_state = np.random.RandomState(0)
X_train, X_test, y_train, y_test = train_test_split(features, output, test_size=.5,
random_state=random_state)
n_classes = 1
#run classifier
classifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,
random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision = dict()
recall = dict()
average_precision = dict()
precision[0], recall[0], _ = precision_recall_curve(y_test, y_score)
average_precision[0] = average_precision_score(y_test, y_score)
# now do rbf kernel
classifier = OneVsRestClassifier(svm.SVC(kernel='rbf', probability=True, random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[1], recall[1], _ = precision_recall_curve(y_test, y_score)
average_precision[1] = average_precision_score(y_test, y_score)
# now do adaboost
model = ensemble.AdaBoostClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[2], recall[2], _ = precision_recall_curve(y_test, y_score)
average_precision[2] = average_precision_score(y_test, y_score)
"""
pdb.set_trace()
# now do kNN classifier
model = neighbors.KNeighborsClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[3], recall[3], _ = precision_recall_curve(y_test, y_score)
average_precision[3] = average_precision_score(y_test, y_score)
# now do random forrest
model = ensemble.RandomForestClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[4], recall[4], _ = precision_recall_curve(y_test, y_score)
average_precision[4] = average_precision_score(y_test, y_score)
# now do decision trees
model = tree.DecisionTreeClassifier()
classifier = OneVsRestClassifier(model)
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision[5], recall[5], _ = precision_recall_curve(y_test, y_score)
average_precision[5] = average_precision_score(y_test, y_score)
# Plot Precision-Recall curve
#plt.clf()
#plt.plot(recall[0], precision[0], label='Precision-Recall curve')
#plt.xlabel('Recall')
#plt.ylabel('Precision')
#plt.ylim([0.0, 1.05])
#plt.xlim([0.0, 1.0])
#plt.title('Linear SVC Precision vs. Recall: AUC={0:0.2f}'.format(average_precision[0]))
#plt.legend(loc="lower left")
#plt.show()
"""
kernel = {}
kernel[0] = "linear SVC"
kernel[1] = "rbf SVC"
kernel[2] = "AdaBoost classifier"
#kernel[3] = "k-nearest-neighbors classifier"
#kernel[4] = "random forest classifier"
#kernel[5] = "decision tree classifier"
# Plot Precision-Recall curve for each class
plt.clf()
for i in range(3):
plt.plot(recall[i], precision[i],
label='Precision-recall curve of {0} (area = {1:0.2f})'
''.format(kernel[i], average_precision[i]))
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.title('Classification on aggregate crime; precision vs. recall')
plt.legend(loc="lower right")
plt.show()
|
JamesWo/cs194-16-data_manatees
|
precision_recall.py
|
Python
|
apache-2.0
| 4,204
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from ..azure_common import BaseTest, arm_template
class IoTHubTest(BaseTest):
def setUp(self):
super(IoTHubTest, self).setUp()
def test_iot_hub_schema_validate(self):
with self.sign_out_patch():
p = self.load_policy({
'name': 'test-iot-hub-compliance',
'resource': 'azure.iothub'
}, validate=True)
self.assertTrue(p)
@arm_template('iothub.json')
def test_find_by_name(self):
p = self.load_policy({
'name': 'test-azure-iothub',
'resource': 'azure.iothub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'glob',
'value': 'cctest-iothub*'}],
})
resources = p.run()
self.assertEqual(len(resources), 1)
|
thisisshi/cloud-custodian
|
tools/c7n_azure/tests_azure/tests_resources/test_iot_hub.py
|
Python
|
apache-2.0
| 921
|
"""Support for RainMachine devices."""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
ATTR_ATTRIBUTION, CONF_BINARY_SENSORS, CONF_IP_ADDRESS, CONF_PASSWORD,
CONF_PORT, CONF_SCAN_INTERVAL, CONF_SENSORS, CONF_SSL,
CONF_MONITORED_CONDITIONS, CONF_SWITCHES)
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from .config_flow import configured_instances
from .const import (
DATA_CLIENT, DEFAULT_PORT, DEFAULT_SCAN_INTERVAL, DEFAULT_SSL, DOMAIN)
REQUIREMENTS = ['regenmaschine==1.2.0']
_LOGGER = logging.getLogger(__name__)
DATA_LISTENER = 'listener'
PROGRAM_UPDATE_TOPIC = '{0}_program_update'.format(DOMAIN)
SENSOR_UPDATE_TOPIC = '{0}_data_update'.format(DOMAIN)
ZONE_UPDATE_TOPIC = '{0}_zone_update'.format(DOMAIN)
CONF_CONTROLLERS = 'controllers'
CONF_PROGRAM_ID = 'program_id'
CONF_SECONDS = 'seconds'
CONF_ZONE_ID = 'zone_id'
CONF_ZONE_RUN_TIME = 'zone_run_time'
DEFAULT_ATTRIBUTION = 'Data provided by Green Electronics LLC'
DEFAULT_ICON = 'mdi:water'
DEFAULT_ZONE_RUN = 60 * 10
TYPE_FREEZE = 'freeze'
TYPE_FREEZE_PROTECTION = 'freeze_protection'
TYPE_FREEZE_TEMP = 'freeze_protect_temp'
TYPE_HOT_DAYS = 'extra_water_on_hot_days'
TYPE_HOURLY = 'hourly'
TYPE_MONTH = 'month'
TYPE_RAINDELAY = 'raindelay'
TYPE_RAINSENSOR = 'rainsensor'
TYPE_WEEKDAY = 'weekday'
BINARY_SENSORS = {
TYPE_FREEZE: ('Freeze Restrictions', 'mdi:cancel'),
TYPE_FREEZE_PROTECTION: ('Freeze Protection', 'mdi:weather-snowy'),
TYPE_HOT_DAYS: ('Extra Water on Hot Days', 'mdi:thermometer-lines'),
TYPE_HOURLY: ('Hourly Restrictions', 'mdi:cancel'),
TYPE_MONTH: ('Month Restrictions', 'mdi:cancel'),
TYPE_RAINDELAY: ('Rain Delay Restrictions', 'mdi:cancel'),
TYPE_RAINSENSOR: ('Rain Sensor Restrictions', 'mdi:cancel'),
TYPE_WEEKDAY: ('Weekday Restrictions', 'mdi:cancel'),
}
SENSORS = {
TYPE_FREEZE_TEMP: ('Freeze Protect Temperature', 'mdi:thermometer', '°C'),
}
BINARY_SENSOR_SCHEMA = vol.Schema({
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(BINARY_SENSORS)):
vol.All(cv.ensure_list, [vol.In(BINARY_SENSORS)])
})
SENSOR_SCHEMA = vol.Schema({
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)):
vol.All(cv.ensure_list, [vol.In(SENSORS)])
})
SERVICE_PAUSE_WATERING = vol.Schema({
vol.Required(CONF_SECONDS): cv.positive_int,
})
SERVICE_START_PROGRAM_SCHEMA = vol.Schema({
vol.Required(CONF_PROGRAM_ID): cv.positive_int,
})
SERVICE_START_ZONE_SCHEMA = vol.Schema({
vol.Required(CONF_ZONE_ID): cv.positive_int,
vol.Optional(CONF_ZONE_RUN_TIME, default=DEFAULT_ZONE_RUN):
cv.positive_int,
})
SERVICE_STOP_PROGRAM_SCHEMA = vol.Schema({
vol.Required(CONF_PROGRAM_ID): cv.positive_int,
})
SERVICE_STOP_ZONE_SCHEMA = vol.Schema({
vol.Required(CONF_ZONE_ID): cv.positive_int,
})
SWITCH_SCHEMA = vol.Schema({vol.Optional(CONF_ZONE_RUN_TIME): cv.positive_int})
CONTROLLER_SCHEMA = vol.Schema({
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL):
cv.time_period,
vol.Optional(CONF_BINARY_SENSORS, default={}): BINARY_SENSOR_SCHEMA,
vol.Optional(CONF_SENSORS, default={}): SENSOR_SCHEMA,
vol.Optional(CONF_SWITCHES, default={}): SWITCH_SCHEMA,
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_CONTROLLERS):
vol.All(cv.ensure_list, [CONTROLLER_SCHEMA]),
}),
}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass, config):
"""Set up the RainMachine component."""
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_CLIENT] = {}
hass.data[DOMAIN][DATA_LISTENER] = {}
if DOMAIN not in config:
return True
conf = config[DOMAIN]
for controller in conf[CONF_CONTROLLERS]:
if controller[CONF_IP_ADDRESS] in configured_instances(hass):
continue
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={'source': SOURCE_IMPORT},
data=controller))
return True
async def async_setup_entry(hass, config_entry):
"""Set up RainMachine as config entry."""
from regenmaschine import login
from regenmaschine.errors import RainMachineError
websession = aiohttp_client.async_get_clientsession(hass)
try:
client = await login(
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PASSWORD],
websession,
port=config_entry.data[CONF_PORT],
ssl=config_entry.data[CONF_SSL])
rainmachine = RainMachine(
client,
config_entry.data.get(CONF_BINARY_SENSORS, {}).get(
CONF_MONITORED_CONDITIONS, list(BINARY_SENSORS)),
config_entry.data.get(CONF_SENSORS, {}).get(
CONF_MONITORED_CONDITIONS, list(SENSORS)),
config_entry.data.get(CONF_ZONE_RUN_TIME, DEFAULT_ZONE_RUN))
await rainmachine.async_update()
except RainMachineError as err:
_LOGGER.error('An error occurred: %s', err)
raise ConfigEntryNotReady
hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = rainmachine
for component in ('binary_sensor', 'sensor', 'switch'):
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(
config_entry, component))
async def refresh(event_time):
"""Refresh RainMachine sensor data."""
_LOGGER.debug('Updating RainMachine sensor data')
await rainmachine.async_update()
async_dispatcher_send(hass, SENSOR_UPDATE_TOPIC)
hass.data[DOMAIN][DATA_LISTENER][
config_entry.entry_id] = async_track_time_interval(
hass,
refresh,
timedelta(seconds=config_entry.data[CONF_SCAN_INTERVAL]))
async def pause_watering(service):
"""Pause watering for a set number of seconds."""
await rainmachine.client.watering.pause_all(service.data[CONF_SECONDS])
async_dispatcher_send(hass, PROGRAM_UPDATE_TOPIC)
async def start_program(service):
"""Start a particular program."""
await rainmachine.client.programs.start(service.data[CONF_PROGRAM_ID])
async_dispatcher_send(hass, PROGRAM_UPDATE_TOPIC)
async def start_zone(service):
"""Start a particular zone for a certain amount of time."""
await rainmachine.client.zones.start(
service.data[CONF_ZONE_ID], service.data[CONF_ZONE_RUN_TIME])
async_dispatcher_send(hass, ZONE_UPDATE_TOPIC)
async def stop_all(service):
"""Stop all watering."""
await rainmachine.client.watering.stop_all()
async_dispatcher_send(hass, PROGRAM_UPDATE_TOPIC)
async def stop_program(service):
"""Stop a program."""
await rainmachine.client.programs.stop(service.data[CONF_PROGRAM_ID])
async_dispatcher_send(hass, PROGRAM_UPDATE_TOPIC)
async def stop_zone(service):
"""Stop a zone."""
await rainmachine.client.zones.stop(service.data[CONF_ZONE_ID])
async_dispatcher_send(hass, ZONE_UPDATE_TOPIC)
async def unpause_watering(service):
"""Unpause watering."""
await rainmachine.client.watering.unpause_all()
async_dispatcher_send(hass, PROGRAM_UPDATE_TOPIC)
for service, method, schema in [
('pause_watering', pause_watering, SERVICE_PAUSE_WATERING),
('start_program', start_program, SERVICE_START_PROGRAM_SCHEMA),
('start_zone', start_zone, SERVICE_START_ZONE_SCHEMA),
('stop_all', stop_all, {}),
('stop_program', stop_program, SERVICE_STOP_PROGRAM_SCHEMA),
('stop_zone', stop_zone, SERVICE_STOP_ZONE_SCHEMA),
('unpause_watering', unpause_watering, {}),
]:
hass.services.async_register(DOMAIN, service, method, schema=schema)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)
remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(
config_entry.entry_id)
remove_listener()
for component in ('binary_sensor', 'sensor', 'switch'):
await hass.config_entries.async_forward_entry_unload(
config_entry, component)
return True
class RainMachine:
"""Define a generic RainMachine object."""
def __init__(
self, client, binary_sensor_conditions, sensor_conditions,
default_zone_runtime):
"""Initialize."""
self.binary_sensor_conditions = binary_sensor_conditions
self.client = client
self.default_zone_runtime = default_zone_runtime
self.device_mac = self.client.mac
self.restrictions = {}
self.sensor_conditions = sensor_conditions
async def async_update(self):
"""Update sensor/binary sensor data."""
self.restrictions.update({
'current': await self.client.restrictions.current(),
'global': await self.client.restrictions.universal()
})
class RainMachineEntity(Entity):
"""Define a generic RainMachine entity."""
def __init__(self, rainmachine):
"""Initialize."""
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._dispatcher_handlers = []
self._name = None
self.rainmachine = rainmachine
@property
def device_info(self):
"""Return device registry information for this entity."""
return {
'identifiers': {
(DOMAIN, self.rainmachine.client.mac)
},
'name': self.rainmachine.client.name,
'manufacturer': 'RainMachine',
'model': 'Version {0} (API: {1})'.format(
self.rainmachine.client.hardware_version,
self.rainmachine.client.api_version),
'sw_version': self.rainmachine.client.software_version,
}
@property
def device_state_attributes(self) -> dict:
"""Return the state attributes."""
return self._attrs
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
async def async_will_remove_from_hass(self):
"""Disconnect dispatcher listener when removed."""
for handler in self._dispatcher_handlers:
handler()
|
HydrelioxGitHub/home-assistant
|
homeassistant/components/rainmachine/__init__.py
|
Python
|
apache-2.0
| 10,913
|
# Copyright (c) 2011 OpenStack Foundation
# Copyright (c) 2012 Cloudscaling
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.i18n import _LW
from nova.openstack.common import log as logging
from nova.scheduler import filters
from nova.scheduler.filters import utils
LOG = logging.getLogger(__name__)
ram_allocation_ratio_opt = cfg.FloatOpt('ram_allocation_ratio',
default=1.5,
help='Virtual ram to physical ram allocation ratio which affects '
'all ram filters. This configuration specifies a global ratio '
'for RamFilter. For AggregateRamFilter, it will fall back to '
'this configuration value if no per-aggregate setting found.')
CONF = cfg.CONF
CONF.register_opt(ram_allocation_ratio_opt)
class BaseRamFilter(filters.BaseHostFilter):
def _get_ram_allocation_ratio(self, host_state, filter_properties):
raise NotImplementedError
def host_passes(self, host_state, filter_properties):
"""Only return hosts with sufficient available RAM."""
instance_type = filter_properties.get('instance_type')
requested_ram = instance_type['memory_mb']
free_ram_mb = host_state.free_ram_mb
total_usable_ram_mb = host_state.total_usable_ram_mb
ram_allocation_ratio = self._get_ram_allocation_ratio(host_state,
filter_properties)
memory_mb_limit = total_usable_ram_mb * ram_allocation_ratio
used_ram_mb = total_usable_ram_mb - free_ram_mb
usable_ram = memory_mb_limit - used_ram_mb
if not usable_ram >= requested_ram:
LOG.debug("%(host_state)s does not have %(requested_ram)s MB "
"usable ram, it only has %(usable_ram)s MB usable ram.",
{'host_state': host_state,
'requested_ram': requested_ram,
'usable_ram': usable_ram})
return False
# save oversubscription limit for compute node to test against:
host_state.limits['memory_mb'] = memory_mb_limit
return True
class RamFilter(BaseRamFilter):
"""Ram Filter with over subscription flag."""
ram_allocation_ratio = CONF.ram_allocation_ratio
def _get_ram_allocation_ratio(self, host_state, filter_properties):
return self.ram_allocation_ratio
class AggregateRamFilter(BaseRamFilter):
"""AggregateRamFilter with per-aggregate ram subscription flag.
Fall back to global ram_allocation_ratio if no per-aggregate setting found.
"""
def _get_ram_allocation_ratio(self, host_state, filter_properties):
# TODO(uni): DB query in filter is a performance hit, especially for
# system with lots of hosts. Will need a general solution here to fix
# all filters with aggregate DB call things.
aggregate_vals = utils.aggregate_values_from_db(
filter_properties['context'],
host_state.host,
'ram_allocation_ratio')
try:
ratio = utils.validate_num_values(
aggregate_vals, CONF.ram_allocation_ratio, cast_to=float)
except ValueError as e:
LOG.warning(_LW("Could not decode ram_allocation_ratio: '%s'"), e)
ratio = CONF.ram_allocation_ratio
return ratio
|
orbitfp7/nova
|
nova/scheduler/filters/ram_filter.py
|
Python
|
apache-2.0
| 3,894
|
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import sys
import mock
import netaddr
from oslo.config import cfg
import testtools
from neutron.agent.linux import async_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ovs_lib
from neutron.agent.linux import utils
from neutron.common import constants as n_const
from neutron.openstack.common import log
from neutron.plugins.common import constants as p_const
from neutron.plugins.openvswitch.agent import ovs_neutron_agent
from neutron.plugins.openvswitch.common import constants
from neutron.tests import base
NOTIFIER = ('neutron.plugins.openvswitch.'
'ovs_neutron_plugin.AgentNotifierApi')
OVS_LINUX_KERN_VERS_WITHOUT_VXLAN = "3.12.0"
FAKE_MAC = '00:11:22:33:44:55'
FAKE_IP1 = '10.0.0.1'
FAKE_IP2 = '10.0.0.2'
class CreateAgentConfigMap(base.BaseTestCase):
def test_create_agent_config_map_succeeds(self):
self.assertTrue(ovs_neutron_agent.create_agent_config_map(cfg.CONF))
def test_create_agent_config_map_fails_for_invalid_tunnel_config(self):
# An ip address is required for tunneling but there is no default,
# verify this for both gre and vxlan tunnels.
cfg.CONF.set_override('tunnel_types', [p_const.TYPE_GRE],
group='AGENT')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
cfg.CONF.set_override('tunnel_types', [p_const.TYPE_VXLAN],
group='AGENT')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def test_create_agent_config_map_enable_tunneling(self):
# Verify setting only enable_tunneling will default tunnel_type to GRE
cfg.CONF.set_override('tunnel_types', None, group='AGENT')
cfg.CONF.set_override('enable_tunneling', True, group='OVS')
cfg.CONF.set_override('local_ip', '10.10.10.10', group='OVS')
cfgmap = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
self.assertEqual(cfgmap['tunnel_types'], [p_const.TYPE_GRE])
def test_create_agent_config_map_fails_no_local_ip(self):
# An ip address is required for tunneling but there is no default
cfg.CONF.set_override('enable_tunneling', True, group='OVS')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def test_create_agent_config_map_fails_for_invalid_tunnel_type(self):
cfg.CONF.set_override('tunnel_types', ['foobar'], group='AGENT')
with testtools.ExpectedException(ValueError):
ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def test_create_agent_config_map_multiple_tunnel_types(self):
cfg.CONF.set_override('local_ip', '10.10.10.10', group='OVS')
cfg.CONF.set_override('tunnel_types', [p_const.TYPE_GRE,
p_const.TYPE_VXLAN], group='AGENT')
cfgmap = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
self.assertEqual(cfgmap['tunnel_types'],
[p_const.TYPE_GRE, p_const.TYPE_VXLAN])
def test_create_agent_config_map_enable_distributed_routing(self):
self.addCleanup(cfg.CONF.reset)
# Verify setting only enable_tunneling will default tunnel_type to GRE
cfg.CONF.set_override('enable_distributed_routing', True,
group='AGENT')
cfgmap = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
self.assertEqual(cfgmap['enable_distributed_routing'], True)
class TestOvsNeutronAgent(base.BaseTestCase):
def setUp(self):
super(TestOvsNeutronAgent, self).setUp()
notifier_p = mock.patch(NOTIFIER)
notifier_cls = notifier_p.start()
self.notifier = mock.Mock()
notifier_cls.return_value = self.notifier
cfg.CONF.set_default('firewall_driver',
'neutron.agent.firewall.NoopFirewallDriver',
group='SECURITYGROUP')
kwargs = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
class MockFixedIntervalLoopingCall(object):
def __init__(self, f):
self.f = f
def start(self, interval=0):
self.f()
with contextlib.nested(
mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
'OVSNeutronAgent.setup_integration_br',
return_value=mock.Mock()),
mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
'OVSNeutronAgent.setup_ancillary_bridges',
return_value=[]),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'create'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_secure_mode'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'get_local_port_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.utils.get_interface_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.ovs_lib.'
'get_bridges'),
mock.patch('neutron.openstack.common.loopingcall.'
'FixedIntervalLoopingCall',
new=MockFixedIntervalLoopingCall)):
self.agent = ovs_neutron_agent.OVSNeutronAgent(**kwargs)
self.agent.tun_br = mock.Mock()
self.agent.sg_agent = mock.Mock()
def _mock_port_bound(self, ofport=None, new_local_vlan=None,
old_local_vlan=None):
port = mock.Mock()
port.ofport = ofport
net_uuid = 'my-net-uuid'
fixed_ips = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}]
if old_local_vlan is not None:
self.agent.local_vlan_map[net_uuid] = (
ovs_neutron_agent.LocalVLANMapping(
old_local_vlan, None, None, None))
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute', return_value=True),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val', return_value=str(old_local_vlan)),
mock.patch.object(self.agent.int_br, 'delete_flows')
) as (set_ovs_db_func, get_ovs_db_func, delete_flows_func):
self.agent.port_bound(port, net_uuid, 'local', None, None,
fixed_ips, "compute:None", False)
get_ovs_db_func.assert_called_once_with("Port", mock.ANY, "tag")
if new_local_vlan != old_local_vlan:
set_ovs_db_func.assert_called_once_with(
"Port", mock.ANY, "tag", str(new_local_vlan))
if ofport != -1:
delete_flows_func.assert_called_once_with(in_port=port.ofport)
else:
self.assertFalse(delete_flows_func.called)
else:
self.assertFalse(set_ovs_db_func.called)
self.assertFalse(delete_flows_func.called)
def _setup_for_dvr_test(self, ofport=10):
self._port = mock.Mock()
self._port.ofport = ofport
self._port.vif_id = "1234-5678-90"
self.agent.enable_distributed_routing = True
self.agent.enable_tunneling = True
self.agent.patch_tun_ofport = 1
self.agent.patch_int_ofport = 2
self.agent.dvr_agent.local_ports = {}
self.agent.local_vlan_map = {}
self.agent.dvr_agent.enable_distributed_routing = True
self.agent.dvr_agent.enable_tunneling = True
self.agent.dvr_agent.patch_tun_ofport = 1
self.agent.dvr_agent.patch_int_ofport = 2
self.agent.dvr_agent.tun_br = mock.Mock()
self.agent.dvr_agent.local_dvr_map = {}
self.agent.dvr_agent.registered_dvr_macs = set()
self.agent.dvr_agent.dvr_mac_address = 'aa:22:33:44:55:66'
self._net_uuid = 'my-net-uuid'
self._fixed_ips = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}]
self._compute_port = mock.Mock()
self._compute_port.ofport = 20
self._compute_port.vif_id = "1234-5678-91"
self._old_local_vlan = None
self._compute_fixed_ips = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.3'}]
def test_port_bound_deletes_flows_for_valid_ofport(self):
self._mock_port_bound(ofport=1, new_local_vlan=1)
def test_port_bound_ignores_flows_for_invalid_ofport(self):
self._mock_port_bound(ofport=-1, new_local_vlan=1)
def test_port_bound_does_not_rewire_if_already_bound(self):
self._mock_port_bound(ofport=-1, new_local_vlan=1, old_local_vlan=1)
def test_port_bound_for_dvr_interface(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(delete_flows_int_fn.called)
def _test_port_bound_for_dvr(self, device_owner):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_subnet_for_dvr',
return_value={
'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.agent.port_bound(self._compute_port, self._net_uuid,
'vxlan', None, None,
self._compute_fixed_ips,
device_owner, False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
def test_port_bound_for_dvr_with_compute_ports(self):
self._test_port_bound_for_dvr(device_owner="compute:None")
def test_port_bound_for_dvr_with_lbaas_vip_ports(self):
self._test_port_bound_for_dvr(
device_owner=n_const.DEVICE_OWNER_LOADBALANCER)
def test_port_bound_for_dvr_with_csnat_ports(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_ROUTER_SNAT,
False)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
def test_treat_devices_removed_for_dvr_interface(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(delete_flows_int_fn.called)
with contextlib.nested(
mock.patch.object(self.agent, 'reclaim_local_vlan'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=None),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br,
'delete_flows')) as (reclaim_vlan_fn,
update_dev_down_fn,
delete_flows_int_fn,
delete_flows_tun_fn):
self.agent.treat_devices_removed([self._port.vif_id])
self.assertTrue(delete_flows_int_fn.called)
self.assertTrue(delete_flows_tun_fn.called)
def _test_treat_devices_removed_for_dvr(self, device_owner):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_DVR_INTERFACE,
False)
self.agent.port_bound(self._compute_port,
self._net_uuid, 'vxlan',
None, None,
self._compute_fixed_ips,
device_owner, False)
self.assertTrue(add_flow_tun_fn.called)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
with contextlib.nested(
mock.patch.object(self.agent, 'reclaim_local_vlan'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=None),
mock.patch.object(self.agent.dvr_agent.int_br,
'delete_flows')) as (reclaim_vlan_fn,
update_dev_down_fn,
delete_flows_int_fn):
self.agent.treat_devices_removed([self._compute_port.vif_id])
self.assertTrue(delete_flows_int_fn.called)
def test_treat_devices_removed_for_dvr_with_compute_ports(self):
self._test_treat_devices_removed_for_dvr(device_owner="compute:None")
def test_treat_devices_removed_for_dvr_with_lbaas_vip_ports(self):
self._test_treat_devices_removed_for_dvr(
device_owner=n_const.DEVICE_OWNER_LOADBALANCER)
def test_treat_devices_removed_for_dvr_csnat_port(self, ofport=10):
self._setup_for_dvr_test()
with mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute',
return_value=True):
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val',
return_value=str(self._old_local_vlan)),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc, 'get_subnet_for_dvr',
return_value={'gateway_ip': '1.1.1.1',
'cidr': '1.1.1.0/24',
'gateway_mac': 'aa:bb:cc:11:22:33'}),
mock.patch.object(self.agent.dvr_agent.plugin_rpc,
'get_ports_on_host_by_subnet',
return_value=[]),
mock.patch.object(self.agent.dvr_agent.int_br,
'get_vif_port_by_id',
return_value=self._port),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (get_ovs_db_func, get_subnet_fn, get_cphost_fn,
get_vif_fn, add_flow_int_fn, delete_flows_int_fn,
add_flow_tun_fn, delete_flows_tun_fn):
self.agent.port_bound(
self._port, self._net_uuid, 'vxlan',
None, None, self._fixed_ips,
n_const.DEVICE_OWNER_ROUTER_SNAT,
False)
self.assertTrue(add_flow_int_fn.called)
self.assertTrue(delete_flows_int_fn.called)
with contextlib.nested(
mock.patch.object(self.agent, 'reclaim_local_vlan'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=None),
mock.patch.object(self.agent.dvr_agent.int_br,
'delete_flows')) as (reclaim_vlan_fn,
update_dev_down_fn,
delete_flows_int_fn):
self.agent.treat_devices_removed([self._port.vif_id])
self.assertTrue(delete_flows_int_fn.called)
def test_setup_dvr_flows_on_int_br(self):
self._setup_for_dvr_test()
with contextlib.nested(
mock.patch.object(
self.agent.dvr_agent.plugin_rpc,
'get_dvr_mac_address_by_host',
return_value={'host': 'cn1',
'mac_address': 'aa:bb:cc:dd:ee:ff'}),
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br,
'remove_all_flows'),
mock.patch.object(
self.agent.dvr_agent.plugin_rpc,
'get_dvr_mac_address_list',
return_value=[{'host': 'cn1',
'mac_address': 'aa:bb:cc:dd:ee:ff'},
{'host': 'cn2',
'mac_address': '11:22:33:44:55:66'}])) as \
(get_subnet_fn, get_cphost_fn, get_vif_fn,
add_flow_fn, delete_flows_fn):
self.agent.dvr_agent.setup_dvr_flows_on_integ_tun_br()
def _test_port_dead(self, cur_tag=None):
port = mock.Mock()
port.ofport = 1
with contextlib.nested(
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_db_attribute', return_value=True),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'db_get_val', return_value=cur_tag),
mock.patch.object(self.agent.int_br, 'add_flow')
) as (set_ovs_db_func, get_ovs_db_func, add_flow_func):
self.agent.port_dead(port)
get_ovs_db_func.assert_called_once_with("Port", mock.ANY, "tag")
if cur_tag == ovs_neutron_agent.DEAD_VLAN_TAG:
self.assertFalse(set_ovs_db_func.called)
self.assertFalse(add_flow_func.called)
else:
set_ovs_db_func.assert_called_once_with(
"Port", mock.ANY, "tag", str(ovs_neutron_agent.DEAD_VLAN_TAG))
add_flow_func.assert_called_once_with(
priority=2, in_port=port.ofport, actions="drop")
def test_port_dead(self):
self._test_port_dead()
def test_port_dead_with_port_already_dead(self):
self._test_port_dead(ovs_neutron_agent.DEAD_VLAN_TAG)
def mock_scan_ports(self, vif_port_set=None, registered_ports=None,
updated_ports=None, port_tags_dict=None):
if port_tags_dict is None: # Because empty dicts evaluate as False.
port_tags_dict = {}
with contextlib.nested(
mock.patch.object(self.agent.int_br, 'get_vif_port_set',
return_value=vif_port_set),
mock.patch.object(self.agent.int_br, 'get_port_tag_dict',
return_value=port_tags_dict)
):
return self.agent.scan_ports(registered_ports, updated_ports)
def test_scan_ports_returns_current_only_for_unchanged_ports(self):
vif_port_set = set([1, 3])
registered_ports = set([1, 3])
expected = {'current': vif_port_set}
actual = self.mock_scan_ports(vif_port_set, registered_ports)
self.assertEqual(expected, actual)
def test_scan_ports_returns_port_changes(self):
vif_port_set = set([1, 3])
registered_ports = set([1, 2])
expected = dict(current=vif_port_set, added=set([3]), removed=set([2]))
actual = self.mock_scan_ports(vif_port_set, registered_ports)
self.assertEqual(expected, actual)
def _test_scan_ports_with_updated_ports(self, updated_ports):
vif_port_set = set([1, 3, 4])
registered_ports = set([1, 2, 4])
expected = dict(current=vif_port_set, added=set([3]),
removed=set([2]), updated=set([4]))
actual = self.mock_scan_ports(vif_port_set, registered_ports,
updated_ports)
self.assertEqual(expected, actual)
def test_scan_ports_finds_known_updated_ports(self):
self._test_scan_ports_with_updated_ports(set([4]))
def test_scan_ports_ignores_unknown_updated_ports(self):
# the port '5' was not seen on current ports. Hence it has either
# never been wired or already removed and should be ignored
self._test_scan_ports_with_updated_ports(set([4, 5]))
def test_scan_ports_ignores_updated_port_if_removed(self):
vif_port_set = set([1, 3])
registered_ports = set([1, 2])
updated_ports = set([1, 2])
expected = dict(current=vif_port_set, added=set([3]),
removed=set([2]), updated=set([1]))
actual = self.mock_scan_ports(vif_port_set, registered_ports,
updated_ports)
self.assertEqual(expected, actual)
def test_scan_ports_no_vif_changes_returns_updated_port_only(self):
vif_port_set = set([1, 2, 3])
registered_ports = set([1, 2, 3])
updated_ports = set([2])
expected = dict(current=vif_port_set, updated=set([2]))
actual = self.mock_scan_ports(vif_port_set, registered_ports,
updated_ports)
self.assertEqual(expected, actual)
def test_update_ports_returns_changed_vlan(self):
br = ovs_lib.OVSBridge('br-int', 'sudo')
mac = "ca:fe:de:ad:be:ef"
port = ovs_lib.VifPort(1, 1, 1, mac, br)
lvm = ovs_neutron_agent.LocalVLANMapping(
1, '1', None, 1, {port.vif_id: port})
local_vlan_map = {'1': lvm}
vif_port_set = set([1, 3])
registered_ports = set([1, 2])
port_tags_dict = {1: []}
expected = dict(
added=set([3]), current=vif_port_set,
removed=set([2]), updated=set([1])
)
with mock.patch.dict(self.agent.local_vlan_map, local_vlan_map):
actual = self.mock_scan_ports(
vif_port_set, registered_ports, port_tags_dict=port_tags_dict)
self.assertEqual(expected, actual)
def test_treat_devices_added_returns_raises_for_missing_device(self):
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
side_effect=Exception()),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=mock.Mock())):
self.assertRaises(
ovs_neutron_agent.DeviceListRetrievalError,
self.agent.treat_devices_added_or_updated, [{}], False)
def _mock_treat_devices_added_updated(self, details, port, func_name):
"""Mock treat devices added or updated.
:param details: the details to return for the device
:param port: the port that get_vif_port_by_id should return
:param func_name: the function that should be called
:returns: whether the named function was called
"""
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
return_value=[details]),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=port),
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down'),
mock.patch.object(self.agent, func_name)
) as (get_dev_fn, get_vif_func, upd_dev_up, upd_dev_down, func):
skip_devs = self.agent.treat_devices_added_or_updated([{}], False)
# The function should not raise
self.assertFalse(skip_devs)
return func.called
def test_treat_devices_added_updated_ignores_invalid_ofport(self):
port = mock.Mock()
port.ofport = -1
self.assertFalse(self._mock_treat_devices_added_updated(
mock.MagicMock(), port, 'port_dead'))
def test_treat_devices_added_updated_marks_unknown_port_as_dead(self):
port = mock.Mock()
port.ofport = 1
self.assertTrue(self._mock_treat_devices_added_updated(
mock.MagicMock(), port, 'port_dead'))
def test_treat_devices_added_does_not_process_missing_port(self):
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'get_device_details'),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=None)
) as (get_dev_fn, get_vif_func):
self.assertFalse(get_dev_fn.called)
def test_treat_devices_added_updated_updates_known_port(self):
details = mock.MagicMock()
details.__contains__.side_effect = lambda x: True
self.assertTrue(self._mock_treat_devices_added_updated(
details, mock.Mock(), 'treat_vif_port'))
def test_treat_devices_added_updated_skips_if_port_not_found(self):
dev_mock = mock.MagicMock()
dev_mock.__getitem__.return_value = 'the_skipped_one'
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
return_value=[dev_mock]),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=None),
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down'),
mock.patch.object(self.agent, 'treat_vif_port')
) as (get_dev_fn, get_vif_func, upd_dev_up,
upd_dev_down, treat_vif_port):
skip_devs = self.agent.treat_devices_added_or_updated([{}], False)
# The function should return False for resync and no device
# processed
self.assertEqual(['the_skipped_one'], skip_devs)
self.assertFalse(treat_vif_port.called)
self.assertFalse(upd_dev_down.called)
self.assertFalse(upd_dev_up.called)
def test_treat_devices_added_updated_put_port_down(self):
fake_details_dict = {'admin_state_up': False,
'port_id': 'xxx',
'device': 'xxx',
'network_id': 'yyy',
'physical_network': 'foo',
'segmentation_id': 'bar',
'network_type': 'baz',
'fixed_ips': [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None'
}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc,
'get_devices_details_list',
return_value=[fake_details_dict]),
mock.patch.object(self.agent.int_br, 'get_vif_port_by_id',
return_value=mock.MagicMock()),
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'),
mock.patch.object(self.agent.plugin_rpc, 'update_device_down'),
mock.patch.object(self.agent, 'treat_vif_port')
) as (get_dev_fn, get_vif_func, upd_dev_up,
upd_dev_down, treat_vif_port):
skip_devs = self.agent.treat_devices_added_or_updated([{}], False)
# The function should return False for resync
self.assertFalse(skip_devs)
self.assertTrue(treat_vif_port.called)
self.assertTrue(upd_dev_down.called)
def test_treat_devices_removed_returns_true_for_missing_device(self):
with mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
side_effect=Exception()):
self.assertTrue(self.agent.treat_devices_removed([{}]))
def _mock_treat_devices_removed(self, port_exists):
details = dict(exists=port_exists)
with mock.patch.object(self.agent.plugin_rpc, 'update_device_down',
return_value=details):
with mock.patch.object(self.agent, 'port_unbound') as port_unbound:
self.assertFalse(self.agent.treat_devices_removed([{}]))
self.assertTrue(port_unbound.called)
def test_treat_devices_removed_unbinds_port(self):
self._mock_treat_devices_removed(True)
def test_treat_devices_removed_ignores_missing_port(self):
self._mock_treat_devices_removed(False)
def _test_process_network_ports(self, port_info):
with contextlib.nested(
mock.patch.object(self.agent.sg_agent, "setup_port_filters"),
mock.patch.object(self.agent, "treat_devices_added_or_updated",
return_value=[]),
mock.patch.object(self.agent, "treat_devices_removed",
return_value=False)
) as (setup_port_filters, device_added_updated, device_removed):
self.assertFalse(self.agent.process_network_ports(port_info,
False))
setup_port_filters.assert_called_once_with(
port_info['added'], port_info.get('updated', set()))
device_added_updated.assert_called_once_with(
port_info['added'] | port_info.get('updated', set()), False)
device_removed.assert_called_once_with(port_info['removed'])
def test_process_network_ports(self):
self._test_process_network_ports(
{'current': set(['tap0']),
'removed': set(['eth0']),
'added': set(['eth1'])})
def test_process_network_port_with_updated_ports(self):
self._test_process_network_ports(
{'current': set(['tap0', 'tap1']),
'updated': set(['tap1', 'eth1']),
'removed': set(['eth0']),
'added': set(['eth1'])})
def test_report_state(self):
with mock.patch.object(self.agent.state_rpc,
"report_state") as report_st:
self.agent.int_br_device_count = 5
self.agent._report_state()
report_st.assert_called_with(self.agent.context,
self.agent.agent_state)
self.assertNotIn("start_flag", self.agent.agent_state)
self.assertEqual(
self.agent.agent_state["configurations"]["devices"],
self.agent.int_br_device_count
)
def test_network_delete(self):
with contextlib.nested(
mock.patch.object(self.agent, "reclaim_local_vlan"),
mock.patch.object(self.agent.tun_br, "cleanup_tunnel_port")
) as (recl_fn, clean_tun_fn):
self.agent.network_delete("unused_context",
network_id="123")
self.assertFalse(recl_fn.called)
self.agent.local_vlan_map["123"] = "LVM object"
self.agent.network_delete("unused_context",
network_id="123")
self.assertFalse(clean_tun_fn.called)
recl_fn.assert_called_with("123")
def test_port_update(self):
port = {"id": "123",
"network_id": "124",
"admin_state_up": False}
self.agent.port_update("unused_context",
port=port,
network_type="vlan",
segmentation_id="1",
physical_network="physnet")
self.assertEqual(set(['123']), self.agent.updated_ports)
def test_setup_physical_bridges(self):
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(sys, "exit"),
mock.patch.object(utils, "execute"),
mock.patch.object(ovs_lib.OVSBridge, "remove_all_flows"),
mock.patch.object(ovs_lib.OVSBridge, "add_flow"),
mock.patch.object(ovs_lib.OVSBridge, "add_patch_port"),
mock.patch.object(ovs_lib.OVSBridge, "delete_port"),
mock.patch.object(ovs_lib.OVSBridge, "set_db_attribute"),
mock.patch.object(self.agent.int_br, "add_flow"),
mock.patch.object(self.agent.int_br, "add_patch_port"),
mock.patch.object(self.agent.int_br, "delete_port"),
mock.patch.object(self.agent.int_br, "set_db_attribute"),
) as (devex_fn, sysexit_fn, utilsexec_fn, remflows_fn, ovs_add_flow_fn,
ovs_addpatch_port_fn, ovs_delport_fn, ovs_set_attr_fn,
br_add_flow_fn, br_addpatch_port_fn, br_delport_fn,
br_set_attr_fn):
devex_fn.return_value = True
parent = mock.MagicMock()
parent.attach_mock(ovs_addpatch_port_fn, 'phy_add_patch_port')
parent.attach_mock(ovs_add_flow_fn, 'phy_add_flow')
parent.attach_mock(ovs_set_attr_fn, 'phy_set_attr')
parent.attach_mock(br_addpatch_port_fn, 'int_add_patch_port')
parent.attach_mock(br_add_flow_fn, 'int_add_flow')
parent.attach_mock(br_set_attr_fn, 'int_set_attr')
ovs_addpatch_port_fn.return_value = "phy_ofport"
br_addpatch_port_fn.return_value = "int_ofport"
self.agent.setup_physical_bridges({"physnet1": "br-eth"})
expected_calls = [
mock.call.phy_add_flow(priority=1, actions='normal'),
mock.call.int_add_patch_port('int-br-eth',
constants.NONEXISTENT_PEER),
mock.call.phy_add_patch_port('phy-br-eth',
constants.NONEXISTENT_PEER),
mock.call.int_add_flow(priority=2, in_port='int_ofport',
actions='drop'),
mock.call.phy_add_flow(priority=2, in_port='phy_ofport',
actions='drop'),
mock.call.int_set_attr('Interface', 'int-br-eth',
'options:peer', 'phy-br-eth'),
mock.call.phy_set_attr('Interface', 'phy-br-eth',
'options:peer', 'int-br-eth'),
]
parent.assert_has_calls(expected_calls)
self.assertEqual(self.agent.int_ofports["physnet1"],
"int_ofport")
self.assertEqual(self.agent.phys_ofports["physnet1"],
"phy_ofport")
def test_setup_physical_bridges_using_veth_interconnection(self):
self.agent.use_veth_interconnection = True
with contextlib.nested(
mock.patch.object(ip_lib, "device_exists"),
mock.patch.object(sys, "exit"),
mock.patch.object(utils, "execute"),
mock.patch.object(ovs_lib.OVSBridge, "remove_all_flows"),
mock.patch.object(ovs_lib.OVSBridge, "add_flow"),
mock.patch.object(ovs_lib.OVSBridge, "add_port"),
mock.patch.object(ovs_lib.OVSBridge, "delete_port"),
mock.patch.object(self.agent.int_br, "add_port"),
mock.patch.object(self.agent.int_br, "delete_port"),
mock.patch.object(ip_lib.IPWrapper, "add_veth"),
mock.patch.object(ip_lib.IpLinkCommand, "delete"),
mock.patch.object(ip_lib.IpLinkCommand, "set_up"),
mock.patch.object(ip_lib.IpLinkCommand, "set_mtu"),
mock.patch.object(ovs_lib, "get_bridges")
) as (devex_fn, sysexit_fn, utilsexec_fn, remflows_fn, ovs_addfl_fn,
ovs_addport_fn, ovs_delport_fn, br_addport_fn, br_delport_fn,
addveth_fn, linkdel_fn, linkset_fn, linkmtu_fn, get_br_fn):
devex_fn.return_value = True
parent = mock.MagicMock()
parent.attach_mock(utilsexec_fn, 'utils_execute')
parent.attach_mock(linkdel_fn, 'link_delete')
parent.attach_mock(addveth_fn, 'add_veth')
addveth_fn.return_value = (ip_lib.IPDevice("int-br-eth1"),
ip_lib.IPDevice("phy-br-eth1"))
ovs_addport_fn.return_value = "int_ofport"
br_addport_fn.return_value = "phys_veth"
get_br_fn.return_value = ["br-eth"]
self.agent.setup_physical_bridges({"physnet1": "br-eth"})
expected_calls = [mock.call.link_delete(),
mock.call.utils_execute(['/sbin/udevadm',
'settle',
'--timeout=10']),
mock.call.add_veth('int-br-eth',
'phy-br-eth')]
parent.assert_has_calls(expected_calls, any_order=False)
self.assertEqual(self.agent.int_ofports["physnet1"],
"phys_veth")
self.assertEqual(self.agent.phys_ofports["physnet1"],
"int_ofport")
def test_get_peer_name(self):
bridge1 = "A_REALLY_LONG_BRIDGE_NAME1"
bridge2 = "A_REALLY_LONG_BRIDGE_NAME2"
self.agent.use_veth_interconnection = True
self.assertEqual(len(self.agent.get_peer_name('int-', bridge1)),
n_const.DEVICE_NAME_MAX_LEN)
self.assertEqual(len(self.agent.get_peer_name('int-', bridge2)),
n_const.DEVICE_NAME_MAX_LEN)
self.assertNotEqual(self.agent.get_peer_name('int-', bridge1),
self.agent.get_peer_name('int-', bridge2))
def test_setup_tunnel_br(self):
self.tun_br = mock.Mock()
with contextlib.nested(
mock.patch.object(self.agent.int_br, "add_patch_port",
return_value=1),
mock.patch.object(self.agent.tun_br, "add_patch_port",
return_value=2),
mock.patch.object(self.agent.tun_br, "remove_all_flows"),
mock.patch.object(self.agent.tun_br, "add_flow"),
mock.patch.object(ovs_lib, "OVSBridge"),
mock.patch.object(self.agent.tun_br, "reset_bridge"),
mock.patch.object(sys, "exit")
) as (intbr_patch_fn, tunbr_patch_fn, remove_all_fn,
add_flow_fn, ovs_br_fn, reset_br_fn, exit_fn):
self.agent.setup_tunnel_br(None)
self.assertTrue(intbr_patch_fn.called)
def test_setup_tunnel_port(self):
self.agent.tun_br = mock.Mock()
self.agent.l2_pop = False
self.agent.udp_vxlan_port = 8472
self.agent.tun_br_ofports['vxlan'] = {}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, "add_tunnel_port",
return_value='6'),
mock.patch.object(self.agent.tun_br, "add_flow")
) as (add_tun_port_fn, add_flow_fn):
self.agent._setup_tunnel_port(self.agent.tun_br, 'portname',
'1.2.3.4', 'vxlan')
self.assertTrue(add_tun_port_fn.called)
def test_port_unbound(self):
with mock.patch.object(self.agent, "reclaim_local_vlan") as reclvl_fn:
self.agent.enable_tunneling = True
lvm = mock.Mock()
lvm.network_type = "gre"
lvm.vif_ports = {"vif1": mock.Mock()}
self.agent.local_vlan_map["netuid12345"] = lvm
self.agent.port_unbound("vif1", "netuid12345")
self.assertTrue(reclvl_fn.called)
reclvl_fn.called = False
lvm.vif_ports = {}
self.agent.port_unbound("vif1", "netuid12345")
self.assertEqual(reclvl_fn.call_count, 2)
lvm.vif_ports = {"vif1": mock.Mock()}
self.agent.port_unbound("vif3", "netuid12345")
self.assertEqual(reclvl_fn.call_count, 2)
def _prepare_l2_pop_ofports(self):
lvm1 = mock.Mock()
lvm1.network_type = 'gre'
lvm1.vlan = 'vlan1'
lvm1.segmentation_id = 'seg1'
lvm1.tun_ofports = set(['1'])
lvm2 = mock.Mock()
lvm2.network_type = 'gre'
lvm2.vlan = 'vlan2'
lvm2.segmentation_id = 'seg2'
lvm2.tun_ofports = set(['1', '2'])
self.agent.local_vlan_map = {'net1': lvm1, 'net2': lvm2}
self.agent.tun_br_ofports = {'gre':
{'1.1.1.1': '1', '2.2.2.2': '2'}}
self.agent.arp_responder_enabled = True
def test_fdb_ignore_network(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net3': {}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.tun_br, 'delete_flows'),
mock.patch.object(self.agent, '_setup_tunnel_port'),
mock.patch.object(self.agent, 'cleanup_tunnel_port')
) as (add_flow_fn, del_flow_fn, add_tun_fn, clean_tun_fn):
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(add_flow_fn.called)
self.assertFalse(add_tun_fn.called)
self.agent.fdb_remove(None, fdb_entry)
self.assertFalse(del_flow_fn.called)
self.assertFalse(clean_tun_fn.called)
def test_fdb_ignore_self(self):
self._prepare_l2_pop_ofports()
self.agent.local_ip = 'agent_ip'
fdb_entry = {'net2':
{'network_type': 'gre',
'segment_id': 'tun2',
'ports':
{'agent_ip':
[[FAKE_MAC, FAKE_IP1],
n_const.FLOODING_ENTRY]}}}
with mock.patch.object(self.agent.tun_br,
"deferred") as defer_fn:
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(defer_fn.called)
self.agent.fdb_remove(None, fdb_entry)
self.assertFalse(defer_fn.called)
def test_fdb_add_flows(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net1':
{'network_type': 'gre',
'segment_id': 'tun1',
'ports':
{'2.2.2.2':
[[FAKE_MAC, FAKE_IP1],
n_const.FLOODING_ENTRY]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
mock.patch.object(self.agent, '_setup_tunnel_port'),
) as (deferred_fn, do_action_flows_fn, add_tun_fn):
deferred_fn.return_value = ovs_lib.DeferredOVSBridge(
self.agent.tun_br)
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(add_tun_fn.called)
actions = (constants.ARP_RESPONDER_ACTIONS %
{'mac': netaddr.EUI(FAKE_MAC, dialect=netaddr.mac_unix),
'ip': netaddr.IPAddress(FAKE_IP1)})
expected_calls = [
mock.call('add', [dict(table=constants.ARP_RESPONDER,
priority=1,
proto='arp',
dl_vlan='vlan1',
nw_dst=FAKE_IP1,
actions=actions),
dict(table=constants.UCAST_TO_TUN,
priority=2,
dl_vlan='vlan1',
dl_dst=FAKE_MAC,
actions='strip_vlan,'
'set_tunnel:seg1,output:2')]),
mock.call('mod', [dict(table=constants.FLOOD_TO_TUN,
dl_vlan='vlan1',
actions='strip_vlan,'
'set_tunnel:seg1,output:1,2')]),
]
do_action_flows_fn.assert_has_calls(expected_calls)
def test_fdb_del_flows(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net2':
{'network_type': 'gre',
'segment_id': 'tun2',
'ports':
{'2.2.2.2':
[[FAKE_MAC, FAKE_IP1],
n_const.FLOODING_ENTRY]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
) as (deferred_fn, do_action_flows_fn):
deferred_fn.return_value = ovs_lib.DeferredOVSBridge(
self.agent.tun_br)
self.agent.fdb_remove(None, fdb_entry)
expected_calls = [
mock.call('mod', [dict(table=constants.FLOOD_TO_TUN,
dl_vlan='vlan2',
actions='strip_vlan,'
'set_tunnel:seg2,output:1')]),
mock.call('del', [dict(table=constants.ARP_RESPONDER,
proto='arp',
dl_vlan='vlan2',
nw_dst=FAKE_IP1),
dict(table=constants.UCAST_TO_TUN,
dl_vlan='vlan2',
dl_dst=FAKE_MAC),
dict(in_port='2')]),
]
do_action_flows_fn.assert_has_calls(expected_calls)
def test_fdb_add_port(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net1':
{'network_type': 'gre',
'segment_id': 'tun1',
'ports': {'1.1.1.1': [[FAKE_MAC, FAKE_IP1]]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (deferred_fn, do_action_flows_fn, add_tun_fn):
deferred_br = ovs_lib.DeferredOVSBridge(self.agent.tun_br)
deferred_fn.return_value = deferred_br
self.agent.fdb_add(None, fdb_entry)
self.assertFalse(add_tun_fn.called)
fdb_entry['net1']['ports']['10.10.10.10'] = [[FAKE_MAC, FAKE_IP1]]
self.agent.fdb_add(None, fdb_entry)
add_tun_fn.assert_called_with(
deferred_br, 'gre-0a0a0a0a', '10.10.10.10', 'gre')
def test_fdb_del_port(self):
self._prepare_l2_pop_ofports()
fdb_entry = {'net2':
{'network_type': 'gre',
'segment_id': 'tun2',
'ports': {'2.2.2.2': [n_const.FLOODING_ENTRY]}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
mock.patch.object(self.agent.tun_br, 'delete_port')
) as (deferred_fn, do_action_flows_fn, delete_port_fn):
deferred_br = ovs_lib.DeferredOVSBridge(self.agent.tun_br)
deferred_fn.return_value = deferred_br
self.agent.fdb_remove(None, fdb_entry)
delete_port_fn.assert_called_once_with('gre-02020202')
def test_fdb_update_chg_ip(self):
self._prepare_l2_pop_ofports()
fdb_entries = {'chg_ip':
{'net1':
{'agent_ip':
{'before': [[FAKE_MAC, FAKE_IP1]],
'after': [[FAKE_MAC, FAKE_IP2]]}}}}
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'deferred'),
mock.patch.object(self.agent.tun_br, 'do_action_flows'),
) as (deferred_fn, do_action_flows_fn):
deferred_br = ovs_lib.DeferredOVSBridge(self.agent.tun_br)
deferred_fn.return_value = deferred_br
self.agent.fdb_update(None, fdb_entries)
actions = (constants.ARP_RESPONDER_ACTIONS %
{'mac': netaddr.EUI(FAKE_MAC, dialect=netaddr.mac_unix),
'ip': netaddr.IPAddress(FAKE_IP2)})
expected_calls = [
mock.call('add', [dict(table=constants.ARP_RESPONDER,
priority=1,
proto='arp',
dl_vlan='vlan1',
nw_dst=FAKE_IP2,
actions=actions)]),
mock.call('del', [dict(table=constants.ARP_RESPONDER,
proto='arp',
dl_vlan='vlan1',
nw_dst=FAKE_IP1)])
]
do_action_flows_fn.assert_has_calls(expected_calls)
self.assertEqual(len(expected_calls),
len(do_action_flows_fn.mock_calls))
def test_recl_lv_port_to_preserve(self):
self._prepare_l2_pop_ofports()
self.agent.l2_pop = True
self.agent.enable_tunneling = True
with mock.patch.object(
self.agent.tun_br, 'cleanup_tunnel_port'
) as clean_tun_fn:
self.agent.reclaim_local_vlan('net1')
self.assertFalse(clean_tun_fn.called)
def test_recl_lv_port_to_remove(self):
self._prepare_l2_pop_ofports()
self.agent.l2_pop = True
self.agent.enable_tunneling = True
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'delete_port'),
mock.patch.object(self.agent.tun_br, 'delete_flows')
) as (del_port_fn, del_flow_fn):
self.agent.reclaim_local_vlan('net2')
del_port_fn.assert_called_once_with('gre-02020202')
def test_dvr_mac_address_update(self):
self._setup_for_dvr_test()
with contextlib.nested(
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows'),
#mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows')
) as (add_flow_fn, add_flow_tn_fn, del_flows_fn):
self.agent.dvr_agent.\
dvr_mac_address_update(
dvr_macs=[{'host': 'cn2',
'mac_address': 'aa:bb:cc:dd:ee:ff'}])
add_flow_tn_fn.assert_called_with(table=constants.DVR_NOT_LEARN,
priority=1,
dl_src='aa:bb:cc:dd:ee:ff',
actions="output:%s"
% self.agent.patch_int_ofport
)
self.assertFalse(del_flows_fn.called)
with contextlib.nested(
mock.patch.object(self.agent.dvr_agent.int_br, 'add_flow'),
mock.patch.object(self.agent.dvr_agent.tun_br, 'delete_flows'),
mock.patch.object(self.agent.dvr_agent.int_br, 'delete_flows')
) as (add_flow_fn, del_flows_tn_fn, del_flows_fn):
self.agent.dvr_agent.dvr_mac_address_update(dvr_macs=[])
del_flows_tn_fn.assert_called_with(table=constants.DVR_NOT_LEARN,
dl_src='aa:bb:cc:dd:ee:ff')
self.assertFalse(add_flow_fn.called)
def test_daemon_loop_uses_polling_manager(self):
with mock.patch(
'neutron.agent.linux.polling.get_polling_manager') as mock_get_pm:
with mock.patch.object(self.agent, 'rpc_loop') as mock_loop:
self.agent.daemon_loop()
mock_get_pm.assert_called_with(True, 'sudo',
constants.DEFAULT_OVSDBMON_RESPAWN)
mock_loop.assert_called_once_with(polling_manager=mock.ANY)
def test__setup_tunnel_port_error_negative(self):
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_tunnel_port',
return_value='-1'),
mock.patch.object(ovs_neutron_agent.LOG, 'error')
) as (add_tunnel_port_fn, log_error_fn):
ofport = self.agent._setup_tunnel_port(
self.agent.tun_br, 'gre-1', 'remote_ip', p_const.TYPE_GRE)
add_tunnel_port_fn.assert_called_once_with(
'gre-1', 'remote_ip', self.agent.local_ip, p_const.TYPE_GRE,
self.agent.vxlan_udp_port, self.agent.dont_fragment)
log_error_fn.assert_called_once_with(
_("Failed to set-up %(type)s tunnel port to %(ip)s"),
{'type': p_const.TYPE_GRE, 'ip': 'remote_ip'})
self.assertEqual(ofport, 0)
def test__setup_tunnel_port_error_not_int(self):
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_tunnel_port',
return_value=None),
mock.patch.object(ovs_neutron_agent.LOG, 'exception'),
mock.patch.object(ovs_neutron_agent.LOG, 'error')
) as (add_tunnel_port_fn, log_exc_fn, log_error_fn):
ofport = self.agent._setup_tunnel_port(
self.agent.tun_br, 'gre-1', 'remote_ip', p_const.TYPE_GRE)
add_tunnel_port_fn.assert_called_once_with(
'gre-1', 'remote_ip', self.agent.local_ip, p_const.TYPE_GRE,
self.agent.vxlan_udp_port, self.agent.dont_fragment)
log_exc_fn.assert_called_once_with(
_("ofport should have a value that can be "
"interpreted as an integer"))
log_error_fn.assert_called_once_with(
_("Failed to set-up %(type)s tunnel port to %(ip)s"),
{'type': p_const.TYPE_GRE, 'ip': 'remote_ip'})
self.assertEqual(ofport, 0)
def test__setup_tunnel_port_error_negative_df_disabled(self):
with contextlib.nested(
mock.patch.object(self.agent.tun_br, 'add_tunnel_port',
return_value='-1'),
mock.patch.object(ovs_neutron_agent.LOG, 'error')
) as (add_tunnel_port_fn, log_error_fn):
self.agent.dont_fragment = False
ofport = self.agent._setup_tunnel_port(
self.agent.tun_br, 'gre-1', 'remote_ip', p_const.TYPE_GRE)
add_tunnel_port_fn.assert_called_once_with(
'gre-1', 'remote_ip', self.agent.local_ip, p_const.TYPE_GRE,
self.agent.vxlan_udp_port, self.agent.dont_fragment)
log_error_fn.assert_called_once_with(
_("Failed to set-up %(type)s tunnel port to %(ip)s"),
{'type': p_const.TYPE_GRE, 'ip': 'remote_ip'})
self.assertEqual(ofport, 0)
def test_tunnel_sync_with_ovs_plugin(self):
fake_tunnel_details = {'tunnels': [{'id': '42',
'ip_address': '100.101.102.103'}]}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'tunnel_sync',
return_value=fake_tunnel_details),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (tunnel_sync_rpc_fn, _setup_tunnel_port_fn):
self.agent.tunnel_types = ['gre']
self.agent.tunnel_sync()
expected_calls = [mock.call(self.agent.tun_br, 'gre-42',
'100.101.102.103', 'gre')]
_setup_tunnel_port_fn.assert_has_calls(expected_calls)
def test_tunnel_sync_with_ml2_plugin(self):
fake_tunnel_details = {'tunnels': [{'ip_address': '100.101.31.15'}]}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'tunnel_sync',
return_value=fake_tunnel_details),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (tunnel_sync_rpc_fn, _setup_tunnel_port_fn):
self.agent.tunnel_types = ['vxlan']
self.agent.tunnel_sync()
expected_calls = [mock.call(self.agent.tun_br, 'vxlan-64651f0f',
'100.101.31.15', 'vxlan')]
_setup_tunnel_port_fn.assert_has_calls(expected_calls)
def test_tunnel_sync_invalid_ip_address(self):
fake_tunnel_details = {'tunnels': [{'ip_address': '300.300.300.300'},
{'ip_address': '100.100.100.100'}]}
with contextlib.nested(
mock.patch.object(self.agent.plugin_rpc, 'tunnel_sync',
return_value=fake_tunnel_details),
mock.patch.object(self.agent, '_setup_tunnel_port')
) as (tunnel_sync_rpc_fn, _setup_tunnel_port_fn):
self.agent.tunnel_types = ['vxlan']
self.agent.tunnel_sync()
_setup_tunnel_port_fn.assert_called_once_with(self.agent.tun_br,
'vxlan-64646464',
'100.100.100.100',
'vxlan')
def test_tunnel_update(self):
kwargs = {'tunnel_ip': '10.10.10.10',
'tunnel_type': 'gre'}
self.agent._setup_tunnel_port = mock.Mock()
self.agent.enable_tunneling = True
self.agent.tunnel_types = ['gre']
self.agent.l2_pop = False
self.agent.tunnel_update(context=None, **kwargs)
expected_calls = [
mock.call(self.agent.tun_br, 'gre-0a0a0a0a', '10.10.10.10', 'gre')]
self.agent._setup_tunnel_port.assert_has_calls(expected_calls)
def test_ovs_restart(self):
reply2 = {'current': set(['tap0']),
'added': set(['tap2']),
'removed': set([])}
reply3 = {'current': set(['tap2']),
'added': set([]),
'removed': set(['tap0'])}
with contextlib.nested(
mock.patch.object(async_process.AsyncProcess, "_spawn"),
mock.patch.object(log.ContextAdapter, 'exception'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'scan_ports'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'process_network_ports'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'check_ovs_restart'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'setup_integration_br'),
mock.patch.object(ovs_neutron_agent.OVSNeutronAgent,
'setup_physical_bridges')
) as (spawn_fn, log_exception, scan_ports, process_network_ports,
check_ovs_restart, setup_int_br, setup_phys_br):
log_exception.side_effect = Exception(
'Fake exception to get out of the loop')
scan_ports.side_effect = [reply2, reply3]
process_network_ports.side_effect = [
False, Exception('Fake exception to get out of the loop')]
check_ovs_restart.side_effect = [False, True]
# This will exit after the second loop
try:
self.agent.daemon_loop()
except Exception:
pass
scan_ports.assert_has_calls([
mock.call(set(), set()),
mock.call(set(), set())
])
process_network_ports.assert_has_calls([
mock.call({'current': set(['tap0']),
'removed': set([]),
'added': set(['tap2'])}, False),
mock.call({'current': set(['tap2']),
'removed': set(['tap0']),
'added': set([])}, True)
])
# Verify the second time through the loop we triggered an
# OVS restart and re-setup the bridges
setup_int_br.assert_has_calls([mock.call()])
setup_phys_br.assert_has_calls([mock.call({})])
class AncillaryBridgesTest(base.BaseTestCase):
def setUp(self):
super(AncillaryBridgesTest, self).setUp()
notifier_p = mock.patch(NOTIFIER)
notifier_cls = notifier_p.start()
self.notifier = mock.Mock()
notifier_cls.return_value = self.notifier
cfg.CONF.set_default('firewall_driver',
'neutron.agent.firewall.NoopFirewallDriver',
group='SECURITYGROUP')
cfg.CONF.set_override('report_interval', 0, 'AGENT')
self.kwargs = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
def _test_ancillary_bridges(self, bridges, ancillary):
device_ids = ancillary[:]
def pullup_side_effect(self, *args):
result = device_ids.pop(0)
return result
with contextlib.nested(
mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
'OVSNeutronAgent.setup_integration_br',
return_value=mock.Mock()),
mock.patch('neutron.agent.linux.utils.get_interface_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'get_local_port_mac',
return_value='00:00:00:00:00:01'),
mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
'set_secure_mode'),
mock.patch('neutron.agent.linux.ovs_lib.get_bridges',
return_value=bridges),
mock.patch(
'neutron.agent.linux.ovs_lib.get_bridge_external_bridge_id',
side_effect=pullup_side_effect)):
self.agent = ovs_neutron_agent.OVSNeutronAgent(**self.kwargs)
self.assertEqual(len(ancillary), len(self.agent.ancillary_brs))
if ancillary:
bridges = [br.br_name for br in self.agent.ancillary_brs]
for br in ancillary:
self.assertIn(br, bridges)
def test_ancillary_bridges_single(self):
bridges = ['br-int', 'br-ex']
self._test_ancillary_bridges(bridges, ['br-ex'])
def test_ancillary_bridges_none(self):
bridges = ['br-int']
self._test_ancillary_bridges(bridges, [])
def test_ancillary_bridges_multiple(self):
bridges = ['br-int', 'br-ex1', 'br-ex2']
self._test_ancillary_bridges(bridges, ['br-ex1', 'br-ex2'])
|
shakamunyi/neutron-vrrp
|
neutron/tests/unit/openvswitch/test_ovs_neutron_agent.py
|
Python
|
apache-2.0
| 71,665
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
__All__ = ['lock', 'unlock']
if os.name == 'nt':
def lock(file):
raise NotImplementedError('Windows is not supported.')
def unlock(file):
raise NotImplementedError('Windows is not supported.')
elif os.name == 'posix':
from fcntl import flock, LOCK_EX, LOCK_UN
def lock(file):
"""Lock the file in local file system."""
flock(file.fileno(), LOCK_EX)
def unlock(file):
"""Unlock the file in local file system."""
flock(file.fileno(), LOCK_UN)
else:
raise RuntimeError("File Locker only support NT and Posix platforms!")
|
chengduoZH/Paddle
|
python/paddle/fluid/contrib/slim/nas/lock.py
|
Python
|
apache-2.0
| 1,215
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builder function to construct tf-slim arg_scope for convolution, fc ops."""
import tensorflow as tf
from object_detection.protos import hyperparams_pb2
slim = tf.contrib.slim
def build(hyperparams_config, is_training):
"""Builds tf-slim arg_scope for convolution ops based on the config.
Returns an arg_scope to use for convolution ops containing weights
initializer, weights regularizer, activation function, batch norm function
and batch norm parameters based on the configuration.
Note that if the batch_norm parameteres are not specified in the config
(i.e. left to default) then batch norm is excluded from the arg_scope.
The batch norm parameters are set for updates based on `is_training` argument
and conv_hyperparams_config.batch_norm.train parameter. During training, they
are updated only if batch_norm.train parameter is true. However, during eval,
no updates are made to the batch norm variables. In both cases, their current
values are used during forward pass.
Args:
hyperparams_config: hyperparams.proto object containing
hyperparameters.
is_training: Whether the network is in training mode.
Returns:
arg_scope: tf-slim arg_scope containing hyperparameters for ops.
Raises:
ValueError: if hyperparams_config is not of type hyperparams.Hyperparams.
"""
if not isinstance(hyperparams_config,
hyperparams_pb2.Hyperparams):
raise ValueError('hyperparams_config not of type '
'hyperparams_pb.Hyperparams.')
batch_norm = None
batch_norm_params = None
if hyperparams_config.HasField('batch_norm'):
batch_norm = slim.batch_norm
batch_norm_params = _build_batch_norm_params(
hyperparams_config.batch_norm, is_training)
affected_ops = [slim.conv2d, slim.separable_conv2d, slim.conv2d_transpose]
if hyperparams_config.HasField('op') and (
hyperparams_config.op == hyperparams_pb2.Hyperparams.FC):
affected_ops = [slim.fully_connected]
with slim.arg_scope(
affected_ops,
weights_regularizer=_build_regularizer(
hyperparams_config.regularizer),
weights_initializer=_build_initializer(
hyperparams_config.initializer),
activation_fn=_build_activation_fn(hyperparams_config.activation),
normalizer_fn=batch_norm,
normalizer_params=batch_norm_params) as sc:
return sc
def _build_activation_fn(activation_fn):
"""Builds a callable activation from config.
Args:
activation_fn: hyperparams_pb2.Hyperparams.activation
Returns:
Callable activation function.
Raises:
ValueError: On unknown activation function.
"""
if activation_fn == hyperparams_pb2.Hyperparams.NONE:
return None
if activation_fn == hyperparams_pb2.Hyperparams.RELU:
return tf.nn.relu
if activation_fn == hyperparams_pb2.Hyperparams.RELU_6:
return tf.nn.relu6
raise ValueError('Unknown activation function: {}'.format(activation_fn))
def _build_regularizer(regularizer):
"""Builds a tf-slim regularizer from config.
Args:
regularizer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf-slim regularizer.
Raises:
ValueError: On unknown regularizer.
"""
regularizer_oneof = regularizer.WhichOneof('regularizer_oneof')
if regularizer_oneof == 'l1_regularizer':
return slim.l1_regularizer(scale=float(regularizer.l1_regularizer.weight))
if regularizer_oneof == 'l2_regularizer':
return slim.l2_regularizer(scale=float(regularizer.l2_regularizer.weight))
raise ValueError('Unknown regularizer function: {}'.format(regularizer_oneof))
def _build_initializer(initializer):
"""Build a tf initializer from config.
Args:
initializer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf initializer.
Raises:
ValueError: On unknown initializer.
"""
initializer_oneof = initializer.WhichOneof('initializer_oneof')
if initializer_oneof == 'truncated_normal_initializer':
return tf.truncated_normal_initializer(
mean=initializer.truncated_normal_initializer.mean,
stddev=initializer.truncated_normal_initializer.stddev)
if initializer_oneof == 'random_normal_initializer':
return tf.random_normal_initializer(
mean=initializer.random_normal_initializer.mean,
stddev=initializer.random_normal_initializer.stddev)
if initializer_oneof == 'variance_scaling_initializer':
enum_descriptor = (hyperparams_pb2.VarianceScalingInitializer.
DESCRIPTOR.enum_types_by_name['Mode'])
mode = enum_descriptor.values_by_number[initializer.
variance_scaling_initializer.
mode].name
return slim.variance_scaling_initializer(
factor=initializer.variance_scaling_initializer.factor,
mode=mode,
uniform=initializer.variance_scaling_initializer.uniform)
raise ValueError('Unknown initializer function: {}'.format(
initializer_oneof))
def _build_batch_norm_params(batch_norm, is_training):
"""Build a dictionary of batch_norm params from config.
Args:
batch_norm: hyperparams_pb2.ConvHyperparams.batch_norm proto.
is_training: Whether the models is in training mode.
Returns:
A dictionary containing batch_norm parameters.
"""
batch_norm_params = {
'decay': batch_norm.decay,
'center': batch_norm.center,
'scale': batch_norm.scale,
'epsilon': batch_norm.epsilon,
'is_training': is_training and batch_norm.train,
}
return batch_norm_params
|
jiaphuan/models
|
research/object_detection/builders/hyperparams_builder.py
|
Python
|
apache-2.0
| 6,261
|
from plenum.test.spy_helpers import get_count
def sum_of_request_propagates(node):
return get_count(node.replicas[0]._ordering_service,
node.replicas[0]._ordering_service._request_propagates_if_needed) + \
get_count(node.replicas[1]._ordering_service,
node.replicas[1]._ordering_service._request_propagates_if_needed)
|
evernym/zeno
|
plenum/test/node_request/test_propagate/helper.py
|
Python
|
apache-2.0
| 377
|
#-
# Copyright (c) 2011 Steven J. Murdoch
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
class test_raw_bltzall_lt_back(BaseBERITestCase):
def test_before_bltzall(self):
self.assertRegisterNotEqual(self.MIPS.a0, 0, "instruction before bltzall missed")
def test_bltzall_branch_delay(self):
self.assertRegisterEqual(self.MIPS.a1, 2, "instruction in brach-delay slot missed")
def test_bltzall_skipped(self):
self.assertRegisterNotEqual(self.MIPS.a2, 3, "bltzall didn't branch")
def test_bltzall_target(self):
self.assertRegisterEqual(self.MIPS.a3, 4, "instruction at branch target didn't run")
def test_bltzall_ra(self):
self.assertRegisterEqual(self.MIPS.a4, self.MIPS.ra, "bltzall ra incorrect")
|
8l/beri
|
cheritest/trunk/tests/branch/test_raw_bltzall_lt_back.py
|
Python
|
apache-2.0
| 1,853
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import copy
import datetime
import exceptions
import re
import urlparse
import six
from tempest import config
from tempest.openstack.common import log as logging
from tempest.services.identity.json import token_client as json_id
from tempest.services.identity.v3.json import token_client as json_v3id
CONF = config.CONF
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class AuthProvider(object):
"""
Provide authentication
"""
def __init__(self, credentials, interface=None):
"""
:param credentials: credentials for authentication
:param interface: 'json' or 'xml'. Applicable for tempest client only
(deprecated: only json now supported)
"""
credentials = self._convert_credentials(credentials)
if self.check_credentials(credentials):
self.credentials = credentials
else:
raise TypeError("Invalid credentials")
self.interface = 'json'
self.cache = None
self.alt_auth_data = None
self.alt_part = None
def _convert_credentials(self, credentials):
# Support dict credentials for backwards compatibility
if isinstance(credentials, dict):
return get_credentials(**credentials)
else:
return credentials
def __str__(self):
return "Creds :{creds}, interface: {interface}, " \
"cached auth data: {cache}".format(
creds=self.credentials, interface=self.interface,
cache=self.cache)
@abc.abstractmethod
def _decorate_request(self, filters, method, url, headers=None, body=None,
auth_data=None):
"""
Decorate request with authentication data
"""
return
@abc.abstractmethod
def _get_auth(self):
return
@abc.abstractmethod
def _fill_credentials(self, auth_data_body):
return
def fill_credentials(self):
"""
Fill credentials object with data from auth
"""
auth_data = self.get_auth()
self._fill_credentials(auth_data[1])
return self.credentials
@classmethod
def check_credentials(cls, credentials):
"""
Verify credentials are valid.
"""
return isinstance(credentials, Credentials) and credentials.is_valid()
@property
def auth_data(self):
return self.get_auth()
@auth_data.deleter
def auth_data(self):
self.clear_auth()
def get_auth(self):
"""
Returns auth from cache if available, else auth first
"""
if self.cache is None or self.is_expired(self.cache):
self.set_auth()
return self.cache
def set_auth(self):
"""
Forces setting auth, ignores cache if it exists.
Refills credentials
"""
self.cache = self._get_auth()
self._fill_credentials(self.cache[1])
def clear_auth(self):
"""
Can be called to clear the access cache so that next request
will fetch a new token and base_url.
"""
self.cache = None
self.credentials.reset()
@abc.abstractmethod
def is_expired(self, auth_data):
return
def auth_request(self, method, url, headers=None, body=None, filters=None):
"""
Obtains auth data and decorates a request with that.
:param method: HTTP method of the request
:param url: relative URL of the request (path)
:param headers: HTTP headers of the request
:param body: HTTP body in case of POST / PUT
:param filters: select a base URL out of the catalog
:returns a Tuple (url, headers, body)
"""
orig_req = dict(url=url, headers=headers, body=body)
auth_url, auth_headers, auth_body = self._decorate_request(
filters, method, url, headers, body)
auth_req = dict(url=auth_url, headers=auth_headers, body=auth_body)
# Overwrite part if the request if it has been requested
if self.alt_part is not None:
if self.alt_auth_data is not None:
alt_url, alt_headers, alt_body = self._decorate_request(
filters, method, url, headers, body,
auth_data=self.alt_auth_data)
alt_auth_req = dict(url=alt_url, headers=alt_headers,
body=alt_body)
auth_req[self.alt_part] = alt_auth_req[self.alt_part]
else:
# If alt auth data is None, skip auth in the requested part
auth_req[self.alt_part] = orig_req[self.alt_part]
# Next auth request will be normal, unless otherwise requested
self.reset_alt_auth_data()
return auth_req['url'], auth_req['headers'], auth_req['body']
def reset_alt_auth_data(self):
"""
Configure auth provider to provide valid authentication data
"""
self.alt_part = None
self.alt_auth_data = None
def set_alt_auth_data(self, request_part, auth_data):
"""
Configure auth provider to provide alt authentication data
on a part of the *next* auth_request. If credentials are None,
set invalid data.
:param request_part: request part to contain invalid auth: url,
headers, body
:param auth_data: alternative auth_data from which to get the
invalid data to be injected
"""
self.alt_part = request_part
self.alt_auth_data = auth_data
@abc.abstractmethod
def base_url(self, filters, auth_data=None):
"""
Extracts the base_url based on provided filters
"""
return
class KeystoneAuthProvider(AuthProvider):
token_expiry_threshold = datetime.timedelta(seconds=60)
def __init__(self, credentials, interface=None):
super(KeystoneAuthProvider, self).__init__(credentials, interface)
self.auth_client = self._auth_client()
def _decorate_request(self, filters, method, url, headers=None, body=None,
auth_data=None):
if auth_data is None:
auth_data = self.auth_data
token, _ = auth_data
base_url = self.base_url(filters=filters, auth_data=auth_data)
# build authenticated request
# returns new request, it does not touch the original values
_headers = copy.deepcopy(headers) if headers is not None else {}
_headers['X-Auth-Token'] = str(token)
if url is None or url == "":
_url = base_url
else:
# Join base URL and url, and remove multiple contiguous slashes
_url = "/".join([base_url, url])
parts = [x for x in urlparse.urlparse(_url)]
parts[2] = re.sub("/{2,}", "/", parts[2])
_url = urlparse.urlunparse(parts)
# no change to method or body
return str(_url), _headers, body
@abc.abstractmethod
def _auth_client(self):
return
@abc.abstractmethod
def _auth_params(self):
return
def _get_auth(self):
# Bypasses the cache
auth_func = getattr(self.auth_client, 'get_token')
auth_params = self._auth_params()
# returns token, auth_data
token, auth_data = auth_func(**auth_params)
return token, auth_data
def get_token(self):
return self.auth_data[0]
class KeystoneV2AuthProvider(KeystoneAuthProvider):
EXPIRY_DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
def _auth_client(self):
return json_id.TokenClientJSON()
def _auth_params(self):
return dict(
user=self.credentials.username,
password=self.credentials.password,
tenant=self.credentials.tenant_name,
auth_data=True)
def _fill_credentials(self, auth_data_body):
tenant = auth_data_body['token']['tenant']
user = auth_data_body['user']
if self.credentials.tenant_name is None:
self.credentials.tenant_name = tenant['name']
if self.credentials.tenant_id is None:
self.credentials.tenant_id = tenant['id']
if self.credentials.username is None:
self.credentials.username = user['name']
if self.credentials.user_id is None:
self.credentials.user_id = user['id']
def base_url(self, filters, auth_data=None):
"""
Filters can be:
- service: compute, image, etc
- region: the service region
- endpoint_type: adminURL, publicURL, internalURL
- api_version: replace catalog version with this
- skip_path: take just the base URL
"""
if auth_data is None:
auth_data = self.auth_data
token, _auth_data = auth_data
service = filters.get('service')
region = filters.get('region')
endpoint_type = filters.get('endpoint_type', 'publicURL')
if service is None:
raise exceptions.EndpointNotFound("No service provided")
_base_url = None
for ep in _auth_data['serviceCatalog']:
if ep["type"] == service:
for _ep in ep['endpoints']:
if region is not None and _ep['region'] == region:
_base_url = _ep.get(endpoint_type)
if not _base_url:
# No region matching, use the first
_base_url = ep['endpoints'][0].get(endpoint_type)
break
if _base_url is None:
raise exceptions.EndpointNotFound(service)
parts = urlparse.urlparse(_base_url)
if filters.get('api_version', None) is not None:
path = "/" + filters['api_version']
noversion_path = "/".join(parts.path.split("/")[2:])
if noversion_path != "":
path += "/" + noversion_path
_base_url = _base_url.replace(parts.path, path)
if filters.get('skip_path', None) is not None and parts.path != '':
_base_url = _base_url.replace(parts.path, "/")
return _base_url
def is_expired(self, auth_data):
_, access = auth_data
expiry = datetime.datetime.strptime(access['token']['expires'],
self.EXPIRY_DATE_FORMAT)
return expiry - self.token_expiry_threshold <= \
datetime.datetime.utcnow()
class KeystoneV3AuthProvider(KeystoneAuthProvider):
EXPIRY_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
def _auth_client(self):
return json_v3id.V3TokenClientJSON()
def _auth_params(self):
return dict(
user=self.credentials.username,
password=self.credentials.password,
tenant=self.credentials.tenant_name,
domain=self.credentials.user_domain_name,
auth_data=True)
def _fill_credentials(self, auth_data_body):
# project or domain, depending on the scope
project = auth_data_body.get('project', None)
domain = auth_data_body.get('domain', None)
# user is always there
user = auth_data_body['user']
# Set project fields
if project is not None:
if self.credentials.project_name is None:
self.credentials.project_name = project['name']
if self.credentials.project_id is None:
self.credentials.project_id = project['id']
if self.credentials.project_domain_id is None:
self.credentials.project_domain_id = project['domain']['id']
if self.credentials.project_domain_name is None:
self.credentials.project_domain_name = \
project['domain']['name']
# Set domain fields
if domain is not None:
if self.credentials.domain_id is None:
self.credentials.domain_id = domain['id']
if self.credentials.domain_name is None:
self.credentials.domain_name = domain['name']
# Set user fields
if self.credentials.username is None:
self.credentials.username = user['name']
if self.credentials.user_id is None:
self.credentials.user_id = user['id']
if self.credentials.user_domain_id is None:
self.credentials.user_domain_id = user['domain']['id']
if self.credentials.user_domain_name is None:
self.credentials.user_domain_name = user['domain']['name']
def base_url(self, filters, auth_data=None):
"""
Filters can be:
- service: compute, image, etc
- region: the service region
- endpoint_type: adminURL, publicURL, internalURL
- api_version: replace catalog version with this
- skip_path: take just the base URL
"""
if auth_data is None:
auth_data = self.auth_data
token, _auth_data = auth_data
service = filters.get('service')
region = filters.get('region')
endpoint_type = filters.get('endpoint_type', 'public')
if service is None:
raise exceptions.EndpointNotFound("No service provided")
if 'URL' in endpoint_type:
endpoint_type = endpoint_type.replace('URL', '')
_base_url = None
catalog = _auth_data['catalog']
# Select entries with matching service type
service_catalog = [ep for ep in catalog if ep['type'] == service]
if len(service_catalog) > 0:
service_catalog = service_catalog[0]['endpoints']
else:
# No matching service
raise exceptions.EndpointNotFound(service)
# Filter by endpoint type (interface)
filtered_catalog = [ep for ep in service_catalog if
ep['interface'] == endpoint_type]
if len(filtered_catalog) == 0:
# No matching type, keep all and try matching by region at least
filtered_catalog = service_catalog
# Filter by region
filtered_catalog = [ep for ep in filtered_catalog if
ep['region'] == region]
if len(filtered_catalog) == 0:
# No matching region, take the first endpoint
filtered_catalog = [service_catalog[0]]
# There should be only one match. If not take the first.
_base_url = filtered_catalog[0].get('url', None)
if _base_url is None:
raise exceptions.EndpointNotFound(service)
parts = urlparse.urlparse(_base_url)
if filters.get('api_version', None) is not None:
path = "/" + filters['api_version']
noversion_path = "/".join(parts.path.split("/")[2:])
if noversion_path != "":
path += "/" + noversion_path
_base_url = _base_url.replace(parts.path, path)
if filters.get('skip_path', None) is not None:
_base_url = _base_url.replace(parts.path, "/")
return _base_url
def is_expired(self, auth_data):
_, access = auth_data
expiry = datetime.datetime.strptime(access['expires_at'],
self.EXPIRY_DATE_FORMAT)
return expiry - self.token_expiry_threshold <= \
datetime.datetime.utcnow()
def get_default_credentials(credential_type, fill_in=True):
"""
Returns configured credentials of the specified type
based on the configured auth_version
"""
return get_credentials(fill_in=fill_in, credential_type=credential_type)
def get_credentials(credential_type=None, fill_in=True, **kwargs):
"""
Builds a credentials object based on the configured auth_version
:param credential_type (string): requests credentials from tempest
configuration file. Valid values are defined in
Credentials.TYPE.
:param kwargs (dict): take into account only if credential_type is
not specified or None. Dict of credential key/value pairs
Examples:
Returns credentials from the provided parameters:
>>> get_credentials(username='foo', password='bar')
Returns credentials from tempest configuration:
>>> get_credentials(credential_type='user')
"""
if CONF.identity.auth_version == 'v2':
credential_class = KeystoneV2Credentials
auth_provider_class = KeystoneV2AuthProvider
elif CONF.identity.auth_version == 'v3':
credential_class = KeystoneV3Credentials
auth_provider_class = KeystoneV3AuthProvider
else:
raise exceptions.InvalidConfiguration('Unsupported auth version')
if credential_type is not None:
creds = credential_class.get_default(credential_type)
else:
creds = credential_class(**kwargs)
# Fill in the credentials fields that were not specified
if fill_in:
auth_provider = auth_provider_class(creds)
creds = auth_provider.fill_credentials()
return creds
class Credentials(object):
"""
Set of credentials for accessing OpenStack services
ATTRIBUTES: list of valid class attributes representing credentials.
TYPES: types of credentials available in the configuration file.
For each key there's a tuple (section, prefix) to match the
configuration options.
"""
ATTRIBUTES = []
TYPES = {
'identity_admin': ('identity', 'admin'),
'user': ('identity', None),
'alt_user': ('identity', 'alt')
}
def __init__(self, **kwargs):
"""
Enforce the available attributes at init time (only).
Additional attributes can still be set afterwards if tests need
to do so.
"""
self._initial = kwargs
self._apply_credentials(kwargs)
def _apply_credentials(self, attr):
for key in attr.keys():
if key in self.ATTRIBUTES:
setattr(self, key, attr[key])
else:
raise exceptions.InvalidCredentials
def __str__(self):
"""
Represent only attributes included in self.ATTRIBUTES
"""
_repr = dict((k, getattr(self, k)) for k in self.ATTRIBUTES)
return str(_repr)
def __eq__(self, other):
"""
Credentials are equal if attributes in self.ATTRIBUTES are equal
"""
return str(self) == str(other)
def __getattr__(self, key):
# If an attribute is set, __getattr__ is not invoked
# If an attribute is not set, and it is a known one, return None
if key in self.ATTRIBUTES:
return None
else:
raise AttributeError
def __delitem__(self, key):
# For backwards compatibility, support dict behaviour
if key in self.ATTRIBUTES:
delattr(self, key)
else:
raise AttributeError
def get(self, item, default):
# In this patch act as dict for backward compatibility
try:
return getattr(self, item)
except AttributeError:
return default
@classmethod
def get_default(cls, credentials_type):
if credentials_type not in cls.TYPES:
raise exceptions.InvalidCredentials()
creds = cls._get_default(credentials_type)
if not creds.is_valid():
msg = ("The %s credentials are incorrectly set in the config file."
" Double check that all required values are assigned" %
credentials_type)
raise exceptions.InvalidConfiguration(msg)
return creds
@classmethod
def _get_default(cls, credentials_type):
raise NotImplementedError
def is_valid(self):
raise NotImplementedError
def reset(self):
# First delete all known attributes
for key in self.ATTRIBUTES:
if getattr(self, key) is not None:
delattr(self, key)
# Then re-apply initial setup
self._apply_credentials(self._initial)
class KeystoneV2Credentials(Credentials):
CONF_ATTRIBUTES = ['username', 'password', 'tenant_name']
ATTRIBUTES = ['user_id', 'tenant_id']
ATTRIBUTES.extend(CONF_ATTRIBUTES)
@classmethod
def _get_default(cls, credentials_type='user'):
params = {}
section, prefix = cls.TYPES[credentials_type]
for attr in cls.CONF_ATTRIBUTES:
_section = getattr(CONF, section)
if prefix is None:
params[attr] = getattr(_section, attr)
else:
params[attr] = getattr(_section, prefix + "_" + attr)
return cls(**params)
def is_valid(self):
"""
Minimum set of valid credentials, are username and password.
Tenant is optional.
"""
return None not in (self.username, self.password)
class KeystoneV3Credentials(KeystoneV2Credentials):
"""
Credentials suitable for the Keystone Identity V3 API
"""
CONF_ATTRIBUTES = ['domain_name', 'password', 'tenant_name', 'username']
ATTRIBUTES = ['project_domain_id', 'project_domain_name', 'project_id',
'project_name', 'tenant_id', 'tenant_name', 'user_domain_id',
'user_domain_name', 'user_id']
ATTRIBUTES.extend(CONF_ATTRIBUTES)
def __init__(self, **kwargs):
"""
If domain is not specified, load the one configured for the
identity manager.
"""
domain_fields = set(x for x in self.ATTRIBUTES if 'domain' in x)
if not domain_fields.intersection(kwargs.keys()):
kwargs['user_domain_name'] = CONF.identity.admin_domain_name
super(KeystoneV3Credentials, self).__init__(**kwargs)
def __setattr__(self, key, value):
parent = super(KeystoneV3Credentials, self)
# for tenant_* set both project and tenant
if key == 'tenant_id':
parent.__setattr__('project_id', value)
elif key == 'tenant_name':
parent.__setattr__('project_name', value)
# for project_* set both project and tenant
if key == 'project_id':
parent.__setattr__('tenant_id', value)
elif key == 'project_name':
parent.__setattr__('tenant_name', value)
# for *_domain_* set both user and project if not set yet
if key == 'user_domain_id':
if self.project_domain_id is None:
parent.__setattr__('project_domain_id', value)
if key == 'project_domain_id':
if self.user_domain_id is None:
parent.__setattr__('user_domain_id', value)
if key == 'user_domain_name':
if self.project_domain_name is None:
parent.__setattr__('project_domain_name', value)
if key == 'project_domain_name':
if self.user_domain_name is None:
parent.__setattr__('user_domain_name', value)
# support domain_name coming from config
if key == 'domain_name':
parent.__setattr__('user_domain_name', value)
parent.__setattr__('project_domain_name', value)
# finally trigger default behaviour for all attributes
parent.__setattr__(key, value)
def is_valid(self):
"""
Valid combinations of v3 credentials (excluding token, scope)
- User id, password (optional domain)
- User name, password and its domain id/name
For the scope, valid combinations are:
- None
- Project id (optional domain)
- Project name and its domain id/name
"""
valid_user_domain = any(
[self.user_domain_id is not None,
self.user_domain_name is not None])
valid_project_domain = any(
[self.project_domain_id is not None,
self.project_domain_name is not None])
valid_user = any(
[self.user_id is not None,
self.username is not None and valid_user_domain])
valid_project = any(
[self.project_name is None and self.project_id is None,
self.project_id is not None,
self.project_name is not None and valid_project_domain])
return all([self.password is not None, valid_user, valid_project])
|
jamielennox/tempest
|
tempest/auth.py
|
Python
|
apache-2.0
| 24,953
|
# -*- coding: utf-8 -*-
import pytest
import time
import sys
import cPickle as pickle
from test_base_class import TestBaseClass
aerospike = pytest.importorskip("aerospike")
try:
from aerospike.exception import *
except:
print "Please install aerospike python client."
sys.exit(1)
class TestAppend(object):
def setup_class(cls):
"""
Setup method.
"""
hostlist, user, password = TestBaseClass.get_hosts()
config = {'hosts': hostlist}
if user == None and password == None:
TestAppend.client = aerospike.client(config).connect()
else:
TestAppend.client = aerospike.client(config).connect(user, password)
def teardown_class(cls):
TestAppend.client.close()
def setup_method(self, method):
for i in xrange(5):
key = ('test', 'demo', i)
rec = {'name': 'name%s' % (str(i)), 'age': i}
TestAppend.client.put(key, rec)
def teardown_method(self, method):
"""
Teardoen method.
"""
#time.sleep(1)
for i in xrange(5):
key = ('test', 'demo', i)
TestAppend.client.remove(key)
def test_append_with_no_parameters(self):
"""
Invoke append() without any mandatory parameters.
"""
with pytest.raises(TypeError) as typeError:
TestAppend.client.append()
assert "Required argument 'key' (pos 1) not found" in typeError.value
def test_append_with_correct_paramters(self):
"""
Invoke append() with correct parameters
"""
key = ('test', 'demo', 1)
TestAppend.client.append(key, "name", "str")
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
def test_append_with_correct_policy(self):
"""
Invoke append() with correct policy
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'retry': aerospike.POLICY_RETRY_ONCE,
'commit_level': aerospike.POLICY_COMMIT_LEVEL_MASTER
}
TestAppend.client.append(key, "name", "str", {}, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
def test_append_with_policy_key_send(self):
"""
Invoke append() with policy key send
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'commit_level': aerospike.POLICY_COMMIT_LEVEL_ALL
}
TestAppend.client.append(key, "name", "str", {}, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_digest(self):
"""
Invoke append() with policy key digest
"""
key = ('test', 'demo', None, bytearray("asd;as[d'as;djk;uyfl",
"utf-8"))
rec = {'name': 'name%s' % (str(1)), 'age': 1, 'nolist': [1, 2, 3]}
TestAppend.client.put(key, rec)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_DIGEST,
'retry': aerospike.POLICY_RETRY_NONE
}
TestAppend.client.append(key, "name", "str", {}, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str', 'nolist': [1, 2, 3]}
assert key == ('test', 'demo', None,
bytearray(b"asd;as[d\'as;djk;uyfl"))
TestAppend.client.remove(key)
def test_append_with_policy_key_gen_EQ_ignore(self):
"""
Invoke append() with gen eq positive ignore
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_IGNORE
}
meta = {'gen': 10, 'ttl': 1200}
TestAppend.client.append(key, "name", "str", meta, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_EQ_positive(self):
"""
Invoke append() with gen eq positive
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_EQ
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {'gen': gen, 'ttl': 1200}
TestAppend.client.append(key, "name", "str", meta, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_GT_lesser(self):
"""
Invoke append() with gen GT lesser
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_GT
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {
'gen': gen,
'ttl': 1200
}
try:
TestAppend.client.append(key, "name", "str", meta, policy)
except RecordGenerationError as exception:
assert exception.code == 3
assert exception.msg == "AEROSPIKE_ERR_RECORD_GENERATION"
assert exception.bin == "name"
(key , meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_GT_positive(self):
"""
Invoke append() with gen GT positive
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_GT
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {'gen': gen + 2, 'ttl': 1200}
TestAppend.client.append(key, "name", "str", meta, policy)
(key, meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1str'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_policy_key_gen_EQ_not_equal(self):
"""
Invoke append() with policy key EQ not equal
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 1000,
'key': aerospike.POLICY_KEY_SEND,
'retry': aerospike.POLICY_RETRY_ONCE,
'gen': aerospike.POLICY_GEN_EQ
}
(key, meta) = TestAppend.client.exists(key)
gen = meta['gen']
meta = {
'gen': gen + 5,
'ttl': 1200
}
try:
TestAppend.client.append(key, "name", "str", meta, policy)
except RecordGenerationError as exception:
assert exception.code == 3
assert exception.msg == "AEROSPIKE_ERR_RECORD_GENERATION"
assert exception.bin == "name"
(key , meta, bins) = TestAppend.client.get(key)
assert bins == {'age': 1, 'name': 'name1'}
assert key == ('test', 'demo', None, bytearray(
b'\xb7\xf4\xb88\x89\xe2\xdag\xdeh>\x1d\xf6\x91\x9a\x1e\xac\xc4F\xc8')
)
def test_append_with_incorrect_policy(self):
"""
Invoke append() with incorrect policy
"""
key = ('test', 'demo', 1)
policy = {
'timeout': 0.5
}
try:
TestAppend.client.append(key, "name", "str", {}, policy)
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "timeout is invalid"
def test_append_with_nonexistent_key(self):
"""
Invoke append() with non-existent key
"""
key = ('test', 'demo', 1000)
status = TestAppend.client.append(key, "name", "str")
assert status == 0L
TestAppend.client.remove(key)
def test_append_with_nonexistent_bin(self):
"""
Invoke append() with non-existent bin
"""
key = ('test', 'demo', 1)
status = TestAppend.client.append(key, "name1", "str")
assert status == 0L
def test_append_value_not_string(self):
"""
Invoke append() not a string
"""
key = ('test', 'demo', 1)
try:
TestAppend.client.append(key, "name", 2)
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "Cannot concatenate 'str' and 'non-str' objects"
def test_append_with_extra_parameter(self):
"""
Invoke append() with extra parameter.
"""
key = ('test', 'demo', 1)
policy = {'timeout': 1000}
with pytest.raises(TypeError) as typeError:
TestAppend.client.append(key, "name", "str", {}, policy, "")
assert "append() takes at most 5 arguments (6 given)" in typeError.value
def test_append_policy_is_string(self):
"""
Invoke append() with policy is string
"""
key = ('test', 'demo', 1)
try:
TestAppend.client.append(key, "name", "pqr", {}, "")
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "policy must be a dict"
def test_append_key_is_none(self):
"""
Invoke append() with key is none
"""
try:
TestAppend.client.append(None, "name", "str")
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "key is invalid"
def test_append_bin_is_none(self):
"""
Invoke append() with bin is none
"""
key = ('test', 'demo', 1)
try:
TestAppend.client.append(key, None, "str")
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "Bin name should be of type string"
def test_append_unicode_value(self):
"""
Invoke append() with unicode string
"""
key = ('test', 'demo', 1)
res = TestAppend.client.append(key, "name", u"address")
key, meta, bins = TestAppend.client.get(key)
assert bins['name'] == 'name1address'
def test_append_unicode_bin_name(self):
"""
Invoke append() with unicode string
"""
key = ('test', 'demo', 1)
res = TestAppend.client.append(key, u"add", u"address")
key, meta, bins = TestAppend.client.get(key)
assert bins['add'] == 'address'
def test_append_with_correct_paramters_without_connection(self):
"""
Invoke append() with correct parameters without connection
"""
config = {'hosts': [('127.0.0.1', 3000)]}
client1 = aerospike.client(config)
key = ('test', 'demo', 1)
try:
client1.append(key, "name", "str")
except ClusterError as exception:
assert exception.code == 11L
assert exception.msg == 'No connection to aerospike cluster'
|
arthurprs/aerospike-client-python
|
test/test_append.py
|
Python
|
apache-2.0
| 12,313
|
"""
Definition of views.
"""
from app.models import Choice, Poll
from datetime import datetime
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpRequest, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.template import RequestContext
from django.utils import timezone
from django.views.generic import ListView, DetailView
from os import path
import json
class PollListView(ListView):
"""Renders the home page, with a list of all polls."""
model = Poll
def get_context_data(self, **kwargs):
context = super(PollListView, self).get_context_data(**kwargs)
context['title'] = 'Polls'
context['year'] = datetime.now().year
return context
class PollDetailView(DetailView):
"""Renders the poll details page."""
model = Poll
def get_context_data(self, **kwargs):
context = super(PollDetailView, self).get_context_data(**kwargs)
context['title'] = 'Poll'
context['year'] = datetime.now().year
return context
class PollResultsView(DetailView):
"""Renders the results page."""
model = Poll
def get_context_data(self, **kwargs):
context = super(PollResultsView, self).get_context_data(**kwargs)
context['title'] = 'Results'
context['year'] = datetime.now().year
return context
def contact(request):
"""Renders the contact page."""
assert isinstance(request, HttpRequest)
return render(
request,
'app/contact.html',
context_instance = RequestContext(request,
{
'title': 'Contact',
'message': 'Your contact page.',
'year': datetime.now().year,
})
)
def about(request):
"""Renders the about page."""
assert isinstance(request, HttpRequest)
return render(
request,
'app/about.html',
context_instance = RequestContext(request,
{
'title': 'About',
'message': 'Your application description page.',
'year': datetime.now().year,
})
)
def vote(request, poll_id):
"""Handles voting. Validates input and updates the repository."""
poll = get_object_or_404(Poll, pk=poll_id)
try:
selected_choice = poll.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
return render(request, 'app/details.html', {
'title': 'Poll',
'year': datetime.now().year,
'poll': poll,
'error_message': "Please make a selection.",
})
else:
selected_choice.votes += 1
selected_choice.save()
return HttpResponseRedirect(reverse('app:results', args=(poll.id,)))
@login_required
def seed(request):
"""Seeds the database with sample polls."""
samples_path = path.join(path.dirname(__file__), 'samples.json')
with open(samples_path, 'r') as samples_file:
samples_polls = json.load(samples_file)
for sample_poll in samples_polls:
poll = Poll()
poll.text = sample_poll['text']
poll.pub_date = timezone.now()
poll.save()
for sample_choice in sample_poll['choices']:
choice = Choice()
choice.poll = poll
choice.text = sample_choice
choice.votes = 0
choice.save()
return HttpResponseRedirect(reverse('app:home'))
|
DinoV/PTVS
|
Python/Templates/Samples/ProjectTemplates/Python/Samples/PollsDjango/app/views.py
|
Python
|
apache-2.0
| 3,472
|
import sys
import json
import os
import re
import argparse
def get_file_locations():
parser = argparse.ArgumentParser()
parser.add_argument('input', help='Input AVPR filename(s)', nargs='+')
parser.add_argument('output', help='Output directory')
args = parser.parse_args()
return (args.input, args.output)
def typename(typeobject):
if isinstance(typeobject, list):
union_names = [typename(item) for item in typeobject]
return '|'.join(union_names)
elif isinstance(typeobject, dict):
if typeobject['type'] == 'array':
return 'array<%s>' % typename(typeobject['items'])
elif typeobject['type'] == 'map':
return 'map<%s>' % typename(typeobject['values'])
elif isinstance(typeobject, basestring):
return typeobject
raise ValueError
def cleanup_doc(doc,indent=0):
return '\n'.join([' '*indent + line for line in doc.split('\n')])
if __name__ == '__main__':
avpr_filenames, rest_directory = get_file_locations()
for avpr_filename in avpr_filenames:
base_filename = os.path.basename(avpr_filename)
name = os.path.splitext(base_filename)[0]
rest_filename = os.path.join(rest_directory, name+'.rst')
with open(avpr_filename,'r') as f:
data = json.load(f)
output = data['protocol'] + '\n'
output += '*' * len(data['protocol']) + '\n\n'
if 'doc' in data:
output += cleanup_doc(data['doc']) + '\n\n'
for message_name in data['messages']:
message_def = data['messages'][message_name]
doc = message_def['doc']
# process formal parameters ('request')
request = message_def['request']
# collect the names
param_names = []
for param in request:
param_names.append(param['name'])
response = message_def['response']
errors = message_def['errors']
output += " .. function:: %s(%s)\n\n" % (message_name,
', '.join(param_names))
for param in request:
output += " :param %s: %s: %s\n" % (param['name'], param['type'],
param['doc'])
output += " :return type: %s\n" % response
output += " :throws: %s\n\n" % ', '.join(errors)
output += cleanup_doc(doc)
output += "\n\n"
for item in data['types']:
output += '.. avro:%s:: %s\n\n' % (item['type'], item['name'])
if item['type'] == 'record':
for field in item['fields']:
output += ' :field %s:\n' % field['name']
if 'doc' in field:
output += cleanup_doc(field['doc'],indent=4) + '\n'
output += ' :type %s: %s\n' % (field['name'], typename(field['type']))
output += '\n'
if item['type'] == 'enum':
output += ' :symbols: %s\n' % '|'.join(item['symbols'])
if item['type'] == 'fixed':
output += ' :size: %s\n' % item['size']
if 'doc' in item:
output += cleanup_doc(item['doc'],indent=2) + '\n\n'
with open(rest_filename,'w') as f:
f.write(output)
def get_file_locations():
parser = argparse.ArgumentParser()
parser.add_argument('input', help='Input AVPR filename(s)', nargs='+')
parser.add_argument('output', help='Output directory')
args = parser.parse_args()
return (args.input, args.output)
def typename(typeobject):
if isinstance(typeobject, list):
union_names = [typename(item) for item in typeobject]
return '|'.join(union_names)
elif isinstance(typeobject, dict):
if typeobject['type'] == 'array':
return 'array<%s>' % typename(typeobject['items'])
elif typeobject['type'] == 'map':
return 'map<%s>' % typename(typeobject['values'])
elif isinstance(typeobject, basestring):
return typeobject
raise ValueError
if __name__ == '__main__':
avpr_filenames, rest_directory = get_file_locations()
for avpr_filename in avpr_filenames:
base_filename = os.path.basename(avpr_filename)
name = os.path.splitext(base_filename)[0]
rest_filename = os.path.join(rest_directory, name+'.rst')
with open(avpr_filename,'r') as f:
data = json.load(f)
output = data['protocol'] + '\n'
output += '*' * len(data['protocol']) + '\n\n'
if 'doc' in data:
output += cleanup_doc(data['doc']) + '\n\n'
for message_name in data['messages']:
message_def = data['messages'][message_name]
doc = message_def['doc']
# process formal parameters ('request')
request = message_def['request']
# collect the names
param_names = []
for param in request:
param_names.append(param['name'])
response = message_def['response']
errors = message_def['errors']
output += " .. function:: %s(%s)\n\n" % (message_name,
', '.join(param_names))
for param in request:
output += " :param %s: %s: %s\n" % (param['name'], param['type'],
param['doc'])
output += " :return type: %s\n" % response
output += " :throws: %s\n\n" % ', '.join(errors)
output += cleanup_doc(doc)
output += "\n\n"
for item in data['types']:
output += '.. avro:%s:: %s\n\n' % (item['type'], item['name'])
if item['type'] == 'record':
for field in item['fields']:
output += ' :field %s:\n' % field['name']
if 'doc' in field:
output += cleanup_doc(field['doc'],indent=4) + '\n'
output += ' :type %s: %s\n' % (field['name'], typename(field['type']))
output += '\n'
if item['type'] == 'enum':
output += ' :symbols: %s\n' % '|'.join(item['symbols'])
if item['type'] == 'fixed':
output += ' :size: %s\n' % item['size']
if 'doc' in item:
output += cleanup_doc(item['doc'],indent=2) + '\n\n'
with open(rest_filename,'w') as f:
f.write(output)
|
pcingola/schemas
|
tools/sphinx/avpr2rest.py
|
Python
|
apache-2.0
| 5,874
|
#!/usr/bin/env python
# pylint: disable=missing-docstring
# flake8: noqa: T001
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
#
# Copyright 2016 Red Hat, Inc. and/or its affiliates
# and other contributors as indicated by the @author tags.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*-
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import json
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/project -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_project
short_description: Module to manage openshift projects
description:
- Manage openshift projects programmatically.
options:
state:
description:
- If present, the project will be created if it doesn't exist or update if different. If absent, the project will be removed if present. If list, information about the project will be gathered and returned as part of the Ansible call results.
required: false
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: False
aliases: []
name:
description:
- Name of the object that is being queried.
required: false
default: None
aliases: []
display_name:
description:
- The display name attribute for a project
required: false
default: None
aliases: []
description:
description:
- The description attribute for a project
required: false
default: None
aliases: []
admin:
description:
- The project admin username
required: false
default: false
aliases: []
admin_role:
description:
- The project admin username
required: false
default: 'admin'
aliases: []
node_selector:
description:
- The node selector for this project.
- This allows certain pods in this project to run on certain nodes.
required: false
default: None
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: create secret
oc_project:
state: present
name: openshift-ops
display_name: operations team project
node_selector:
- top=secret
- noncustomer=True
'''
# -*- -*- -*- End included fragment: doc/project -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception): # pragma: no cover
''' Exception class for Yedit '''
pass
# pylint: disable=too-many-public-methods
class Yedit(object): # pragma: no cover
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self,
filename=None,
content=None,
content_type='yaml',
separator='.',
backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict is None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for separator '''
return self._separator
@separator.setter
def separator(self, inc_sep):
''' setter method for separator '''
self._separator = inc_sep
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key.format(''.join(common_separators)), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
data.clear()
return True
elif key == '' and isinstance(data, list):
del data[:]
return True
if not (key and Yedit.valid_key(key, sep)) and \
isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}}
key = a#b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}}
key = a.b
return c
'''
if key == '':
pass
elif (not (key and Yedit.valid_key(key, sep)) and
isinstance(data, (list, dict))):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key)
elif (arr_ind and isinstance(data, list) and
int(arr_ind) <= len(data) - 1):
data = data[int(arr_ind)]
else:
return None
return data
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
tmp_filename = filename + '.yedit'
with open(tmp_filename, 'w') as yfd:
yfd.write(contents)
os.rename(tmp_filename, filename)
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripDumper if supported.
try:
Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except AttributeError:
Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename is None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
# Try to use RoundTripLoader if supported.
try:
self.yaml_dict = yaml.safe_load(contents, yaml.RoundTripLoader)
except AttributeError:
self.yaml_dict = yaml.safe_load(contents)
# Try to set format attributes if supported
try:
self.yaml_dict.fa.set_block_style()
except AttributeError:
pass
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. {}'.format(err))
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if key_or_item in entry:
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# AUDIT:maybe-no-member makes sense due to loading data from
# a serialized format.
# pylint: disable=maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if isinstance(entry, dict):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
'value=[{}] type=[{}]'.format(value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# AUDIT:maybe-no-member makes sense due to fuzzy types
# pylint: disable=maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index is not None:
ind = index
if ind is not None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
# already exists, return
if ind is not None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError:
entry = None
if entry == value:
return (False, self.yaml_dict)
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is None:
return (False, self.yaml_dict)
# When path equals "" it is a special case.
# "" refers to the root of the document
# Only update the root path (entire document) when its a list or dict
if path == '':
if isinstance(result, list) or isinstance(result, dict):
self.yaml_dict = result
return (True, self.yaml_dict)
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
# Try to use ruamel.yaml and fallback to pyyaml
try:
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
default_flow_style=False),
yaml.RoundTripLoader)
except AttributeError:
tmp_copy = copy.deepcopy(self.yaml_dict)
# set the format attributes if available
try:
tmp_copy.fa.set_block_style()
except AttributeError:
pass
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result is not None:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
@staticmethod
def get_curr_value(invalue, val_type):
'''return the current value'''
if invalue is None:
return None
curr_value = invalue
if val_type == 'yaml':
curr_value = yaml.load(invalue)
elif val_type == 'json':
curr_value = json.loads(invalue)
return curr_value
@staticmethod
def parse_value(inc_value, vtype=''):
'''determine value type passed'''
true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
'on', 'On', 'ON', ]
false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
'off', 'Off', 'OFF']
# It came in as a string but you didn't specify value_type as string
# we will convert to bool if it matches any of the above cases
if isinstance(inc_value, str) and 'bool' in vtype:
if inc_value not in true_bools and inc_value not in false_bools:
raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
elif isinstance(inc_value, bool) and 'str' in vtype:
inc_value = str(inc_value)
# There is a special case where '' will turn into None after yaml loading it so skip
if isinstance(inc_value, str) and inc_value == '':
pass
# If vtype is not str then go ahead and attempt to yaml load it.
elif isinstance(inc_value, str) and 'str' not in vtype:
try:
inc_value = yaml.safe_load(inc_value)
except Exception:
raise YeditException('Could not determine type of incoming value. ' +
'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))
return inc_value
@staticmethod
def process_edits(edits, yamlfile):
'''run through a list of edits and process them one-by-one'''
results = []
for edit in edits:
value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
if edit.get('action') == 'update':
# pylint: disable=line-too-long
curr_value = Yedit.get_curr_value(
Yedit.parse_value(edit.get('curr_value')),
edit.get('curr_value_format'))
rval = yamlfile.update(edit['key'],
value,
edit.get('index'),
curr_value)
elif edit.get('action') == 'append':
rval = yamlfile.append(edit['key'], value)
else:
rval = yamlfile.put(edit['key'], value)
if rval[0]:
results.append({'key': edit['key'], 'edit': rval[1]})
return {'changed': len(results) > 0, 'results': results}
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params):
'''perform the idempotent crud operations'''
yamlfile = Yedit(filename=params['src'],
backup=params['backup'],
separator=params['separator'])
state = params['state']
if params['src']:
rval = yamlfile.load()
if yamlfile.yaml_dict is None and state != 'present':
return {'failed': True,
'msg': 'Error opening file [{}]. Verify that the '.format(params['src']) +
'file exists, that it is has correct permissions, and is valid yaml.'}
if state == 'list':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['key']:
rval = yamlfile.get(params['key']) or {}
return {'changed': False, 'result': rval, 'state': state}
elif state == 'absent':
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
yamlfile.yaml_dict = content
if params['update']:
rval = yamlfile.pop(params['key'], params['value'])
else:
rval = yamlfile.delete(params['key'])
if rval[0] and params['src']:
yamlfile.write()
return {'changed': rval[0], 'result': rval[1], 'state': state}
elif state == 'present':
# check if content is different than what is in the file
if params['content']:
content = Yedit.parse_value(params['content'], params['content_type'])
# We had no edits to make and the contents are the same
if yamlfile.yaml_dict == content and \
params['value'] is None:
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
yamlfile.yaml_dict = content
# If we were passed a key, value then
# we enapsulate it in a list and process it
# Key, Value passed to the module : Converted to Edits list #
edits = []
_edit = {}
if params['value'] is not None:
_edit['value'] = params['value']
_edit['value_type'] = params['value_type']
_edit['key'] = params['key']
if params['update']:
_edit['action'] = 'update'
_edit['curr_value'] = params['curr_value']
_edit['curr_value_format'] = params['curr_value_format']
_edit['index'] = params['index']
elif params['append']:
_edit['action'] = 'append'
edits.append(_edit)
elif params['edits'] is not None:
edits = params['edits']
if edits:
results = Yedit.process_edits(edits, yamlfile)
# if there were changes and a src provided to us we need to write
if results['changed'] and params['src']:
yamlfile.write()
return {'changed': results['changed'], 'result': results['results'], 'state': state}
# no edits to make
if params['src']:
# pylint: disable=redefined-variable-type
rval = yamlfile.write()
return {'changed': rval[0],
'result': rval[1],
'state': state}
# We were passed content but no src, key or value, or edits. Return contents in memory
return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
return {'failed': True, 'msg': 'Unkown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
''' Find and return oc binary file '''
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
self.all_namespaces = all_namespaces
self.oc_binary = locate_oc_binary()
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
'''replace the current object with oc replace'''
# We are removing the 'resourceVersion' to handle
# a race condition when modifying oc objects
yed = Yedit(fname)
results = yed.delete('metadata.resourceVersion')
if results[0]:
yed.write()
cmd = ['replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
'''create a temporary file and then call oc create on it'''
fname = Utils.create_tmpfile(rname + '-')
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
'''call oc create on a filename'''
return self.openshift_cmd(['create', '-f', fname])
def _delete(self, resource, name=None, selector=None):
'''call oc delete on a resource'''
cmd = ['delete', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
else:
raise OpenShiftCLIError('Either name or selector is required when calling delete.')
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501
'''process a template
template_name: the name of the template to process
create: whether to send to oc create after processing
params: the parameters for the template
template_data: the incoming template's data; instead of a file
'''
cmd = ['process']
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = Utils.create_tmpfile(template_name + '-')
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['create', '-f', fname])
def _get(self, resource, name=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector is not None:
cmd.append('--selector={}'.format(selector))
elif name is not None:
cmd.append(name)
cmd.extend(['-o', 'json'])
rval = self.openshift_cmd(cmd, output=True)
# Ensure results are retuned in an array
if 'items' in rval:
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
''' perform oadm manage-node scheduable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
cmd.append('--schedulable={}'.format(schedulable))
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501
def _list_pods(self, node=None, selector=None, pod_selector=None):
''' perform oadm list pods
node: the node in which to list pods
selector: the label selector filter if provided
pod_selector: the pod selector filter if provided
'''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
# pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector={}'.format(selector))
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector={}'.format(pod_selector))
if grace_period:
cmd.append('--grace-period={}'.format(int(grace_period)))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _version(self):
''' return the openshift version'''
return self.openshift_cmd(['version'], output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
def _run(self, cmds, input_data):
''' Actually executes the command. This makes mocking easier. '''
curr_env = os.environ.copy()
curr_env.update({'KUBECONFIG': self.kubeconfig})
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=curr_env)
stdout, stderr = proc.communicate(input_data)
return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')
# pylint: disable=too-many-arguments,too-many-branches
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = [self.oc_binary]
if oadm:
cmds.append('adm')
cmds.extend(cmd)
if self.all_namespaces:
cmds.extend(['--all-namespaces'])
elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']: # E501
cmds.extend(['-n', self.namespace])
if self.verbose:
print(' '.join(cmds))
try:
returncode, stdout, stderr = self._run(cmds, input_data)
except OSError as ex:
returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)
rval = {"returncode": returncode,
"cmd": ' '.join(cmds)}
if output_type == 'json':
rval['results'] = {}
if output and stdout:
try:
rval['results'] = json.loads(stdout)
except ValueError as verr:
if "No JSON object could be decoded" in verr.args:
rval['err'] = verr.args
elif output_type == 'raw':
rval['results'] = stdout if output else ''
if self.verbose:
print("STDOUT: {0}".format(stdout))
print("STDERR: {0}".format(stderr))
if 'err' in rval or returncode != 0:
rval.update({"stderr": stderr,
"stdout": stdout})
return rval
class Utils(object): # pragma: no cover
''' utilities for openshiftcli modules '''
@staticmethod
def _write(filename, contents):
''' Actually write the file contents to disk. This helps with mocking. '''
with open(filename, 'w') as sfd:
sfd.write(contents)
@staticmethod
def create_tmp_file_from_contents(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
tmp = Utils.create_tmpfile(prefix=rname)
if ftype == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripDumper'):
Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
else:
Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
Utils._write(tmp, json.dumps(data))
else:
Utils._write(tmp, data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [tmp])
return tmp
@staticmethod
def create_tmpfile_copy(inc_file):
'''create a temporary copy of a file'''
tmpfile = Utils.create_tmpfile('lib_openshift-')
Utils._write(tmpfile, open(inc_file).read())
# Cleanup the tmpfile
atexit.register(Utils.cleanup, [tmpfile])
return tmpfile
@staticmethod
def create_tmpfile(prefix='tmp'):
''' Generates and returns a temporary file name '''
with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
return tmp.name
@staticmethod
def create_tmp_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_tmp_file_from_contents(item['path'] + '-',
item['data'],
ftype=content_type)
files.append({'name': os.path.basename(item['path']),
'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if 'metadata' in result and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
# AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
# pylint: disable=no-member
if hasattr(yaml, 'RoundTripLoader'):
contents = yaml.load(contents, yaml.RoundTripLoader)
else:
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
@staticmethod
def filter_versions(stdout):
''' filter the oc version output '''
version_dict = {}
version_search = ['oc', 'openshift', 'kubernetes']
for line in stdout.strip().split('\n'):
for term in version_search:
if not line:
continue
if line.startswith(term):
version_dict[term] = line.split()[-1]
# horrible hack to get openshift version in Openshift 3.2
# By default "oc version in 3.2 does not return an "openshift" version
if "openshift" not in version_dict:
version_dict["openshift"] = version_dict["oc"]
return version_dict
@staticmethod
def add_custom_versions(versions):
''' create custom versions strings '''
versions_dict = {}
for tech, version in versions.items():
# clean up "-" from version
if "-" in version:
version = version.split("-")[0]
if version.startswith('v'):
versions_dict[tech + '_numeric'] = version[1:].split('+')[0]
# "v3.3.0.33" is what we have, we want "3.3"
versions_dict[tech + '_short'] = version[1:4]
return versions_dict
@staticmethod
def openshift_installed():
''' check if openshift is installed '''
import yum
yum_base = yum.YumBase()
if yum_base.rpmdb.searchNevra(name='atomic-openshift'):
return True
return False
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if key not in user_def:
if debug:
print('User data does not have key [%s]' % key)
print('User data: %s' % user_def)
return False
if not isinstance(user_def[key], list):
if debug:
print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
return False
if len(user_def[key]) != len(value):
if debug:
print("List lengths are not equal.")
print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
print("user_def: %s" % user_def[key])
print("value: %s" % value)
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print('sending list - list')
print(type(values[0]))
print(type(values[1]))
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print('list compare returned false')
return False
elif value != user_def[key]:
if debug:
print('value should be identical')
print(user_def[key])
print(value)
return False
# recurse on a dictionary
elif isinstance(value, dict):
if key not in user_def:
if debug:
print("user_def does not have key [%s]" % key)
return False
if not isinstance(user_def[key], dict):
if debug:
print("dict returned false: not instance of dict")
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print("keys are not equal in dict")
print(user_values)
print(api_values)
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print("dict returned false")
print(result)
return False
# Verify each key, value pair is the same
else:
if key not in user_def or value != user_def[key]:
if debug:
print("value not equal; user_def does not have key")
print(key)
print(value)
if key in user_def:
print(user_def[key])
return False
if debug:
print('returning true')
return True
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self, ascommalist=''):
'''return all options as a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs'''
return self.stringify(ascommalist)
def stringify(self, ascommalist=''):
''' return the options hash as cli params in a string
if ascommalist is set to the name of a key, and
the value of that key is a dict, format the dict
as a list of comma delimited key=value pairs '''
rval = []
for key in sorted(self.config_options.keys()):
data = self.config_options[key]
if data['include'] \
and (data['value'] or isinstance(data['value'], int)):
if key == ascommalist:
val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())])
else:
val = data['value']
rval.append('--{}={}'.format(key.replace('_', '-'), val))
return rval
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/project.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class ProjectConfig(OpenShiftCLIConfig):
''' project config object '''
def __init__(self, rname, namespace, kubeconfig, project_options):
super(ProjectConfig, self).__init__(rname, None, kubeconfig, project_options)
class Project(Yedit):
''' Class to wrap the oc command line tools '''
annotations_path = "metadata.annotations"
kind = 'Project'
annotation_prefix = 'openshift.io/'
def __init__(self, content):
'''Project constructor'''
super(Project, self).__init__(content=content)
def get_annotations(self):
''' return the annotations'''
return self.get(Project.annotations_path) or {}
def add_annotations(self, inc_annos):
''' add an annotation to the other annotations'''
if not isinstance(inc_annos, list):
inc_annos = [inc_annos]
annos = self.get_annotations()
if not annos:
self.put(Project.annotations_path, inc_annos)
else:
for anno in inc_annos:
for key, value in anno.items():
annos[key] = value
return True
def find_annotation(self, key):
''' find an annotation'''
annotations = self.get_annotations()
for anno in annotations:
if Project.annotation_prefix + key == anno:
return annotations[anno]
return None
def delete_annotation(self, inc_anno_keys):
''' remove an annotation from a project'''
if not isinstance(inc_anno_keys, list):
inc_anno_keys = [inc_anno_keys]
annos = self.get(Project.annotations_path) or {}
if not annos:
return True
removed = False
for inc_anno in inc_anno_keys:
anno = self.find_annotation(inc_anno)
if anno:
del annos[Project.annotation_prefix + anno]
removed = True
return removed
def update_annotation(self, key, value):
''' remove an annotation for a project'''
annos = self.get(Project.annotations_path) or {}
if not annos:
return True
updated = False
anno = self.find_annotation(key)
if anno:
annos[Project.annotation_prefix + key] = value
updated = True
else:
self.add_annotations({Project.annotation_prefix + key: value})
return updated
# -*- -*- -*- End included fragment: lib/project.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_project.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class OCProject(OpenShiftCLI):
''' Project Class to manage project/namespace objects'''
kind = 'namespace'
def __init__(self,
config,
verbose=False):
''' Constructor for OCProject '''
super(OCProject, self).__init__(None, config.kubeconfig)
self.config = config
self._project = None
@property
def project(self):
''' property for project'''
if not self._project:
self.get()
return self._project
@project.setter
def project(self, data):
''' setter function for project propeorty'''
self._project = data
def exists(self):
''' return whether a project exists '''
if self.project:
return True
return False
def get(self):
'''return project '''
result = self._get(self.kind, self.config.name)
if result['returncode'] == 0:
self.project = Project(content=result['results'][0])
result['results'] = self.project.yaml_dict
elif 'namespaces "%s" not found' % self.config.name in result['stderr']:
result = {'results': [], 'returncode': 0}
return result
def delete(self):
'''delete the object'''
return self._delete(self.kind, self.config.name)
def create(self):
'''create a project '''
cmd = ['new-project', self.config.name]
cmd.extend(self.config.to_option_list())
return self.openshift_cmd(cmd, oadm=True)
def update(self):
'''update a project '''
if self.config.config_options['display_name']['value'] is not None:
self.project.update_annotation('display-name', self.config.config_options['display_name']['value'])
if self.config.config_options['description']['value'] is not None:
self.project.update_annotation('description', self.config.config_options['description']['value'])
# work around for immutable project field
if self.config.config_options['node_selector']['value'] is not None:
self.project.update_annotation('node-selector', self.config.config_options['node_selector']['value'])
return self._replace_content(self.kind, self.config.name, self.project.yaml_dict)
def needs_update(self):
''' verify an update is needed '''
if self.config.config_options['display_name']['value'] is not None:
result = self.project.find_annotation("display-name")
if result != self.config.config_options['display_name']['value']:
return True
if self.config.config_options['description']['value'] is not None:
result = self.project.find_annotation("description")
if result != self.config.config_options['description']['value']:
return True
if self.config.config_options['node_selector']['value'] is not None:
result = self.project.find_annotation("node-selector")
if result != self.config.config_options['node_selector']['value']:
return True
return False
# pylint: disable=too-many-return-statements,too-many-branches
@staticmethod
def run_ansible(params, check_mode):
'''run the idempotent ansible code'''
node_selector = None
if params['node_selector'] is not None:
node_selector = ','.join(params['node_selector'])
pconfig = ProjectConfig(
params['name'],
'None',
params['kubeconfig'],
{
'admin': {'value': params['admin'], 'include': True},
'admin_role': {'value': params['admin_role'], 'include': True},
'description': {'value': params['description'], 'include': True},
'display_name': {'value': params['display_name'], 'include': True},
'node_selector': {'value': node_selector, 'include': True},
},
)
oadm_project = OCProject(pconfig, verbose=params['debug'])
state = params['state']
api_rval = oadm_project.get()
#####
# Get
#####
if state == 'list':
return {'changed': False, 'results': api_rval['results'], 'state': state}
########
# Delete
########
if state == 'absent':
if oadm_project.exists():
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a delete.'}
api_rval = oadm_project.delete()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
return {'changed': False, 'state': state}
if state == 'present':
########
# Create
########
if not oadm_project.exists():
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a create.'}
# Create it here
api_rval = oadm_project.create()
# return the created object
api_rval = oadm_project.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
########
# Update
########
if oadm_project.needs_update():
if check_mode:
return {'changed': True, 'msg': 'CHECK_MODE: Would have performed an update.'}
api_rval = oadm_project.update()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = oadm_project.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': state}
return {'changed': False, 'results': api_rval, 'state': state}
return {'failed': True,
'changed': False,
'msg': 'Unknown state passed. [%s]' % state}
# -*- -*- -*- End included fragment: class/oc_project.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_project.py -*- -*- -*-
def main():
'''
ansible oc module for project
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
debug=dict(default=False, type='bool'),
name=dict(default=None, require=True, type='str'),
display_name=dict(default=None, type='str'),
node_selector=dict(default=None, type='list'),
description=dict(default=None, type='str'),
admin=dict(default=None, type='str'),
admin_role=dict(default='admin', type='str'),
),
supports_check_mode=True,
)
rval = OCProject.run_ansible(module.params, module.check_mode)
if 'failed' in rval:
return module.fail_json(**rval)
return module.exit_json(**rval)
if __name__ == '__main__':
main()
# -*- -*- -*- End included fragment: ansible/oc_project.py -*- -*- -*-
|
thoraxe/openshift-ansible
|
roles/lib_openshift/library/oc_project.py
|
Python
|
apache-2.0
| 58,493
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Internal helpers for tests in this directory."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
import sqlite3
from tensorflow.contrib.summary import summary_ops
from tensorflow.python.framework import test_util
class SummaryDbTest(test_util.TensorFlowTestCase):
"""Helper for summary database testing."""
def setUp(self):
super(SummaryDbTest, self).setUp()
self.db_path = os.path.join(self.get_temp_dir(), 'DbTest.sqlite')
if os.path.exists(self.db_path):
os.unlink(self.db_path)
self.db = sqlite3.connect(self.db_path)
self.create_summary_db_writer = functools.partial(
summary_ops.create_summary_db_writer,
db_uri=self.db_path,
experiment_name='experiment',
run_name='run',
user_name='user')
def tearDown(self):
self.db.close()
super(SummaryDbTest, self).tearDown()
def get_one(db, q, *p):
return db.execute(q, p).fetchone()[0]
def get_all(db, q, *p):
return unroll(db.execute(q, p).fetchall())
def unroll(list_of_tuples):
return sum(list_of_tuples, ())
|
alistairlow/tensorflow
|
tensorflow/contrib/summary/summary_test_internal.py
|
Python
|
apache-2.0
| 1,837
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .firefox.webdriver import WebDriver as Firefox # noqa
from .firefox.firefox_profile import FirefoxProfile # noqa
from .chrome.webdriver import WebDriver as Chrome # noqa
from .chrome.options import Options as ChromeOptions # noqa
from .ie.webdriver import WebDriver as Ie # noqa
from .edge.webdriver import WebDriver as Edge # noqa
from .opera.webdriver import WebDriver as Opera # noqa
from .safari.webdriver import WebDriver as Safari # noqa
from .blackberry.webdriver import WebDriver as BlackBerry # noqa
from .phantomjs.webdriver import WebDriver as PhantomJS # noqa
from .android.webdriver import WebDriver as Android # noqa
from .remote.webdriver import WebDriver as Remote # noqa
from .common.desired_capabilities import DesiredCapabilities # noqa
from .common.action_chains import ActionChains # noqa
from .common.touch_actions import TouchActions # noqa
from .common.proxy import Proxy # noqa
__version__ = '3.3.1'
|
Jarob22/selenium
|
py/selenium/webdriver/__init__.py
|
Python
|
apache-2.0
| 1,735
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from pants.base.exceptions import TaskError
from pants.engine.engine import Engine
from pants_test.base.context_utils import create_context
from pants_test.engine.base_engine_test import EngineTestBase
# TODO(John Sirois): Kill this test - the core Engine should unlearn dependencies ordering
# and leave this to subclasses that can form a strategy for this like RoundEngine.
class ExecutionOrderTest(EngineTestBase):
def test_execution_order(self):
self.install_task('invalidate')
self.install_task('clean-all', dependencies=['invalidate'])
self.install_task('resolve')
self.install_task('javac', dependencies=['resolve'], goal='compile')
self.install_task('scalac', dependencies=['resolve'], goal='compile')
self.install_task('junit', dependencies=['compile'], goal='test')
self.assertEqual(self.as_goals('invalidate', 'clean-all', 'resolve', 'compile', 'test'),
list(Engine.execution_order(self.as_goals('clean-all', 'test'))))
self.assertEqual(self.as_goals('resolve', 'compile', 'test', 'invalidate', 'clean-all'),
list(Engine.execution_order(self.as_goals('test', 'clean-all'))))
class EngineTest(EngineTestBase):
class RecordingEngine(Engine):
def __init__(self, action=None):
super(EngineTest.RecordingEngine, self).__init__()
self._action = action
self._attempts = []
@property
def attempts(self):
return self._attempts
def attempt(self, context, goals):
self._attempts.append((context, goals))
if self._action:
self._action()
def setUp(self):
self.context = create_context()
def assert_attempt(self, engine, *goal_names):
self.assertEqual(1, len(engine.attempts))
context, goals = engine.attempts[0]
self.assertEqual(self.context, context)
self.assertEqual(self.as_goals(*goal_names), goals)
def test_execute_success(self):
engine = self.RecordingEngine()
result = engine.execute(self.context, self.as_goals('one', 'two'))
self.assertEqual(0, result)
self.assert_attempt(engine, 'one', 'two')
def _throw(self, error):
def throw():
raise error
return throw
def test_execute_raise(self):
engine = self.RecordingEngine(action=self._throw(TaskError()))
result = engine.execute(self.context, self.as_goals('three'))
self.assertEqual(1, result)
self.assert_attempt(engine, 'three')
def test_execute_code(self):
engine = self.RecordingEngine(action=self._throw(TaskError(exit_code=42)))
result = engine.execute(self.context, self.as_goals('four', 'five', 'six'))
self.assertEqual(42, result)
self.assert_attempt(engine, 'four', 'five', 'six')
|
square/pants
|
tests/python/pants_test/engine/test_engine.py
|
Python
|
apache-2.0
| 2,994
|
"""This component provides basic support for Foscam IP cameras."""
import asyncio
from libpyfoscam import FoscamCamera
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_STREAM, Camera
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from .const import (
CONF_RTSP_PORT,
CONF_STREAM,
DOMAIN,
LOGGER,
SERVICE_PTZ,
SERVICE_PTZ_PRESET,
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required("ip"): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_NAME, default="Foscam Camera"): cv.string,
vol.Optional(CONF_PORT, default=88): cv.port,
vol.Optional(CONF_RTSP_PORT): cv.port,
}
)
DIR_UP = "up"
DIR_DOWN = "down"
DIR_LEFT = "left"
DIR_RIGHT = "right"
DIR_TOPLEFT = "top_left"
DIR_TOPRIGHT = "top_right"
DIR_BOTTOMLEFT = "bottom_left"
DIR_BOTTOMRIGHT = "bottom_right"
MOVEMENT_ATTRS = {
DIR_UP: "ptz_move_up",
DIR_DOWN: "ptz_move_down",
DIR_LEFT: "ptz_move_left",
DIR_RIGHT: "ptz_move_right",
DIR_TOPLEFT: "ptz_move_top_left",
DIR_TOPRIGHT: "ptz_move_top_right",
DIR_BOTTOMLEFT: "ptz_move_bottom_left",
DIR_BOTTOMRIGHT: "ptz_move_bottom_right",
}
DEFAULT_TRAVELTIME = 0.125
ATTR_MOVEMENT = "movement"
ATTR_TRAVELTIME = "travel_time"
ATTR_PRESET_NAME = "preset_name"
PTZ_GOTO_PRESET_COMMAND = "ptz_goto_preset"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a Foscam IP Camera."""
LOGGER.warning(
"Loading foscam via platform config is deprecated, it will be automatically imported. Please remove it afterwards."
)
config_new = {
CONF_NAME: config[CONF_NAME],
CONF_HOST: config["ip"],
CONF_PORT: config[CONF_PORT],
CONF_USERNAME: config[CONF_USERNAME],
CONF_PASSWORD: config[CONF_PASSWORD],
CONF_STREAM: "Main",
CONF_RTSP_PORT: config.get(CONF_RTSP_PORT, 554),
}
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config_new
)
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add a Foscam IP camera from a config entry."""
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_PTZ,
{
vol.Required(ATTR_MOVEMENT): vol.In(
[
DIR_UP,
DIR_DOWN,
DIR_LEFT,
DIR_RIGHT,
DIR_TOPLEFT,
DIR_TOPRIGHT,
DIR_BOTTOMLEFT,
DIR_BOTTOMRIGHT,
]
),
vol.Optional(ATTR_TRAVELTIME, default=DEFAULT_TRAVELTIME): cv.small_float,
},
"async_perform_ptz",
)
platform.async_register_entity_service(
SERVICE_PTZ_PRESET,
{
vol.Required(ATTR_PRESET_NAME): cv.string,
},
"async_perform_ptz_preset",
)
camera = FoscamCamera(
config_entry.data[CONF_HOST],
config_entry.data[CONF_PORT],
config_entry.data[CONF_USERNAME],
config_entry.data[CONF_PASSWORD],
verbose=False,
)
async_add_entities([HassFoscamCamera(camera, config_entry)])
class HassFoscamCamera(Camera):
"""An implementation of a Foscam IP camera."""
def __init__(self, camera, config_entry):
"""Initialize a Foscam camera."""
super().__init__()
self._foscam_session = camera
self._name = config_entry.title
self._username = config_entry.data[CONF_USERNAME]
self._password = config_entry.data[CONF_PASSWORD]
self._stream = config_entry.data[CONF_STREAM]
self._unique_id = config_entry.entry_id
self._rtsp_port = config_entry.data[CONF_RTSP_PORT]
self._motion_status = False
async def async_added_to_hass(self):
"""Handle entity addition to hass."""
# Get motion detection status
ret, response = await self.hass.async_add_executor_job(
self._foscam_session.get_motion_detect_config
)
if ret == -3:
LOGGER.info(
"Can't get motion detection status, camera %s configured with non-admin user",
self._name,
)
elif ret != 0:
LOGGER.error(
"Error getting motion detection status of %s: %s", self._name, ret
)
else:
self._motion_status = response == 1
@property
def unique_id(self):
"""Return the entity unique ID."""
return self._unique_id
def camera_image(self):
"""Return a still image response from the camera."""
# Send the request to snap a picture and return raw jpg data
# Handle exception if host is not reachable or url failed
result, response = self._foscam_session.snap_picture_2()
if result != 0:
return None
return response
@property
def supported_features(self):
"""Return supported features."""
if self._rtsp_port:
return SUPPORT_STREAM
return None
async def stream_source(self):
"""Return the stream source."""
if self._rtsp_port:
return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
return None
@property
def motion_detection_enabled(self):
"""Camera Motion Detection Status."""
return self._motion_status
def enable_motion_detection(self):
"""Enable motion detection in camera."""
try:
ret = self._foscam_session.enable_motion_detection()
if ret != 0:
if ret == -3:
LOGGER.info(
"Can't set motion detection status, camera %s configured with non-admin user",
self._name,
)
return
self._motion_status = True
except TypeError:
LOGGER.debug(
"Failed enabling motion detection on '%s'. Is it supported by the device?",
self._name,
)
def disable_motion_detection(self):
"""Disable motion detection."""
try:
ret = self._foscam_session.disable_motion_detection()
if ret != 0:
if ret == -3:
LOGGER.info(
"Can't set motion detection status, camera %s configured with non-admin user",
self._name,
)
return
self._motion_status = False
except TypeError:
LOGGER.debug(
"Failed disabling motion detection on '%s'. Is it supported by the device?",
self._name,
)
async def async_perform_ptz(self, movement, travel_time):
"""Perform a PTZ action on the camera."""
LOGGER.debug("PTZ action '%s' on %s", movement, self._name)
movement_function = getattr(self._foscam_session, MOVEMENT_ATTRS[movement])
ret, _ = await self.hass.async_add_executor_job(movement_function)
if ret != 0:
LOGGER.error("Error moving %s '%s': %s", movement, self._name, ret)
return
await asyncio.sleep(travel_time)
ret, _ = await self.hass.async_add_executor_job(
self._foscam_session.ptz_stop_run
)
if ret != 0:
LOGGER.error("Error stopping movement on '%s': %s", self._name, ret)
return
async def async_perform_ptz_preset(self, preset_name):
"""Perform a PTZ preset action on the camera."""
LOGGER.debug("PTZ preset '%s' on %s", preset_name, self._name)
preset_function = getattr(self._foscam_session, PTZ_GOTO_PRESET_COMMAND)
ret, _ = await self.hass.async_add_executor_job(preset_function, preset_name)
if ret != 0:
LOGGER.error(
"Error moving to preset %s on '%s': %s", preset_name, self._name, ret
)
return
@property
def name(self):
"""Return the name of this camera."""
return self._name
|
partofthething/home-assistant
|
homeassistant/components/foscam/camera.py
|
Python
|
apache-2.0
| 8,589
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import ctypes
import math
import threading
from . import interface
from pyglet.debug import debug_print
from pyglet.media.events import MediaEvent
from pyglet.media.drivers.base import AbstractAudioDriver, AbstractAudioPlayer
from pyglet.media.listener import AbstractListener
from pyglet.media.threads import PlayerWorker
_debug = debug_print('debug_media')
def _convert_coordinates(coordinates):
x, y, z = coordinates
return (x, y, -z)
def _gain2db(gain):
"""
Convert linear gain in range [0.0, 1.0] to 100ths of dB.
Power gain = P1/P2
dB = 10 log(P1/P2)
dB * 100 = 1000 * log(power gain)
"""
if gain <= 0:
return -10000
return max(-10000, min(int(1000 * math.log10(min(gain, 1))), 0))
def _db2gain(db):
"""Convert 100ths of dB to linear gain."""
return math.pow(10.0, float(db)/1000.0)
class DirectSoundAudioPlayer(AbstractAudioPlayer):
# Need to cache these because pyglet API allows update separately, but
# DSound requires both to be set at once.
_cone_inner_angle = 360
_cone_outer_angle = 360
min_buffer_size = 9600
def __init__(self, driver, ds_driver, source_group, player):
super(DirectSoundAudioPlayer, self).__init__(source_group, player)
self.driver = driver
self._ds_driver = ds_driver
# Locking strategy:
# All DirectSound calls should be locked. All instance vars relating
# to buffering/filling/time/events should be locked (used by both
# application and worker thread). Other instance vars (consts and
# 3d vars) do not need to be locked.
self._lock = threading.RLock()
# Desired play state (may be actually paused due to underrun -- not
# implemented yet).
self._playing = False
# Up to one audio data may be buffered if too much data was received
# from the source that could not be written immediately into the
# buffer. See refill().
self._audiodata_buffer = None
# Theoretical write and play cursors for an infinite buffer. play
# cursor is always <= write cursor (when equal, underrun is
# happening).
self._write_cursor = 0
self._play_cursor = 0
# Cursor position of end of data. Silence is written after
# eos for one buffer size.
self._eos_cursor = None
# Indexes into DSound circular buffer. Complications ensue wrt each
# other to avoid writing over the play cursor. See get_write_size and
# write().
self._play_cursor_ring = 0
self._write_cursor_ring = 0
# List of (play_cursor, MediaEvent), in sort order
self._events = []
# List of (cursor, timestamp), in sort order (cursor gives expiry
# place of the timestamp)
self._timestamps = []
audio_format = source_group.audio_format
# DSound buffer
self._ds_buffer = self._ds_driver.create_buffer(audio_format)
self._buffer_size = self._ds_buffer.buffer_size
self._ds_buffer.current_position = 0
self.refill(self._buffer_size)
def __del__(self):
try:
self.delete()
except:
pass
def delete(self):
if self.driver and self.driver.worker:
self.driver.worker.remove(self)
with self._lock:
self._ds_buffer = None
def play(self):
assert _debug('DirectSound play')
self.driver.worker.add(self)
with self._lock:
if not self._playing:
self._get_audiodata() # prebuffer if needed
self._playing = True
self._ds_buffer.play()
assert _debug('return DirectSound play')
def stop(self):
assert _debug('DirectSound stop')
with self._lock:
if self._playing:
self._playing = False
self._ds_buffer.stop()
assert _debug('return DirectSound stop')
def clear(self):
assert _debug('DirectSound clear')
with self._lock:
self._ds_buffer.current_position = 0
self._play_cursor_ring = self._write_cursor_ring = 0
self._play_cursor = self._write_cursor
self._eos_cursor = None
self._audiodata_buffer = None
del self._events[:]
del self._timestamps[:]
def refill(self, write_size):
with self._lock:
while write_size > 0:
assert _debug('refill, write_size =', write_size)
audio_data = self._get_audiodata()
if audio_data is not None:
assert _debug('write', audio_data.length)
length = min(write_size, audio_data.length)
self.write(audio_data, length)
write_size -= length
else:
assert _debug('write silence')
self.write(None, write_size)
write_size = 0
def _has_underrun(self):
return (self._eos_cursor is not None
and self._play_cursor > self._eos_cursor)
def _dispatch_new_event(self, event_name):
MediaEvent(0, event_name)._sync_dispatch_to_player(self.player)
def _get_audiodata(self):
if self._audiodata_buffer is None or self._audiodata_buffer.length == 0:
self._get_new_audiodata()
return self._audiodata_buffer
def _get_new_audiodata(self):
assert _debug('Getting new audio data buffer.')
self._audiodata_buffer = self.source_group.get_audio_data(self._buffer_size)
if self._audiodata_buffer is not None:
assert _debug('New audio data available: {} bytes'.format(self._audiodata_buffer.length))
if self._eos_cursor is not None:
self._move_write_cursor_after_eos()
self._add_audiodata_events(self._audiodata_buffer)
self._add_audiodata_timestamp(self._audiodata_buffer)
self._eos_cursor = None
elif self._eos_cursor is None:
assert _debug('No more audio data.')
self._eos_cursor = self._write_cursor
def _move_write_cursor_after_eos(self):
# Set the write cursor back to eos_cursor or play_cursor to prevent gaps
if self._play_cursor < self._eos_cursor:
cursor_diff = self._write_cursor - self._eos_cursor
assert _debug('Moving cursor back', cursor_diff)
self._write_cursor = self._eos_cursor
self._write_cursor_ring -= cursor_diff
self._write_cursor_ring %= self._buffer_size
else:
cursor_diff = self._play_cursor - self._eos_cursor
assert _debug('Moving cursor back', cursor_diff)
self._write_cursor = self._play_cursor
self._write_cursor_ring -= cursor_diff
self._write_cursor_ring %= self._buffer_size
def _add_audiodata_events(self, audio_data):
for event in audio_data.events:
event_cursor = self._write_cursor + event.timestamp * \
self.source_group.audio_format.bytes_per_second
assert _debug('Adding event', event, 'at', event_cursor)
self._events.append((event_cursor, event))
def _add_audiodata_timestamp(self, audio_data):
ts_cursor = self._write_cursor + audio_data.length
self._timestamps.append(
(ts_cursor, audio_data.timestamp + audio_data.duration))
def update_play_cursor(self):
with self._lock:
play_cursor_ring = self._ds_buffer.current_position.play_cursor
if play_cursor_ring < self._play_cursor_ring:
# Wrapped around
self._play_cursor += self._buffer_size - self._play_cursor_ring
self._play_cursor_ring = 0
self._play_cursor += play_cursor_ring - self._play_cursor_ring
self._play_cursor_ring = play_cursor_ring
self._dispatch_pending_events()
self._cleanup_timestamps()
self._check_underrun()
def _dispatch_pending_events(self):
with self._lock:
pending_events = []
while self._events and self._events[0][0] <= self._play_cursor:
_, event = self._events.pop(0)
pending_events.append(event)
assert _debug('Dispatching pending events: {}'.format(pending_events))
assert _debug('Remaining events: {}'.format(self._events))
for event in pending_events:
event._sync_dispatch_to_player(self.player)
def _cleanup_timestamps(self):
with self._lock:
while self._timestamps and self._timestamps[0][0] < self._play_cursor:
del self._timestamps[0]
def _check_underrun(self):
if self._playing and self._has_underrun():
assert _debug('underrun, stopping')
self.stop()
self._dispatch_new_event('on_eos')
self._dispatch_new_event('on_source_group_eos')
def get_write_size(self):
self.update_play_cursor()
with self._lock:
play_cursor = self._play_cursor
write_cursor = self._write_cursor
return self._buffer_size - max(write_cursor - play_cursor, 0)
def write(self, audio_data, length):
# Pass audio_data=None to write silence
if length == 0:
return 0
with self._lock:
write_ptr = self._ds_buffer.lock(self._write_cursor_ring, length)
assert 0 < length <= self._buffer_size
assert length == write_ptr.audio_length_1.value + write_ptr.audio_length_2.value
if audio_data:
ctypes.memmove(write_ptr.audio_ptr_1, audio_data.data, write_ptr.audio_length_1.value)
audio_data.consume(write_ptr.audio_length_1.value, self.source_group.audio_format)
if write_ptr.audio_length_2.value > 0:
ctypes.memmove(write_ptr.audio_ptr_2, audio_data.data, write_ptr.audio_length_2.value)
audio_data.consume(write_ptr.audio_length_2.value, self.source_group.audio_format)
else:
if self.source_group.audio_format.sample_size == 8:
c = 0x80
else:
c = 0
ctypes.memset(write_ptr.audio_ptr_1, c, write_ptr.audio_length_1.value)
if write_ptr.audio_length_2.value > 0:
ctypes.memset(write_ptr.audio_ptr_2, c, write_ptr.audio_length_2.value)
self._ds_buffer.unlock(write_ptr)
self._write_cursor += length
self._write_cursor_ring += length
self._write_cursor_ring %= self._buffer_size
def get_time(self):
with self._lock:
if self._timestamps:
cursor, ts = self._timestamps[0]
result = ts + (self._play_cursor - cursor) / \
float(self.source_group.audio_format.bytes_per_second)
else:
result = None
return result
def set_volume(self, volume):
with self._lock:
self._ds_buffer.volume = _gain2db(volume)
def set_position(self, position):
if self._ds_buffer.is3d:
with self._lock:
self._ds_buffer.position = _convert_coordinates(position)
def set_min_distance(self, min_distance):
if self._ds_buffer.is3d:
with self._lock:
self._ds_buffer.min_distance = min_distance
def set_max_distance(self, max_distance):
if self._ds_buffer.is3d:
with self._lock:
self._ds_buffer.max_distance = max_distance
def set_pitch(self, pitch):
frequency = int(pitch * self.source_group.audio_format.sample_rate)
with self._lock:
self._ds_buffer.frequency = frequency
def set_cone_orientation(self, cone_orientation):
if self._ds_buffer.is3d:
with self._lock:
self._ds_buffer.cone_orientation = _convert_coordinates(cone_orientation)
def set_cone_inner_angle(self, cone_inner_angle):
if self._ds_buffer.is3d:
self._cone_inner_angle = int(cone_inner_angle)
self._set_cone_angles()
def set_cone_outer_angle(self, cone_outer_angle):
if self._ds_buffer.is3d:
self._cone_outer_angle = int(cone_outer_angle)
self._set_cone_angles()
def _set_cone_angles(self):
inner = min(self._cone_inner_angle, self._cone_outer_angle)
outer = max(self._cone_inner_angle, self._cone_outer_angle)
with self._lock:
self._ds_buffer.set_cone_angles(inner, outer)
def set_cone_outer_gain(self, cone_outer_gain):
if self._ds_buffer.is3d:
volume = _gain2db(cone_outer_gain)
with self._lock:
self._ds_buffer.cone_outside_volume = volume
class DirectSoundDriver(AbstractAudioDriver):
def __init__(self):
self._ds_driver = interface.DirectSoundDriver()
self._ds_listener = self._ds_driver.create_listener()
assert self._ds_driver is not None
assert self._ds_listener is not None
# Create worker thread
self.worker = PlayerWorker()
self.worker.start()
def __del__(self):
try:
if self._ds_driver:
self.delete()
except:
pass
def create_audio_player(self, source_group, player):
assert self._ds_driver is not None
return DirectSoundAudioPlayer(self, self._ds_driver, source_group, player)
def get_listener(self):
assert self._ds_driver is not None
assert self._ds_listener is not None
return DirectSoundListener(self._ds_listener, self._ds_driver.primary_buffer)
def delete(self):
self.worker.stop()
self._ds_listener = None
self._ds_driver = None
class DirectSoundListener(AbstractListener):
def __init__(self, ds_listener, ds_buffer):
self._ds_listener = ds_listener
self._ds_buffer = ds_buffer
def _set_volume(self, volume):
self._volume = volume
self._ds_buffer.volume = _gain2db(volume)
def _set_position(self, position):
self._position = position
self._ds_listener.position = _convert_coordinates(position)
def _set_forward_orientation(self, orientation):
self._forward_orientation = orientation
self._set_orientation()
def _set_up_orientation(self, orientation):
self._up_orientation = orientation
self._set_orientation()
def _set_orientation(self):
self._ds_listener.orientation = (_convert_coordinates(self._forward_orientation)
+ _convert_coordinates(self._up_orientation))
|
nicememory/pie
|
pyglet/pyglet/media/drivers/directsound/adaptation.py
|
Python
|
apache-2.0
| 17,118
|
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import operator
import os
import mock
import unittest
from contextlib import contextmanager
from shutil import rmtree
from StringIO import StringIO
from tempfile import mkdtemp
from xml.dom import minidom
from eventlet import spawn, Timeout, listen
import simplejson
from swift.common.swob import Request, HeaderKeyDict
import swift.container
from swift.container import server as container_server
from swift.common.utils import mkdirs, public, replication
from swift.common.ondisk import normalize_timestamp
from test.unit import fake_http_connect
@contextmanager
def save_globals():
orig_http_connect = getattr(swift.container.server, 'http_connect',
None)
try:
yield True
finally:
swift.container.server.http_connect = orig_http_connect
class TestContainerController(unittest.TestCase):
"""Test swift.container.server.ContainerController"""
def setUp(self):
"""Set up for testing swift.object_server.ObjectController"""
self.testdir = os.path.join(mkdtemp(),
'tmp_test_object_server_ObjectController')
mkdirs(self.testdir)
rmtree(self.testdir)
mkdirs(os.path.join(self.testdir, 'sda1'))
mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
self.controller = container_server.ContainerController(
{'devices': self.testdir, 'mount_check': 'false'})
def tearDown(self):
"""Tear down for testing swift.object_server.ObjectController"""
rmtree(os.path.dirname(self.testdir), ignore_errors=1)
def test_acl_container(self):
# Ensure no acl by default
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0'})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('201'))
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assert_('x-container-read' not in response.headers)
self.assert_('x-container-write' not in response.headers)
# Ensure POSTing acls works
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': '1', 'X-Container-Read': '.r:*',
'X-Container-Write': 'account:user'})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('204'))
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assertEquals(response.headers.get('x-container-read'), '.r:*')
self.assertEquals(response.headers.get('x-container-write'),
'account:user')
# Ensure we can clear acls on POST
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': '3', 'X-Container-Read': '',
'X-Container-Write': ''})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('204'))
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assert_('x-container-read' not in response.headers)
self.assert_('x-container-write' not in response.headers)
# Ensure PUTing acls works
req = Request.blank(
'/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '4', 'X-Container-Read': '.r:*',
'X-Container-Write': 'account:user'})
resp = req.get_response(self.controller)
self.assert_(resp.status.startswith('201'))
req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'HEAD'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assertEquals(response.headers.get('x-container-read'), '.r:*')
self.assertEquals(response.headers.get('x-container-write'),
'account:user')
def test_HEAD(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD',
'HTTP_X_TIMESTAMP': '0'})
response = req.get_response(self.controller)
self.assert_(response.status.startswith('204'))
self.assertEquals(int(response.headers['x-container-bytes-used']), 0)
self.assertEquals(int(response.headers['x-container-object-count']), 0)
req2 = Request.blank(
'/sda1/p/a/c/o', environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1', 'HTTP_X_SIZE': 42,
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x'})
req2.get_response(self.controller)
response = req.get_response(self.controller)
self.assertEquals(int(response.headers['x-container-bytes-used']), 42)
self.assertEquals(int(response.headers['x-container-object-count']), 1)
def test_HEAD_not_found(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_HEAD_invalid_partition(self):
req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'HEAD',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_HEAD_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'HEAD',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_HEAD_invalid_content_type(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'},
headers={'Accept': 'application/plain'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 406)
def test_HEAD_invalid_format(self):
format = '%D1%BD%8A9' # invalid UTF-8; should be %E1%BD%8A9 (E -> D)
req = Request.blank(
'/sda1/p/a/c?format=' + format,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_PUT(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
def test_PUT_obj_not_found(self):
req = Request.blank(
'/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '1', 'X-Size': '0',
'X-Content-Type': 'text/plain', 'X-ETag': 'e'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_PUT_GET_metadata(self):
# Set metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Container-Meta-Test': 'Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value')
# Set another metadata header, ensuring old one doesn't disappear
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Container-Meta-Test2': 'Value2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value')
self.assertEquals(resp.headers.get('x-container-meta-test2'), 'Value2')
# Update metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(3),
'X-Container-Meta-Test': 'New Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Send old update to metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(2),
'X-Container-Meta-Test': 'Old Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Remove metadata header (by setting it to empty)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(4),
'X-Container-Meta-Test': ''})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assert_('x-container-meta-test' not in resp.headers)
def test_PUT_invalid_partition(self):
req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_PUT_timestamp_not_float(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_PUT_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_POST_HEAD_metadata(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1)})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# Set metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Container-Meta-Test': 'Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value')
# Update metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(3),
'X-Container-Meta-Test': 'New Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Send old update to metadata header
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(2),
'X-Container-Meta-Test': 'Old Value'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assertEquals(resp.headers.get('x-container-meta-test'),
'New Value')
# Remove metadata header (by setting it to empty)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(4),
'X-Container-Meta-Test': ''})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
self.assert_('x-container-meta-test' not in resp.headers)
def test_POST_invalid_partition(self):
req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_POST_timestamp_not_float(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_POST_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_POST_invalid_container_sync_to(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'},
headers={'x-container-sync-to': '192.168.0.1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_POST_after_DELETE_not_found(self):
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '1'})
resp = req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c/',
environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': '3'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_obj_not_found(self):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_container_not_found(self):
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_PUT_utf8(self):
snowman = u'\u2603'
container_name = snowman.encode('utf-8')
req = Request.blank(
'/sda1/p/a/%s' % container_name, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
def test_account_update_mismatched_host_device(self):
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'},
headers={'X-Timestamp': '0000000001.00000',
'X-Account-Host': '127.0.0.1:0',
'X-Account-Partition': '123',
'X-Account-Device': 'sda1,sda2'})
broker = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
resp = self.controller.account_update(req, 'a', 'c', broker)
self.assertEquals(resp.status_int, 400)
def test_account_update_account_override_deleted(self):
bindsock = listen(('127.0.0.1', 0))
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'},
headers={'X-Timestamp': '0000000001.00000',
'X-Account-Host': '%s:%s' %
bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1',
'X-Account-Override-Deleted': 'yes'})
with save_globals():
new_connect = fake_http_connect(200, count=123)
swift.container.server.http_connect = new_connect
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
def test_PUT_account_update(self):
bindsock = listen(('127.0.0.1', 0))
def accept(return_code, expected_timestamp):
try:
with Timeout(3):
sock, addr = bindsock.accept()
inc = sock.makefile('rb')
out = sock.makefile('wb')
out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
return_code)
out.flush()
self.assertEquals(inc.readline(),
'PUT /sda1/123/a/c HTTP/1.1\r\n')
headers = {}
line = inc.readline()
while line and line != '\r\n':
headers[line.split(':')[0].lower()] = \
line.split(':')[1].strip()
line = inc.readline()
self.assertEquals(headers['x-put-timestamp'],
expected_timestamp)
except BaseException as err:
return err
return None
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000001.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 201, '0000000001.00000')
try:
with Timeout(3):
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
finally:
err = event.wait()
if err:
raise Exception(err)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000003.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 404, '0000000003.00000')
try:
with Timeout(3):
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
finally:
err = event.wait()
if err:
raise Exception(err)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000005.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 503, '0000000005.00000')
got_exc = False
try:
with Timeout(3):
resp = req.get_response(self.controller)
except BaseException as err:
got_exc = True
finally:
err = event.wait()
if err:
raise Exception(err)
self.assert_(not got_exc)
def test_PUT_reset_container_sync(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
db.set_x_container_sync_points(123, 456)
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to same value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to new value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
def test_POST_reset_container_sync(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
db.set_x_container_sync_points(123, 456)
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to same value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], 123)
self.assertEquals(info['x_container_sync_point2'], 456)
# Set to new value
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'x-timestamp': '1',
'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
info = db.get_info()
self.assertEquals(info['x_container_sync_point1'], -1)
self.assertEquals(info['x_container_sync_point2'], -1)
def test_DELETE(self):
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': '3'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_not_found(self):
# Even if the container wasn't previously heard of, the container
# server will accept the delete and replicate it to where it belongs
# later.
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE', 'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_object(self):
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0',
'HTTP_X_SIZE': 1, 'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '3'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 409)
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '4'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '5'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': '6'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_DELETE_account_update(self):
bindsock = listen(('127.0.0.1', 0))
def accept(return_code, expected_timestamp):
try:
with Timeout(3):
sock, addr = bindsock.accept()
inc = sock.makefile('rb')
out = sock.makefile('wb')
out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
return_code)
out.flush()
self.assertEquals(inc.readline(),
'PUT /sda1/123/a/c HTTP/1.1\r\n')
headers = {}
line = inc.readline()
while line and line != '\r\n':
headers[line.split(':')[0].lower()] = \
line.split(':')[1].strip()
line = inc.readline()
self.assertEquals(headers['x-delete-timestamp'],
expected_timestamp)
except BaseException as err:
return err
return None
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '0000000002.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 204, '0000000002.00000')
try:
with Timeout(3):
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
finally:
err = event.wait()
if err:
raise Exception(err)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '2'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '0000000003.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 404, '0000000003.00000')
try:
with Timeout(3):
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
finally:
err = event.wait()
if err:
raise Exception(err)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '4'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '0000000005.00000',
'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123',
'X-Account-Device': 'sda1'})
event = spawn(accept, 503, '0000000005.00000')
got_exc = False
try:
with Timeout(3):
resp = req.get_response(self.controller)
except BaseException as err:
got_exc = True
finally:
err = event.wait()
if err:
raise Exception(err)
self.assert_(not got_exc)
def test_DELETE_invalid_partition(self):
req = Request.blank(
'/sda1/./a/c', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_DELETE_timestamp_not_float(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400)
def test_DELETE_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_GET_over_limit(self):
req = Request.blank(
'/sda1/p/a/c?limit=%d' %
(container_server.CONTAINER_LISTING_LIMIT + 1),
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 412)
def test_GET_json(self):
# make a container
req = Request.blank(
'/sda1/p/a/jsonc', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# test an empty container
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 200)
self.assertEquals(simplejson.loads(resp.body), [])
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/jsonc/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test format
json_body = [{"name": "0",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"},
{"name": "1",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"},
{"name": "2",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}]
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(simplejson.loads(resp.body), json_body)
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
for accept in ('application/json', 'application/json;q=1.0,*/*;q=0.9',
'*/*;q=0.9,application/json;q=1.0', 'application/*'):
req = Request.blank(
'/sda1/p/a/jsonc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
simplejson.loads(resp.body), json_body,
'Invalid body for Accept: %s' % accept)
self.assertEquals(
resp.content_type, 'application/json',
'Invalid content_type for Accept: %s' % accept)
req = Request.blank(
'/sda1/p/a/jsonc',
environ={'REQUEST_METHOD': 'HEAD'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.content_type, 'application/json',
'Invalid content_type for Accept: %s' % accept)
def test_GET_plain(self):
# make a container
req = Request.blank(
'/sda1/p/a/plainc', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# test an empty container
req = Request.blank(
'/sda1/p/a/plainc', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/plainc/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
plain_body = '0\n1\n2\n'
req = Request.blank('/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.body, plain_body)
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank('/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
for accept in ('', 'text/plain', 'application/xml;q=0.8,*/*;q=0.9',
'*/*;q=0.9,application/xml;q=0.8', '*/*',
'text/plain,application/xml'):
req = Request.blank(
'/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.body, plain_body,
'Invalid body for Accept: %s' % accept)
self.assertEquals(
resp.content_type, 'text/plain',
'Invalid content_type for Accept: %s' % accept)
req = Request.blank(
'/sda1/p/a/plainc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.content_type, 'text/plain',
'Invalid content_type for Accept: %s' % accept)
# test conflicting formats
req = Request.blank(
'/sda1/p/a/plainc?format=plain',
environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/json'
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.body, plain_body)
# test unknown format uses default plain
req = Request.blank(
'/sda1/p/a/plainc?format=somethingelse',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.body, plain_body)
def test_GET_json_last_modified(self):
# make a container
req = Request.blank(
'/sda1/p/a/jsonc', environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i, d in [(0, 1.5), (1, 1.0), ]:
req = Request.blank(
'/sda1/p/a/jsonc/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': d,
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test format
# last_modified format must be uniform, even when there are not msecs
json_body = [{"name": "0",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.500000"},
{"name": "1",
"hash": "x",
"bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}, ]
req = Request.blank(
'/sda1/p/a/jsonc?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(simplejson.loads(resp.body), json_body)
self.assertEquals(resp.charset, 'utf-8')
def test_GET_xml(self):
# make a container
req = Request.blank(
'/sda1/p/a/xmlc', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/xmlc/%s' % i,
environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
xml_body = '<?xml version="1.0" encoding="UTF-8"?>\n' \
'<container name="xmlc">' \
'<object><name>0</name><hash>x</hash><bytes>0</bytes>' \
'<content_type>text/plain</content_type>' \
'<last_modified>1970-01-01T00:00:01.000000' \
'</last_modified></object>' \
'<object><name>1</name><hash>x</hash><bytes>0</bytes>' \
'<content_type>text/plain</content_type>' \
'<last_modified>1970-01-01T00:00:01.000000' \
'</last_modified></object>' \
'<object><name>2</name><hash>x</hash><bytes>0</bytes>' \
'<content_type>text/plain</content_type>' \
'<last_modified>1970-01-01T00:00:01.000000' \
'</last_modified></object>' \
'</container>'
# tests
req = Request.blank(
'/sda1/p/a/xmlc?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
self.assertEquals(resp.body, xml_body)
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/xmlc?format=xml',
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
for xml_accept in (
'application/xml', 'application/xml;q=1.0,*/*;q=0.9',
'*/*;q=0.9,application/xml;q=1.0', 'application/xml,text/xml'):
req = Request.blank(
'/sda1/p/a/xmlc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = xml_accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.body, xml_body,
'Invalid body for Accept: %s' % xml_accept)
self.assertEquals(
resp.content_type, 'application/xml',
'Invalid content_type for Accept: %s' % xml_accept)
req = Request.blank(
'/sda1/p/a/xmlc',
environ={'REQUEST_METHOD': 'HEAD'})
req.accept = xml_accept
resp = req.get_response(self.controller)
self.assertEquals(
resp.content_type, 'application/xml',
'Invalid content_type for Accept: %s' % xml_accept)
req = Request.blank(
'/sda1/p/a/xmlc',
environ={'REQUEST_METHOD': 'GET'})
req.accept = 'text/xml'
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/xml')
self.assertEquals(resp.body, xml_body)
def test_GET_marker(self):
# make a container
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/c/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x', 'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test limit with marker
req = Request.blank('/sda1/p/a/c?limit=2&marker=1',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
result = resp.body.split()
self.assertEquals(result, ['2', ])
def test_weird_content_types(self):
snowman = u'\u2603'
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i, ctype in enumerate((snowman.encode('utf-8'),
'text/plain; charset="utf-8"')):
req = Request.blank(
'/sda1/p/a/c/%s' % i, environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1', 'HTTP_X_CONTENT_TYPE': ctype,
'HTTP_X_ETAG': 'x', 'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
result = [x['content_type'] for x in simplejson.loads(resp.body)]
self.assertEquals(result, [u'\u2603', 'text/plain;charset="utf-8"'])
def test_GET_accept_not_valid(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/xml*'
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 406)
def test_GET_limit(self):
# make a container
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
# fill the container
for i in range(3):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# test limit
req = Request.blank(
'/sda1/p/a/c?limit=2', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
result = resp.body.split()
self.assertEquals(result, ['0', '1'])
def test_GET_prefix(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('a1', 'b1', 'a2', 'b2', 'a3', 'b3'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain',
'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?prefix=a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.body.split(), ['a1', 'a2', 'a3'])
def test_GET_delimiter_too_long(self):
req = Request.blank('/sda1/p/a/c?delimiter=xx',
environ={'REQUEST_METHOD': 'GET',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 412)
def test_GET_delimiter(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('US-TX-A', 'US-TX-B', 'US-OK-A', 'US-OK-B', 'US-UT-A'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?prefix=US-&delimiter=-&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(
simplejson.loads(resp.body),
[{"subdir": "US-OK-"},
{"subdir": "US-TX-"},
{"subdir": "US-UT-"}])
def test_GET_delimiter_xml(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('US-TX-A', 'US-TX-B', 'US-OK-A', 'US-OK-B', 'US-UT-A'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?prefix=US-&delimiter=-&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(
resp.body, '<?xml version="1.0" encoding="UTF-8"?>'
'\n<container name="c"><subdir name="US-OK-">'
'<name>US-OK-</name></subdir>'
'<subdir name="US-TX-"><name>US-TX-</name></subdir>'
'<subdir name="US-UT-"><name>US-UT-</name></subdir></container>')
def test_GET_delimiter_xml_with_quotes(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/c/<\'sub\' "dir">/object',
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?delimiter=/&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
dom = minidom.parseString(resp.body)
self.assert_(len(dom.getElementsByTagName('container')) == 1)
container = dom.getElementsByTagName('container')[0]
self.assert_(len(container.getElementsByTagName('subdir')) == 1)
subdir = container.getElementsByTagName('subdir')[0]
self.assertEquals(unicode(subdir.attributes['name'].value),
u'<\'sub\' "dir">/')
self.assert_(len(subdir.getElementsByTagName('name')) == 1)
name = subdir.getElementsByTagName('name')[0]
self.assertEquals(unicode(name.childNodes[0].data),
u'<\'sub\' "dir">/')
def test_GET_path(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for i in ('US/TX', 'US/TX/B', 'US/OK', 'US/OK/B', 'US/UT/A'):
req = Request.blank(
'/sda1/p/a/c/%s' % i,
environ={
'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_CONTENT_TYPE': 'text/plain', 'HTTP_X_ETAG': 'x',
'HTTP_X_SIZE': 0})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a/c?path=US&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(
simplejson.loads(resp.body),
[{"name": "US/OK", "hash": "x", "bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"},
{"name": "US/TX", "hash": "x", "bytes": 0,
"content_type": "text/plain",
"last_modified": "1970-01-01T00:00:01.000000"}])
def test_GET_insufficient_storage(self):
self.controller = container_server.ContainerController(
{'devices': self.testdir})
req = Request.blank(
'/sda-null/p/a/c', environ={'REQUEST_METHOD': 'GET',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 507)
def test_through_call(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '404 ')
def test_through_call_invalid_path(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/bob',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '400 ')
def test_through_call_invalid_path_utf8(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '\x00',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '412 ')
def test_invalid_method_doesnt_exist(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': 'method_doesnt_exist',
'PATH_INFO': '/sda1/p/a/c'},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.writelines(args)
self.controller.__call__({'REQUEST_METHOD': '__init__',
'PATH_INFO': '/sda1/p/a/c'},
start_response)
self.assertEquals(errbuf.getvalue(), '')
self.assertEquals(outbuf.getvalue()[:4], '405 ')
def test_params_format(self):
req = Request.blank(
'/sda1/p/a/c',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'})
req.get_response(self.controller)
for format in ('xml', 'json'):
req = Request.blank('/sda1/p/a/c?format=%s' % format,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 200)
def test_params_utf8(self):
# Bad UTF8 sequence, all parameters should cause 400 error
for param in ('delimiter', 'limit', 'marker', 'path', 'prefix',
'end_marker', 'format'):
req = Request.blank('/sda1/p/a/c?%s=\xce' % param,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 400,
"%d on param %s" % (resp.status_int, param))
# Good UTF8 sequence for delimiter, too long (1 byte delimiters only)
req = Request.blank('/sda1/p/a/c?delimiter=\xce\xa9',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 412,
"%d on param delimiter" % (resp.status_int))
req = Request.blank('/sda1/p/a/c',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'})
req.get_response(self.controller)
# Good UTF8 sequence, ignored for limit, doesn't affect other queries
for param in ('limit', 'marker', 'path', 'prefix', 'end_marker',
'format'):
req = Request.blank('/sda1/p/a/c?%s=\xce\xa9' % param,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204,
"%d on param %s" % (resp.status_int, param))
def test_put_auto_create(self):
headers = {'x-timestamp': normalize_timestamp(1),
'x-size': '0',
'x-content-type': 'text/plain',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e'}
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/.a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
req = Request.blank('/sda1/p/a/.c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/a/c/.o',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_delete_auto_create(self):
headers = {'x-timestamp': normalize_timestamp(1)}
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/.a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
req = Request.blank('/sda1/p/a/.c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
req = Request.blank('/sda1/p/a/.c/.o',
environ={'REQUEST_METHOD': 'DELETE'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404)
def test_content_type_on_HEAD(self):
Request.blank('/sda1/p/a/o',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'}).get_response(
self.controller)
env = {'REQUEST_METHOD': 'HEAD'}
req = Request.blank('/sda1/p/a/o?format=xml', environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank('/sda1/p/a/o?format=json', environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank('/sda1/p/a/o', environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'text/plain')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/o', headers={'Accept': 'application/json'}, environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/json')
self.assertEquals(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a/o', headers={'Accept': 'application/xml'}, environ=env)
resp = req.get_response(self.controller)
self.assertEquals(resp.content_type, 'application/xml')
self.assertEquals(resp.charset, 'utf-8')
def test_updating_multiple_container_servers(self):
http_connect_args = []
def fake_http_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
class SuccessfulFakeConn(object):
@property
def status(self):
return 200
def getresponse(self):
return self
def read(self):
return ''
captured_args = {'ipaddr': ipaddr, 'port': port,
'device': device, 'partition': partition,
'method': method, 'path': path, 'ssl': ssl,
'headers': headers, 'query_string': query_string}
http_connect_args.append(
dict((k, v) for k, v in captured_args.iteritems()
if v is not None))
req = Request.blank(
'/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '12345',
'X-Account-Partition': '30',
'X-Account-Host': '1.2.3.4:5, 6.7.8.9:10',
'X-Account-Device': 'sdb1, sdf1'})
orig_http_connect = container_server.http_connect
try:
container_server.http_connect = fake_http_connect
req.get_response(self.controller)
finally:
container_server.http_connect = orig_http_connect
http_connect_args.sort(key=operator.itemgetter('ipaddr'))
self.assertEquals(len(http_connect_args), 2)
self.assertEquals(
http_connect_args[0],
{'ipaddr': '1.2.3.4',
'port': '5',
'path': '/a/c',
'device': 'sdb1',
'partition': '30',
'method': 'PUT',
'ssl': False,
'headers': HeaderKeyDict({
'x-bytes-used': 0,
'x-delete-timestamp': '0',
'x-object-count': 0,
'x-put-timestamp': '0000012345.00000',
'referer': 'PUT http://localhost/sda1/p/a/c',
'user-agent': 'container-server %d' % os.getpid(),
'x-trans-id': '-'})})
self.assertEquals(
http_connect_args[1],
{'ipaddr': '6.7.8.9',
'port': '10',
'path': '/a/c',
'device': 'sdf1',
'partition': '30',
'method': 'PUT',
'ssl': False,
'headers': HeaderKeyDict({
'x-bytes-used': 0,
'x-delete-timestamp': '0',
'x-object-count': 0,
'x-put-timestamp': '0000012345.00000',
'referer': 'PUT http://localhost/sda1/p/a/c',
'user-agent': 'container-server %d' % os.getpid(),
'x-trans-id': '-'})})
def test_serv_reserv(self):
# Test replication_server flag was set from configuration file.
container_controller = container_server.ContainerController
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.assertEquals(container_controller(conf).replication_server, None)
for val in [True, '1', 'True', 'true']:
conf['replication_server'] = val
self.assertTrue(container_controller(conf).replication_server)
for val in [False, 0, '0', 'False', 'false', 'test_string']:
conf['replication_server'] = val
self.assertFalse(container_controller(conf).replication_server)
def test_list_allowed_methods(self):
# Test list of allowed_methods
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST']
repl_methods = ['REPLICATE']
for method_name in obj_methods:
method = getattr(self.controller, method_name)
self.assertFalse(hasattr(method, 'replication'))
for method_name in repl_methods:
method = getattr(self.controller, method_name)
self.assertEquals(method.replication, True)
def test_correct_allowed_method(self):
# Test correct work for allowed method using
# swift.container.server.ContainerController.__call__
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
self.controller = container_server.ContainerController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'})
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
method = 'PUT'
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
method_res = mock.MagicMock()
mock_method = public(lambda x: mock.MagicMock(return_value=method_res))
with mock.patch.object(self.controller, method, new=mock_method):
response = self.controller.__call__(env, start_response)
self.assertEqual(response, method_res)
def test_not_allowed_method(self):
# Test correct work for NOT allowed method using
# swift.container.server.ContainerController.__call__
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
self.controller = container_server.ContainerController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'})
def start_response(*args):
"""Sends args to outbuf"""
outbuf.writelines(args)
method = 'PUT'
env = {'REQUEST_METHOD': method,
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False}
answer = ['<html><h1>Method Not Allowed</h1><p>The method is not '
'allowed for this resource.</p></html>']
mock_method = replication(public(lambda x: mock.MagicMock()))
with mock.patch.object(self.controller, method, new=mock_method):
response = self.controller.__call__(env, start_response)
self.assertEqual(response, answer)
if __name__ == '__main__':
unittest.main()
|
citrix-openstack-build/swift
|
test/unit/container/test_server.py
|
Python
|
apache-2.0
| 75,820
|