| code (stringlengths 2–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int32 2–1.05M) |
|---|---|---|---|---|---|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Bml(CMakePackage):
"""The basic matrix library (bml) is a collection of various matrix data
formats (in dense and sparse) and their associated algorithms for basic
matrix operations."""
homepage = "http://lanl.github.io/bml/"
url = "https://github.com/lanl/bml/tarball/v1.2.2"
git = "https://github.com/lanl/bml.git"
version('develop', branch='master')
version('1.3.1', sha256='17145eda96aa5e550dcbff1ee7ce62b45723af8210b1ab70c5975ec792fa3d13')
version('1.3.0', sha256='d9465079fe77210eb2af2dcf8ed96802edf5bb76bfbfdbcc97e206c8cd460b07')
version('1.2.3', sha256='9a2ee6c47d2445bfdb34495497ea338a047e9e4767802af47614d9ff94b0c523')
version('1.2.2', sha256='89ab78f9fe8395fe019cc0495a1d7b69875b5708069faeb831ddb9a6a9280a8a')
version('1.1.0', sha256='29162f1f7355ad28b44d3358206ccd3c7ac7794ee13788483abcbd2f8063e7fc')
variant('shared', default=True, description='Build shared libs')
variant('mpi', default=True, description='Build with MPI Support')
conflicts('+mpi', when='@:1.2.2')
depends_on("blas")
depends_on("lapack")
depends_on('mpi', when='+mpi')
def cmake_args(self):
args = [
'-DBUILD_SHARED_LIBS={0}'.format(
'ON' if '+shared' in self.spec else 'OFF')
]
spec = self.spec
if '+mpi' in spec:
args.append('-DBML_MPI=True')
args.append('-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc)
args.append('-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx)
args.append('-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc)
else:
args.append('-DBML_MPI=False')
return args
| rspavel/spack | var/spack/repos/builtin/packages/bml/package.py | Python | lgpl-2.1 | 1,909 |
# Copyright (C) 2006-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
L{HostKeys}
"""
import base64
import binascii
from Crypto.Hash import SHA, HMAC
try:
import UserDict
from UserDict import DictMixin
except ImportError:
from collections import UserDict
#from collections import MutableMapping as DictMixin
from collections import Mapping as DictMixin
from paramiko.common import *
from paramiko.dsskey import DSSKey
from paramiko.rsakey import RSAKey
from paramiko.ecdsakey import ECDSAKey
from paramiko.util import get_logger
class InvalidHostKey(Exception):
def __init__(self, line, exc):
self.line = line
self.exc = exc
self.args = (line, exc)
class HostKeyEntry:
"""
Representation of a line in an OpenSSH-style "known hosts" file.
"""
def __init__(self, hostnames=None, key=None):
self.valid = (hostnames is not None) and (key is not None)
self.hostnames = hostnames
self.key = key
def from_line(cls, line, lineno=None):
"""
Parses the given line of text to find the names for the host,
the type of key, and the key data. The line is expected to be in the
format used by the openssh known_hosts file.
Lines are expected to not have leading or trailing whitespace.
We don't bother to check for comments or empty lines. All of
that should be taken care of before sending the line to us.
@param line: a line from an OpenSSH known_hosts file
@type line: str
"""
log = get_logger('paramiko.hostkeys')
fields = line.split(' ')
if len(fields) < 3:
# Bad number of fields
log.info("Not enough fields found in known_hosts in line %s (%r)" %
(lineno, line))
return None
fields = fields[:3]
names, keytype, key = fields
names = names.split(',')
# Decide what kind of key we're looking at and create an object
# to hold it accordingly.
try:
if keytype == 'ssh-rsa':
key = RSAKey(data=base64.decodebytes(key.encode()))
elif keytype == 'ssh-dss':
key = DSSKey(data=base64.decodebytes(key.encode()))
elif keytype == 'ecdsa-sha2-nistp256':
key = ECDSAKey(data=base64.decodebytes(key.encode()))
else:
log.info("Unable to handle key of type %s" % (keytype,))
return None
except binascii.Error as e:
raise InvalidHostKey(line, e)
return cls(names, key)
from_line = classmethod(from_line)
def to_line(self):
"""
Returns a string in OpenSSH known_hosts file format, or None if
the object is not in a valid state. A trailing newline is
included.
"""
if self.valid:
return '%s %s %s\n' % (','.join(self.hostnames), self.key.get_name(),
self.key.get_base64())
return None
def __repr__(self):
return '<HostKeyEntry %r: %r>' % (self.hostnames, self.key)
class HostKeys (DictMixin):
"""
Representation of an openssh-style "known hosts" file. Host keys can be
read from one or more files, and then individual hosts can be looked up to
verify server keys during SSH negotiation.
A HostKeys object can be treated like a dict; any dict lookup is equivalent
to calling L{lookup}.
@since: 1.5.3
"""
def __init__(self, filename=None):
"""
Create a new HostKeys object, optionally loading keys from an openssh
style host-key file.
@param filename: filename to load host keys from, or C{None}
@type filename: str
"""
# emulate a dict of { hostname: { keytype: PKey } }
self._entries = []
if filename is not None:
self.load(filename)
def add(self, hostname, keytype, key):
"""
Add a host key entry to the table. Any existing entry for a
C{(hostname, keytype)} pair will be replaced.
@param hostname: the hostname (or IP) to add
@type hostname: str
@param keytype: key type (C{"ssh-rsa"} or C{"ssh-dss"})
@type keytype: str
@param key: the key to add
@type key: L{PKey}
"""
for e in self._entries:
if (hostname in e.hostnames) and (e.key.get_name() == keytype):
e.key = key
return
self._entries.append(HostKeyEntry([hostname], key))
def load(self, filename):
"""
Read a file of known SSH host keys, in the format used by openssh.
This type of file unfortunately doesn't exist on Windows, but on
posix, it will usually be stored in
C{os.path.expanduser("~/.ssh/known_hosts")}.
If this method is called multiple times, the host keys are merged,
not cleared. So multiple calls to C{load} will just call L{add},
replacing any existing entries and adding new ones.
@param filename: name of the file to read host keys from
@type filename: str
@raise IOError: if there was an error reading the file
"""
f = open(filename, 'r')
for lineno, line in enumerate(f):
line = line.strip()
if (len(line) == 0) or (line[0] == '#'):
continue
e = HostKeyEntry.from_line(line, lineno)
if e is not None:
_hostnames = e.hostnames
for h in _hostnames:
if self.check(h, e.key):
e.hostnames.remove(h)
if len(e.hostnames):
self._entries.append(e)
f.close()
def save(self, filename):
"""
Save host keys into a file, in the format used by openssh. The order of
keys in the file will be preserved when possible (if these keys were
loaded from a file originally). The single exception is that combined
lines will be split into individual key lines, which is arguably a bug.
@param filename: name of the file to write
@type filename: str
@raise IOError: if there was an error writing the file
@since: 1.6.1
"""
f = open(filename, 'w')
for e in self._entries:
line = e.to_line()
if line:
f.write(line)
f.close()
def lookup(self, hostname):
"""
Find a hostkey entry for a given hostname or IP. If no entry is found,
C{None} is returned. Otherwise a dictionary of keytype to key is
returned. The keytype will be a string such as C{"ssh-rsa"}, C{"ssh-dss"} or C{"ecdsa-sha2-nistp256"}.
@param hostname: the hostname (or IP) to lookup
@type hostname: str
@return: keys associated with this host (or C{None})
@rtype: dict(str, L{PKey})
"""
class SubDict (DictMixin):
def __init__(self, hostname, entries, hostkeys):
self._hostname = hostname
self._entries = entries
self._hostkeys = hostkeys
def __len__(self):
return len(self.keys())
def __iter__(self):
return self.keys().__iter__()
def __getitem__(self, key):
for e in self._entries:
if e.key.get_name() == key:
return e.key
raise KeyError(key)
def __setitem__(self, key, val):
for e in self._entries:
if e.key is None:
continue
if e.key.get_name() == key:
# replace
e.key = val
break
else:
# add a new one
e = HostKeyEntry([hostname], val)
self._entries.append(e)
self._hostkeys._entries.append(e)
def keys(self):
return [e.key.get_name() for e in self._entries if e.key is not None]
entries = []
for e in self._entries:
for h in e.hostnames:
if (h.startswith('|1|') and (self.hash_host(hostname, h) == h)) or (h == hostname):
entries.append(e)
if len(entries) == 0:
return None
return SubDict(hostname, entries, self)
def check(self, hostname, key):
"""
Return True if the given key is associated with the given hostname
in this dictionary.
@param hostname: hostname (or IP) of the SSH server
@type hostname: str
@param key: the key to check
@type key: L{PKey}
@return: C{True} if the key is associated with the hostname; C{False}
if not
@rtype: bool
"""
k = self.lookup(hostname)
if k is None:
return False
host_key = k.get(key.get_name(), None)
if host_key is None:
return False
return str(host_key) == str(key)
def clear(self):
"""
Remove all host keys from the dictionary.
"""
self._entries = []
def __len__(self):
return len(self.keys())
def __iter__(self):
return self.keys().__iter__()
def __getitem__(self, key):
ret = self.lookup(key)
if ret is None:
raise KeyError(key)
return ret
def __setitem__(self, hostname, entry):
# don't use this please.
if len(entry) == 0:
self._entries.append(HostKeyEntry([hostname], None))
return
for key_type in entry.keys():
found = False
for e in self._entries:
if (hostname in e.hostnames) and (e.key.get_name() == key_type):
# replace
e.key = entry[key_type]
found = True
if not found:
self._entries.append(HostKeyEntry([hostname], entry[key_type]))
def keys(self):
# python 2.4 sets would be nice here.
ret = []
for e in self._entries:
for h in e.hostnames:
if h not in ret:
ret.append(h)
return ret
def values(self):
ret = []
for k in self.keys():
ret.append(self.lookup(k))
return ret
def hash_host(hostname, salt=None):
"""
Return a "hashed" form of the hostname, as used by openssh when storing
hashed hostnames in the known_hosts file.
@param hostname: the hostname to hash
@type hostname: str
@param salt: optional salt to use when hashing (must be 20 bytes long)
@type salt: str
@return: the hashed hostname
@rtype: str
"""
if salt is None:
salt = rng.read(SHA.digest_size)
else:
if salt.startswith('|1|'):
salt = salt.split('|')[2]
salt = base64.decodebytes(salt.encode())
assert len(salt) == SHA.digest_size
hmac = HMAC.HMAC(salt, hostname.encode(), SHA).digest()
hostkey = '|1|%s|%s' % (base64.encodestring(salt).decode(), base64.encodestring(hmac).decode())
return hostkey.replace('\n', '')
hash_host = staticmethod(hash_host)
| nischu7/paramiko | paramiko/hostkeys.py | Python | lgpl-2.1 | 12,117 |
"""
Tests outgoing calls created with InitialAudio and/or InitialVideo, and
exposing the initial contents of incoming calls as values of InitialAudio and
InitialVideo
"""
import operator
from servicetest import (
assertContains, assertEquals, assertLength,
wrap_channel, EventPattern, call_async, make_channel_proxy)
from jingletest2 import JingleTest2, test_all_dialects
import constants as cs
def outgoing(jp, q, bus, conn, stream):
remote_jid = 'flames@cold.mountain/beyond'
jt = JingleTest2(jp, conn, q, stream, 'test@localhost', remote_jid)
jt.prepare()
self_handle = conn.GetSelfHandle()
remote_handle = conn.RequestHandles(cs.HT_CONTACT, [remote_jid])[0]
rccs = conn.Properties.Get(cs.CONN_IFACE_REQUESTS, 'RequestableChannelClasses')
media_classes = [ rcc for rcc in rccs
if rcc[0][cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_STREAMED_MEDIA ]
assertLength(1, media_classes)
fixed, allowed = media_classes[0]
assertContains(cs.INITIAL_AUDIO, allowed)
assertContains(cs.INITIAL_VIDEO, allowed)
check_neither(q, conn, bus, stream, remote_handle)
check_iav(jt, q, conn, bus, stream, remote_handle, True, False)
check_iav(jt, q, conn, bus, stream, remote_handle, False, True)
check_iav(jt, q, conn, bus, stream, remote_handle, True, True)
def check_neither(q, conn, bus, stream, remote_handle):
"""
Make a channel without specifying InitialAudio or InitialVideo; check
that it's announced with both False, and that they're both present and
false in GetAll().
"""
path, props = conn.Requests.CreateChannel({
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_STREAMED_MEDIA,
cs.TARGET_HANDLE_TYPE: cs.HT_CONTACT,
cs.TARGET_HANDLE: remote_handle})
assertContains((cs.INITIAL_AUDIO, False), props.items())
assertContains((cs.INITIAL_VIDEO, False), props.items())
chan = wrap_channel(bus.get_object(conn.bus_name, path),
cs.CHANNEL_TYPE_STREAMED_MEDIA, ['MediaSignalling'])
props = chan.Properties.GetAll(cs.CHANNEL_TYPE_STREAMED_MEDIA)
assertContains(('InitialAudio', False), props.items())
assertContains(('InitialVideo', False), props.items())
# We shouldn't have started a session yet, so there shouldn't be any
# session handlers. Strictly speaking, there could be a session handler
# with no stream handlers, but...
session_handlers = chan.MediaSignalling.GetSessionHandlers()
assertLength(0, session_handlers)
def check_iav(jt, q, conn, bus, stream, remote_handle, initial_audio,
initial_video):
"""
Make a channel and check that its InitialAudio and InitialVideo properties
come out correctly.
"""
call_async(q, conn.Requests, 'CreateChannel', {
cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_STREAMED_MEDIA,
cs.TARGET_HANDLE_TYPE: cs.HT_CONTACT,
cs.TARGET_HANDLE: remote_handle,
cs.INITIAL_AUDIO: initial_audio,
cs.INITIAL_VIDEO: initial_video,
})
if initial_video and (not jt.jp.can_do_video()
or (not initial_audio and not jt.jp.can_do_video_only ())):
# Some protocols can't do video
event = q.expect('dbus-error', method='CreateChannel')
assertEquals(cs.NOT_CAPABLE, event.error.get_dbus_name())
else:
path, props = q.expect('dbus-return', method='CreateChannel').value
assertContains((cs.INITIAL_AUDIO, initial_audio), props.items())
assertContains((cs.INITIAL_VIDEO, initial_video), props.items())
chan = wrap_channel(bus.get_object(conn.bus_name, path),
cs.CHANNEL_TYPE_STREAMED_MEDIA, ['MediaSignalling'])
props = chan.Properties.GetAll(cs.CHANNEL_TYPE_STREAMED_MEDIA)
assertContains(('InitialAudio', initial_audio), props.items())
assertContains(('InitialVideo', initial_video), props.items())
session_handlers = chan.MediaSignalling.GetSessionHandlers()
assertLength(1, session_handlers)
path, type = session_handlers[0]
assertEquals('rtp', type)
session_handler = make_channel_proxy(conn, path, 'Media.SessionHandler')
session_handler.Ready()
stream_handler_paths = []
stream_handler_types = []
for x in [initial_audio, initial_video]:
if x:
e = q.expect('dbus-signal', signal='NewStreamHandler')
stream_handler_paths.append(e.args[0])
stream_handler_types.append(e.args[2])
if initial_audio:
assertContains(cs.MEDIA_STREAM_TYPE_AUDIO, stream_handler_types)
if initial_video:
assertContains(cs.MEDIA_STREAM_TYPE_VIDEO, stream_handler_types)
for x in xrange (0, len(stream_handler_paths)):
p = stream_handler_paths[x]
t = stream_handler_types[x]
sh = make_channel_proxy(conn, p, 'Media.StreamHandler')
sh.NewNativeCandidate("fake", jt.get_remote_transports_dbus())
if t == cs.MEDIA_STREAM_TYPE_AUDIO:
sh.Ready(jt.get_audio_codecs_dbus())
else:
sh.Ready(jt.get_video_codecs_dbus())
sh.StreamState(cs.MEDIA_STREAM_STATE_CONNECTED)
e = q.expect('stream-iq',
predicate=jt.jp.action_predicate('session-initiate'))
jt.parse_session_initiate (e.query)
jt.accept()
events = reduce(operator.concat,
[ [ EventPattern('dbus-signal', signal='SetRemoteCodecs', path=p),
EventPattern('dbus-signal', signal='SetStreamPlaying', path=p),
] for p in stream_handler_paths
], [])
q.expect_many(*events)
chan.Close()
def incoming(jp, q, bus, conn, stream):
remote_jid = 'skinny.fists@heaven/antennas'
jt = JingleTest2(jp, conn, q, stream, 'test@localhost', remote_jid)
jt.prepare()
self_handle = conn.GetSelfHandle()
remote_handle = conn.RequestHandles(cs.HT_CONTACT, [remote_jid])[0]
for a, v in [("audio1", None), (None, "video1"), ("audio1", "video1")]:
if v != None and not jp.can_do_video():
continue
if a == None and v != None and not jp.can_do_video_only():
continue
jt.incoming_call(audio=a, video=v)
e = q.expect('dbus-signal', signal='NewChannels',
predicate=lambda e:
cs.CHANNEL_TYPE_CONTACT_LIST not in e.args[0][0][1].values())
chans = e.args[0]
assertLength(1, chans)
path, props = chans[0]
assertEquals(cs.CHANNEL_TYPE_STREAMED_MEDIA, props[cs.CHANNEL_TYPE])
assertEquals(a != None, props[cs.INITIAL_AUDIO])
assertEquals(v != None, props[cs.INITIAL_VIDEO])
# FIXME: This doesn't check non-Google contacts that can only do one
# media type, as such contacts as simulated by JingleTest2 can always
# do both.
assertEquals(not jp.can_do_video() or not jp.can_do_video_only(),
props[cs.IMMUTABLE_STREAMS])
chan = wrap_channel(bus.get_object(conn.bus_name, path),
cs.CHANNEL_TYPE_STREAMED_MEDIA)
chan.Close()
if __name__ == '__main__':
test_all_dialects(outgoing)
test_all_dialects(incoming)
| jku/telepathy-gabble | tests/twisted/jingle/initial-audio-video.py | Python | lgpl-2.1 | 7,213 |
"Conferencing code"
# XXX A relatively simple enhancement to this would be to store the
# volumes for each source in the conference, and use an exponential
# decay type algorithm to determine the "loudest".
from shtoom.doug.source import Source
from twisted.internet.task import LoopingCall
from twisted.python import log
from sets import Set
class ConferenceError(Exception): pass
class ConferenceClosedError(ConferenceError): pass
class ConferenceMemberNotFoundError(ConferenceError): pass
CONFDEBUG = True
CONFDEBUG = False
class ConfSource(Source):
"A ConfSource connects a voiceapp, and via that, a leg, to a room"
def __init__(self, room, leg):
self._user = leg.getDialog().getRemoteTag().getURI()
self._room = room
self._room.addMember(self)
self._quiet = False
self.makeBuffer()
super(ConfSource, self).__init__()
def makeBuffer(self):
try:
from collections import deque
except ImportError:
# not optimal, but the queue isn't large
self.deque = list()
self.popleft = lambda: self.deque.pop(0)
else:
self.deque = deque()
self.popleft = self.deque.popleft
def truncBuffer(self):
while len(self.deque) > 3:
self.popleft()
def isPlaying(self):
return True
def isRecording(self):
return True
def read(self):
try:
ret = self._room.readAudio(self)
except ConferenceClosedError:
return self.app._va_sourceDone(self)
if not ret:
if not self._quiet:
log.msg("%r is now receiving silence"%(self))
self._quiet = True
elif self._quiet:
log.msg("%r has stopped receiving silence"%(self))
self._quiet = False
return ret
def close(self):
self._room.removeMember(self)
def write(self, bytes):
self.deque.append(bytes)
self.truncBuffer()
if not self._room.isOpen():
self.app._va_sourceDone(self)
def getAudioForRoom(self):
"get audio into the room"
# XXX tofix - might not have enough data (short packets). rock on.
if len(self.deque):
bytes = self.popleft()
return bytes
def __repr__(self):
return "<ConferenceUser %s in room %s at %x>"%(self._user,
self._room.getName(), id(self))
class Room:
"""A room is a conference. Everyone in the room hears everyone else
(well, kinda)
"""
# Theory of operation. Rather than rely on the individual sources
# timer loops (which would be, well, horrid), we trigger off our
# own timer.
# This means we don't have to worry about the end systems not
# contributing during a window.
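# Illustrative flow (the room name 'lobby' and the leg object are hypothetical;
# a leg normally comes from a voiceapp):
#   src = newConferenceMember('lobby', leg)   # creates Room 'lobby' on demand
#   src.write(audio_bytes)                    # audio arriving from this member
#   mixed = src.read()                        # mixed room audio for this member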
_open = False
def __init__(self, name, MaxSpeakers=4):
self._name = name
self._members = Set()
self._audioOut = {}
self._audioOutDefault = ''
self._maxSpeakers = MaxSpeakers
self.start()
def start(self):
self._audioCalcLoop = LoopingCall(self.mixAudio)
self._audioCalcLoop.start(0.020)
self._open = True
def getName(self):
return self._name
def __repr__(self):
if self._open:
o = ''
else:
o = ' (closed)'
return "<ConferenceRoom %s%s with %d members>"%(self._name, o,
len(self._members))
def shutdown(self):
if hasattr(self._audioCalcLoop, 'cancel'):
self._audioCalcLoop.cancel()
else:
self._audioCalcLoop.stop()
# XXX close down any running sources!
self._members = Set()
del self._audioOut
self._open = False
removeRoom(self._name)
def addMember(self, confsource):
self._members.add(confsource)
if CONFDEBUG:
print "added", confsource, "to room", self
if not self._open:
self.start()
def removeMember(self, confsource):
if len(self._members) and confsource in self._members:
self._members.remove(confsource)
if CONFDEBUG:
print "removed", confsource, "from", self
else:
raise ConferenceMemberNotFoundError(confsource)
if not len(self._members):
if CONFDEBUG:
print "No members left, shutting down"
self.shutdown()
def isMember(self, confsource):
return confsource in self._members
def isOpen(self):
return self._open
def memberCount(self):
return len(self._members)
def readAudio(self, confsource):
if self._open:
return self._audioOut.get(confsource, self._audioOutDefault)
else:
raise ConferenceClosedError()
def mixAudio(self):
# XXX see the comment above about storing a decaying number for the
# volume. For instance, each time round the loop, take the calculated
# volume, and the stored volume, and do something like:
# newStoredVolume = (oldStoredVolume * 0.33) + (thisPacketVolume * 0.66)
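# A minimal sketch of that idea (not implemented here; 'stored' is a
# hypothetical per-room dict of source -> smoothed volume):
#   stored[src] = stored.get(src, 0) * 0.33 + audioop.rms(bytes, 2) * 0.66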
import audioop
self._audioOut = {}
if not self._open:
log.msg('mixing closed room %r'%(self,), system='doug')
return
audioIn = {}
for m in self._members:
bytes = m.getAudioForRoom()
if bytes: audioIn[m] = bytes
if CONFDEBUG:
print "room %r has %d members"%(self, len(self._members))
print "got %d samples this time"%len(audioIn)
print "samples: %r"%(audioIn.items(),)
# short-circuit this case
if len(self._members) < 2:
if CONFDEBUG:
print "less than 2 members, no sound"
self._audioOutDefault = ''
return
# Samples is (confsource, audio)
samples = audioIn.items()
# power is three-tuples of (rms,audio,confsource)
power = [ (audioop.rms(x[1],2),x[1], x[0]) for x in samples ]
power.sort(); power.reverse()
if CONFDEBUG:
for rms,audio,confsource in power:
print confsource, rms
# Speakers is a list of the _maxSpeakers loudest speakers
speakers = Set([x[2] for x in power[:self._maxSpeakers]])
# First we calculate the 'default' audio. Used for everyone who's
# not a speaker in the room.
samples = [ x[1] for x in power[:self._maxSpeakers] ]
scaledsamples = [ audioop.mul(x, 2, 1.0/len(samples)) for x in samples ]
if scaledsamples:
# ooo. a use of reduce. first time for everything...
try:
combined = reduce(lambda x,y: audioop.add(x, y, 2), scaledsamples)
except audioop.error, exc:
# XXX tofix!
print "combine got error %s"%(exc,)
print "lengths", [len(x) for x in scaledsamples]
combined = ''
else:
combined = ''
self._audioOutDefault = combined
# Now calculate output for each speaker.
allsamples = {}
for p,sample,speaker in power:
allsamples[speaker] = p, sample
for s in speakers:
# For each speaker, take the set of (other speakers), grab
# the top N speakers, and combine them. Add to the _audioOut
# dictionary
all = allsamples.copy()
del all[s]
power = all.values()
power.sort() ; power.reverse()
samples = [ x[1] for x in power[:self._maxSpeakers] ]
if samples:
scaled = [ audioop.mul(x, 2, 1.0/len(samples)) for x in samples]
try:
out = reduce(lambda x,y: audioop.add(x, y, 2), scaled)
except audioop.error, exc:
# XXX tofix!
print "combine got error %s"%(exc,)
print "lengths", [len(x) for x in scaled]
out = ''
else:
out = ''
if CONFDEBUG:
print "calc for", s, "is", audioop.rms(out, 2)
self._audioOut[s] = out
_RegisterOfAllRooms = {}
_StickyRoomNames = {}
def removeRoom(roomname):
global _RegisterOfAllRooms
if roomname in _RegisterOfAllRooms and roomname not in _StickyRoomNames:
del _RegisterOfAllRooms[roomname]
def newConferenceMember(roomname, leg):
global _RegisterOfAllRooms
if not roomname in _RegisterOfAllRooms:
_RegisterOfAllRooms[roomname] = Room(roomname)
room = _RegisterOfAllRooms[roomname]
return ConfSource(room, leg)
| braams/shtoom | shtoom/doug/conferencing.py | Python | lgpl-2.1 | 8,770 |
#!/usr/bin/python
"""Test of Dojo combo box presentation."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(PauseAction(5000))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"1. Tab to the first combo box",
["BRAILLE LINE: 'US State test 1 (200% Courier font): California $l'",
" VISIBLE: '(200% Courier font): California ', cursor=32",
"BRAILLE LINE: 'Focus mode'",
" VISIBLE: 'Focus mode', cursor=0",
"BRAILLE LINE: 'US State test 1 (200% Courier font): California $l'",
" VISIBLE: '(200% Courier font): California ', cursor=32",
"SPEECH OUTPUT: 'collapsed'",
"SPEECH OUTPUT: 'US State test 1 (200% Courier font): entry California selected'",
"SPEECH OUTPUT: 'Focus mode' voice=system"]))
sequence.append(utils.StartRecordingAction())
sequence.append(TypeAction("C"))
sequence.append(utils.AssertPresentationAction(
"2. Replace existing text with a 'C'",
["KNOWN ISSUE: The braille line is not quite right",
"BRAILLE LINE: 'US State test 1 (200% Courier font): C $l'",
" VISIBLE: '(200% Courier font): C $l', cursor=23",
"BRAILLE LINE: 'US State test 1 (200% Courier font): US State test 1 (200% Courier font): combo box'",
" VISIBLE: 'te test 1 (200% Courier font): U', cursor=32",
"SPEECH OUTPUT: 'expanded'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"3. Down Arrow",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'California menu'",
"SPEECH OUTPUT: 'C alifornia (CA).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"4. Down Arrow",
["BRAILLE LINE: 'C olorado (CO)'",
" VISIBLE: 'C olorado (CO)', cursor=1",
"SPEECH OUTPUT: 'C olorado (CO).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"5. Down Arrow",
["BRAILLE LINE: 'C onnecticut (CT)'",
" VISIBLE: 'C onnecticut (CT)', cursor=1",
"SPEECH OUTPUT: 'C onnecticut (CT).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"6. Down Arrow",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'C alifornia (CA).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"7. Up Arrow",
["BRAILLE LINE: 'C onnecticut (CT)'",
" VISIBLE: 'C onnecticut (CT)', cursor=1",
"SPEECH OUTPUT: 'C onnecticut (CT).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"8. Up Arrow",
["BRAILLE LINE: 'C olorado (CO)'",
" VISIBLE: 'C olorado (CO)', cursor=1",
"SPEECH OUTPUT: 'C olorado (CO).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"9. Up Arrow",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'C alifornia (CA).'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"10. Basic Where Am I - Combo box expanded",
["BRAILLE LINE: 'C alifornia (CA)'",
" VISIBLE: 'C alifornia (CA)', cursor=1",
"SPEECH OUTPUT: 'California menu'",
"SPEECH OUTPUT: 'C alifornia (CA).'",
"SPEECH OUTPUT: '1 of 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Escape"))
sequence.append(utils.AssertPresentationAction(
"11. Escape",
["BRAILLE LINE: 'US State test 1 (200% Courier font): US State test 1 (200% Courier font): combo box'",
" VISIBLE: 'te test 1 (200% Courier font): U', cursor=32",
"BRAILLE LINE: 'US State test 1 (200% Courier font): California $l'",
" VISIBLE: '(200% Courier font): California ', cursor=32",
"SPEECH OUTPUT: 'collapsed'",
"SPEECH OUTPUT: 'US State test 1 (200% Courier font): entry California selected'"]))
sequence.append(utils.AssertionSummaryAction())
sequence.start()
| pvagner/orca | test/keystrokes/firefox/aria_combobox_dojo.py | Python | lgpl-2.1 | 4,695 |
import __builtin__
import etcd
from etcd import Client
import importlib
import inspect
import maps
from mock import MagicMock
from mock import patch
import os
import pkgutil
import pytest
import yaml
from tendrl.commons import objects
import tendrl.commons.objects.node_context as node
from tendrl.commons import TendrlNS
from tendrl.commons.utils import etcd_utils
@patch.object(etcd, "Client")
@patch.object(Client, "read")
@patch.object(node.NodeContext, '_get_node_id')
@patch.object(etcd_utils, 'read')
@patch.object(node.NodeContext, 'load')
def init(patch_node_load,
patch_etcd_utils_read,
patch_get_node_id,
patch_read,
patch_client):
patch_get_node_id.return_value = 1
patch_read.return_value = etcd.Client()
patch_client.return_value = etcd.Client()
setattr(__builtin__, "NS", maps.NamedDict())
setattr(NS, "_int", maps.NamedDict())
NS._int.etcd_kwargs = {
'port': 1,
'host': 2,
'allow_reconnect': True}
NS._int.client = etcd.Client(**NS._int.etcd_kwargs)
NS["config"] = maps.NamedDict()
NS.config["data"] = maps.NamedDict()
NS.config.data['tags'] = "test"
patch_etcd_utils_read.return_value = maps.NamedDict(
value='{"status": "UP",'
'"pkey": "tendrl-node-test",'
'"node_id": "test_node_id",'
'"ipv4_addr": "test_ip",'
'"tags": "[\\"my_tag\\"]",'
'"sync_status": "done",'
'"locked_by": "fd",'
'"fqdn": "tendrl-node-test",'
'"last_sync": "date"}')
patch_node_load.return_value = node.NodeContext
tendrlNS = TendrlNS()
return tendrlNS
def test_constructor():
with patch.object(TendrlNS, 'setup_common_objects') as \
mocked_method:
mocked_method.return_value = None
tendrlNS = TendrlNS()
tendrlNS = init()
# Default Parameter Testing
assert tendrlNS.ns_name == "tendrl"
assert tendrlNS.ns_src == "tendrl.commons"
# Check for existence and right data type
assert isinstance(NS, maps.NamedDict)
# Testing _list_modules_in_package_path
def test_list_modules_in_package_path():
tendrlNS = init()
modules = [
('alert',
'tendrl.commons.objects.alert'),
('block_device',
'tendrl.commons.objects.block_device'),
('cluster',
'tendrl.commons.objects.cluster'),
('cluster_alert',
'tendrl.commons.objects.cluster_alert'),
('cluster_alert_counters',
'tendrl.commons.objects.cluster_alert_counters'),
('cluster_node_alert_counters',
'tendrl.commons.objects.cluster_node_alert_counters'),
('cluster_node_context',
'tendrl.commons.objects.cluster_node_context'),
('cluster_tendrl_context',
'tendrl.commons.objects.cluster_tendrl_context'),
('cpu', 'tendrl.commons.objects.cpu'),
('definition', 'tendrl.commons.objects.definition'),
('detected_cluster', 'tendrl.commons.objects.detected_cluster'),
('disk', 'tendrl.commons.objects.disk'),
('geo_replication_session',
'tendrl.commons.objects.geo_replication_session'),
('global_details',
'tendrl.commons.objects.global_details'),
('gluster_brick', 'tendrl.commons.objects.gluster_brick'),
('gluster_volume', 'tendrl.commons.objects.gluster_volume'),
('gluster_peer', 'tendrl.commons.objects.gluster_peer'),
('job', 'tendrl.commons.objects.job'),
('memory', 'tendrl.commons.objects.memory'),
('node', 'tendrl.commons.objects.node'),
('node_alert',
'tendrl.commons.objects.node_alert'),
('node_context', 'tendrl.commons.objects.node_context'),
('node_network', 'tendrl.commons.objects.node_network'),
('notification_only_alert',
'tendrl.commons.objects.notification_only_alert'),
('os', 'tendrl.commons.objects.os'),
('platform', 'tendrl.commons.objects.platform'),
('service', 'tendrl.commons.objects.service'),
('tendrl_context', 'tendrl.commons.objects.tendrl_context'),
('virtual_disk', 'tendrl.commons.objects.virtual_disk')
]
ns_objects_path = os.path.join(os.path.dirname(os.path.abspath(__file__)).
rsplit('/', 1)[0], "objects")
ns_objects_prefix = "tendrl.commons.objects."
ret = tendrlNS._list_modules_in_package_path(ns_objects_path,
ns_objects_prefix)
# TO-DISCUSS : modules is hard coded and might change in future
if len(ret) != len(modules):
raise AssertionError()
ret = tendrlNS._list_modules_in_package_path("test", "test")
assert len(ret) == 0
# Testing _register_subclasses_to_ns
def test_register_subclasses_to_ns(monkeypatch):
tendrlNS = init()
tendrlNS._register_subclasses_to_ns()
assert len(getattr(NS.tendrl, "objects")) > 0
assert len(getattr(NS.tendrl, "flows")) > 0
ns_objects_path = os.path.join(
os.path.dirname(
os.path.abspath(__file__)).rsplit(
'/', 1)[0], "objects")
ns_objects_prefix = "tendrl.commons.objects."
modules = tendrlNS._list_modules_in_package_path(ns_objects_path,
ns_objects_prefix)
for mode_name, mod_cls in modules:
assert hasattr(NS.tendrl.objects, mode_name.title().replace('_', '')) \
is True
def list_package(self_obj, package_path, prefix):
if "flows" in prefix:
return [
('ImportCluster', 'tendrl.commons.flows.import_cluster'),
('UnmanageCluster', 'tendrl.commons.flows.unmanage_cluster')
]
else:
modules = []
for importer, name, ispkg in pkgutil.walk_packages(
path=[package_path]):
modules.append((name, prefix + name))
return modules
monkeypatch.setattr(TendrlNS, '_list_modules_in_package_path',
list_package)
tendrlNS._register_subclasses_to_ns()
assert len(getattr(NS.tendrl, "objects")) > 0
# Testing _add_object
def test_add_object():
tendrlNS = init()
obj_name = "test_obj"
obj = importlib.import_module(
"tendrl.commons.objects.cluster_node_context")
current_ns = tendrlNS._get_ns()
obj_cls = ""
for obj_cls in inspect.getmembers(obj, inspect.isclass):
tendrlNS._add_object(obj_name, obj_cls[1])
break
assert isinstance(getattr(current_ns.objects, "_test_obj")['atoms'],
maps.NamedDict)
assert isinstance(getattr(current_ns.objects, "_test_obj")['flows'],
maps.NamedDict)
with patch.object(TendrlNS, "_get_ns") as mock_add_obj:
mock_add_obj.return_value = maps.NamedDict(
objects=maps.NamedDict(_Service=maps.NamedDict(
atoms=maps.NamedDict())))
tendrlNS._add_object("Service", obj_cls[1])
with patch.object(TendrlNS, "_get_ns") as mock_add_obj:
mock_add_obj.return_value = maps.NamedDict(
objects=maps.NamedDict(
_Service=maps.NamedDict(
flows=maps.NamedDict())))
tendrlNS._add_object("Service", obj_cls[1])
# Testing _get_objects
def test_get_objects():
path = os.path.join(os.path.dirname(
os.path.dirname(os.path.abspath(__file__))), "objects")
objects_list = [d.title().replace('_', '') for d in os.listdir(path)
if os.path.isdir(os.path.join(path, d))]
tendrlNS = init()
ret = tendrlNS._get_objects()
assert isinstance(objects_list, list)
assert ret is not None
# TO-DISCUSS : object_list is hard coded and might change in future
assert set(ret) == set(objects_list)
# Testing _get_object
def test_get_object():
tendrlNS = init()
ret = tendrlNS._get_object("NodeNetwork")
assert (inspect.isclass(ret)) is True
assert (issubclass(ret, objects.BaseObject)) is True
path = os.path.join(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))), "objects",
"definition")
with open(os.path.join(path, "master.yaml"), 'r') as f:
definition = yaml.safe_load(f)
def_obj = definition["namespace.tendrl"]["objects"]["NodeNetwork"]["attrs"]
# Creating instance of the class
temp_instance = ret()
# Comparing attributes of object from actual definition
for k, v in def_obj.items():
assert hasattr(temp_instance, k.lower())
# Testing _get_ns():
def test_get_ns():
tendrlNS = init()
assert isinstance(tendrlNS._get_ns(), maps.NamedDict) is True
tendrlNS.ns_name = "integrations"
tendrlNS._create_ns()
assert isinstance(tendrlNS._get_ns(), maps.NamedDict) is True
# Testing get_obj_definition
def test_get_obj_definition():
tendrlNS = init()
ret = tendrlNS.get_obj_definition("Service")
assert ret is not None
assert isinstance(ret, maps.NamedDict) is True
assert hasattr(ret, "attrs") is True
NS["compiled_definitions"] = tendrlNS.current_ns.definitions
ret = tendrlNS.get_obj_definition("Service")
assert ret is not None
assert isinstance(ret, maps.NamedDict) is True
assert hasattr(ret, "attrs") is True
# Testing get_obj_flow_definition
def test_get_obj_flow_definition():
tendrlNS = init()
with pytest.raises(KeyError):
tendrlNS.get_obj_flow_definition("Service", "test")
# Testing get_flow_definition()
def test_get_flow_definition():
tendrlNS = init()
with pytest.raises(KeyError):
tendrlNS.get_flow_definition("BaseFlow")
NS["compiled_definitions"] = tendrlNS.current_ns.definitions
tendrlNS.get_flow_definition("ImportCluster")
# Testing get_atom_definition
def test_get_atom_definition():
tendrlNS = init()
ret = tendrlNS.get_atom_definition("Service", "CheckServiceStatus")
assert ret is not None
assert isinstance(ret, maps.NamedDict) is True
assert hasattr(ret, "inputs") is True
# Testing add_atom
def test_add_atom():
tendrlNS = init()
obj_name = "Service"
current_ns = tendrlNS._get_ns()
obj = importlib.import_module(
"tendrl.commons.objects.service.atoms.check_service_status")
atom_class = ""
for atom_cls in inspect.getmembers(obj, inspect.isclass):
tendrlNS._add_atom(obj_name, "test_atom", atom_cls[1])
atom_class = atom_cls[1]
break
assert hasattr(current_ns.objects["_Service"]['atoms'], "test_atom")
assert current_ns.objects["_Service"]['atoms']["test_atom"] == atom_class
# Testing setup_definitions
def test_setup_definitions():
tendrlNS = init()
tendrlNS.setup_definitions()
assert tendrlNS.current_ns is not None
assert isinstance(tendrlNS.current_ns, maps.NamedDict) is True
# Testing add_flow
def test_add_flow():
tendrlNS = init()
flow_class = ""
flow = importlib.import_module("tendrl.commons.flows.import_cluster")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_flow("test_flow", flow_cls[1])
flow_class = flow_cls[1]
break
current_ns = tendrlNS._get_ns()
assert hasattr(current_ns.flows, "test_flow") is True
assert current_ns.flows["test_flow"] is flow_class
# Testing get_flow
def test_get_flow():
tendrlNS = init()
ret = tendrlNS.get_flow("ImportCluster")
assert ret is not None
# Testing add_obj_flow
def test_add_obj_flow():
tendrlNS = init()
flow = importlib.import_module("tendrl.commons.flows")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_obj_flow("Node", "AtomExecutionFailedError", flow_cls[1])
break
ret = tendrlNS.get_obj_flow("Node", "AtomExecutionFailedError")
assert ret is not None
assert (inspect.isclass(ret)) is True
# Testing get_obj_flow
def test_get_obj_flow():
tendrlNS = init()
flow = importlib.import_module("tendrl.commons.flows")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_obj_flow("Node", "AtomExecutionFailedError", flow_cls[1])
break
ret = tendrlNS.get_obj_flow("Node", "AtomExecutionFailedError")
assert ret is not None
assert (inspect.isclass(ret)) is True
# Testing get_obj_flows
def test_get_obj_flows():
tendrlNS = init()
flow = importlib.import_module("tendrl.commons.flows")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_obj_flow("Node", "AtomExecutionFailedError", flow_cls[1])
break
ret = tendrlNS._get_obj_flows("Node")
assert ret is not None
assert isinstance(ret, maps.NamedDict)
# Testing get_atom
def test_get_atom():
tendrlNS = init()
ret = tendrlNS.get_atom("Node", "Cmd")
assert ret is not None
assert (inspect.isclass(ret)) is True
# Testing get_atoms
def test_get_atoms():
tendrlNS = init()
ret = tendrlNS._get_atoms("Node")
assert ret is not None
assert isinstance(ret, maps.NamedDict)
# Testing _create_ns()
def test_create_ns():
tendrlNS = init()
assert getattr(NS, "tendrl")
tendrlNS.ns_name = "integrations"
tendrlNS._create_ns()
assert getattr(NS, "integrations")
tendrlNS._create_ns()
# Testing _validate_ns_flow_definitions
def test_validate_ns_flow_definitions():
tendrlNS = init()
raw_ns = "namespace.tendrl"
defs = tendrlNS.current_ns.definitions.get_parsed_defs()[raw_ns]
defs["flows"]["test"] = maps.NamedDict()
with pytest.raises(Exception):
tendrlNS._validate_ns_flow_definitions(raw_ns, defs)
tendrlNS.current_ns.flows["Test"] = "Test Flow"
with pytest.raises(Exception):
tendrlNS._validate_ns_flow_definitions(raw_ns, defs)
tendrlNS.current_ns.flows = None
defs = maps.NamedDict()
tendrlNS._validate_ns_flow_definitions(raw_ns, defs)
# Testing _validate_ns_obj_definitions
def test_validate_ns_obj_definitions():
tendrlNS = init()
raw_ns = "namespace.tendrl"
defs = tendrlNS.current_ns.definitions.get_parsed_defs()[raw_ns]
defs_temp = defs
defs_temp["objects"]["TestObject"] = maps.NamedDict()
with pytest.raises(Exception):
tendrlNS._validate_ns_obj_definitions(raw_ns, defs_temp)
tendrlNS.current_ns.objects["_Node"]["atoms"]["Test"] = \
"Test atom class"
with pytest.raises(Exception):
tendrlNS._validate_ns_obj_definitions(raw_ns, defs)
tendrlNS_temp = init()
tendrlNS_temp.current_ns.objects["_Node"]["flows"]["Test"] = \
"Test flow class"
with pytest.raises(Exception):
tendrlNS_temp._validate_ns_obj_definitions(raw_ns, defs)
tendrlNS.current_ns.objects["Test"] = "Test Object"
with pytest.raises(Exception):
tendrlNS._validate_ns_obj_definitions(raw_ns, defs)
tendrlNS_temp = init()
defs = tendrlNS_temp.current_ns.definitions.get_parsed_defs()[raw_ns]
defs["objects"]["Node"]["atoms"]["Test"] = \
"Test atom class"
with pytest.raises(Exception):
tendrlNS_temp._validate_ns_obj_definitions(raw_ns, defs)
defs = tendrlNS_temp.current_ns.definitions.get_parsed_defs()[raw_ns]
defs["objects"]["Node"]["flows"] = maps.NamedDict()
defs["objects"]["Node"]["flows"]["Test"] = "Test flow class"
with pytest.raises(Exception):
tendrlNS_temp._validate_ns_obj_definitions(raw_ns, defs)
defs = maps.NamedDict()
tendrlNS.current_ns.objects = None
tendrlNS._validate_ns_obj_definitions(raw_ns, defs)
# Testing _validate_ns_definitions
def test_validate_ns_definitions():
tendrlNS = init()
tendrlNS._validate_ns_obj_definitions = MagicMock(return_value=None)
tendrlNS._validate_ns_definitions()
raw_ns = "namespace.tendrl"
defs = tendrlNS.current_ns.definitions.get_parsed_defs()[raw_ns]
tendrlNS._validate_ns_obj_definitions.assert_called_with(raw_ns, defs)
tendrlNS._validate_ns_flow_definitions = MagicMock(return_value=None)
tendrlNS._validate_ns_definitions()
tendrlNS._validate_ns_flow_definitions.assert_called_with(raw_ns, defs)
tendrlNS.current_ns.definitions = maps.NamedDict()
with pytest.raises(Exception):
tendrlNS._validate_ns_definitions()
# Testing setup_common_objects
def test_setup_common_objects(monkeypatch):
tendrlNS = init()
obj = importlib.import_module("tendrl.commons.tests.fixtures.config")
for obj_cls in inspect.getmembers(obj, inspect.isclass):
tendrlNS.current_ns.objects["Config"] = obj_cls[1]
with patch.object(etcd, "Client", return_value=etcd.Client()) as client:
tendrlNS.current_ns.objects.pop("NodeContext")
tendrlNS.setup_common_objects()
assert NS._int.client is not None
assert NS._int.wclient is not None
etcd.Client.assert_called_with(host=1, port=1)
tendrlNS.current_ns.objects.pop("TendrlContext")
tendrlNS.setup_common_objects()
def client(**param):
raise Exception
monkeypatch.setattr(etcd, 'Client', client)
with pytest.raises(Exception):
tendrlNS.setup_common_objects()
| r0h4n/commons | tendrl/commons/tests/test_init.py | Python | lgpl-2.1 | 17,194 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Scons(PythonPackage):
"""SCons is a software construction tool"""
homepage = "http://scons.org"
url = "https://pypi.io/packages/source/s/scons/scons-2.5.1.tar.gz"
version('2.5.1', '3eac81e5e8206304a9b4683c57665aa4')
version('2.5.0', 'bda5530a70a41a7831d83c8b191c021e')
# Python 3 is not supported
depends_on('python@:2.8', type=('build', 'run'))
| skosukhin/spack | var/spack/repos/builtin/packages/scons/package.py | Python | lgpl-2.1 | 1,645 |
# Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# This case corresponds to: /visu/StreamLines/F7 case
# Create Stream Lines for all fields of the given MED file
import sys
from paravistest import datadir, pictureext, get_picture_dir
from presentations import CreatePrsForFile, PrsTypeEnum
import pvserver as paravis
# Directory for saving snapshots
picturedir = get_picture_dir("StreamLines/F7")
# Create presentations
myParavis = paravis.myParavis
file = datadir + "occ4050.med"
print " --------------------------------- "
print "file ", file
print " --------------------------------- "
print "\nCreatePrsForFile..."
CreatePrsForFile(myParavis, file, [PrsTypeEnum.STREAMLINES], picturedir, pictureext)
| FedoraScientific/salome-paravis | test/VisuPrs/StreamLines/F7.py | Python | lgpl-2.1 | 1,521 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
from collections import OrderedDict
from uuid import uuid4
from warnings import warn
from numpy import object as np_object
from numpy import array, inf, isinf
from six import string_types
from cobra.core import Metabolite, Model, Reaction
from cobra.util import create_stoichiometric_matrix
from cobra.util.solver import set_objective
try:
import scipy.sparse as scipy_sparse
import scipy.io as scipy_io
except ImportError:
scipy_sparse = None
scipy_io = None
# precompiled regular expressions
_bracket_re = re.compile(r"\[[a-z]\]$")
_underscore_re = re.compile(r"_[a-z]$")
def _get_id_compartment(id):
"""extract the compartment from the id string"""
bracket_search = _bracket_re.findall(id)
if len(bracket_search) == 1:
return bracket_search[0][1]
underscore_search = _underscore_re.findall(id)
if len(underscore_search) == 1:
return underscore_search[0][1]
return None
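# For example (illustrative ids): _get_id_compartment("atp[c]") and
# _get_id_compartment("atp_c") both return "c"; an id with neither suffix
# returns None.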
def _cell(x):
"""translate an array x into a MATLAB cell array"""
x_no_none = [i if i is not None else "" for i in x]
return array(x_no_none, dtype=np_object)
def load_matlab_model(infile_path, variable_name=None, inf=inf):
"""Load a cobra model stored as a .mat file
Parameters
----------
infile_path: str
path to the file to read
variable_name: str, optional
The variable name of the model in the .mat file. If this is not
specified, then the first MATLAB variable which looks like a COBRA
model will be used
inf: value
The value to use for infinite bounds. Some solvers do not handle
infinite values so for using those, set this to a high numeric value.
Returns
-------
cobra.core.Model.Model:
The resulting cobra model
"""
if not scipy_io:
raise ImportError('load_matlab_model requires scipy')
data = scipy_io.loadmat(infile_path)
possible_names = []
if variable_name is None:
# skip meta variables
meta_vars = {"__globals__", "__header__", "__version__"}
possible_names = sorted(i for i in data if i not in meta_vars)
if len(possible_names) == 1:
variable_name = possible_names[0]
if variable_name is not None:
return from_mat_struct(data[variable_name], model_id=variable_name,
inf=inf)
for possible_name in possible_names:
try:
return from_mat_struct(data[possible_name], model_id=possible_name,
inf=inf)
except ValueError:
pass
# If code here is executed, then no model was found.
raise IOError("no COBRA model found")
def save_matlab_model(model, file_name, varname=None):
"""Save the cobra model as a .mat file.
This .mat file can be used directly in the MATLAB version of COBRA.
Parameters
----------
model : cobra.core.Model.Model object
The model to save
file_name : str or file-like object
The file to save to
varname : string
The name of the variable within the workspace
"""
if not scipy_io:
raise ImportError('load_matlab_model requires scipy')
if varname is None:
varname = str(model.id) \
if model.id is not None and len(model.id) > 0 \
else "exported_model"
mat = create_mat_dict(model)
scipy_io.savemat(file_name, {varname: mat},
appendmat=True, oned_as="column")
def create_mat_metabolite_id(model):
for met in model.metabolites:
if not _get_id_compartment(met.id) and met.compartment:
yield '{}[{}]'.format(met.id,
model.compartments[met.compartment].lower())
else:
yield met.id
def create_mat_dict(model):
"""create a dict mapping model attributes to arrays"""
rxns = model.reactions
mets = model.metabolites
mat = OrderedDict()
mat["mets"] = _cell([met_id for met_id in create_mat_metabolite_id(model)])
mat["metNames"] = _cell(mets.list_attr("name"))
mat["metFormulas"] = _cell([str(m.formula) for m in mets])
try:
mat["metCharge"] = array(mets.list_attr("charge")) * 1.
except TypeError:
# can't have any None entries for charge, or this will fail
pass
mat["genes"] = _cell(model.genes.list_attr("id"))
# make a matrix for rxnGeneMat
# reactions are rows, genes are columns
rxn_gene = scipy_sparse.dok_matrix((len(model.reactions),
len(model.genes)))
if min(rxn_gene.shape) > 0:
for i, reaction in enumerate(model.reactions):
for gene in reaction.genes:
rxn_gene[i, model.genes.index(gene)] = 1
mat["rxnGeneMat"] = rxn_gene
mat["grRules"] = _cell(rxns.list_attr("gene_reaction_rule"))
mat["rxns"] = _cell(rxns.list_attr("id"))
mat["rxnNames"] = _cell(rxns.list_attr("name"))
mat["subSystems"] = _cell(rxns.list_attr("subsystem"))
mat["csense"] = "".join((
met._constraint_sense for met in model.metabolites))
stoich_mat = create_stoichiometric_matrix(model)
mat["S"] = stoich_mat if stoich_mat is not None else [[]]
# multiply by 1 to convert to float, working around scipy bug
# https://github.com/scipy/scipy/issues/4537
mat["lb"] = array(rxns.list_attr("lower_bound")) * 1.
mat["ub"] = array(rxns.list_attr("upper_bound")) * 1.
mat["b"] = array(mets.list_attr("_bound")) * 1.
mat["c"] = array(rxns.list_attr("objective_coefficient")) * 1.
mat["rev"] = array(rxns.list_attr("reversibility")) * 1
mat["description"] = str(model.id)
return mat
def from_mat_struct(mat_struct, model_id=None, inf=inf):
"""create a model from the COBRA toolbox struct
The struct will be a dict read in by scipy.io.loadmat
"""
m = mat_struct
if m.dtype.names is None:
raise ValueError("not a valid mat struct")
if not {"rxns", "mets", "S", "lb", "ub"} <= set(m.dtype.names):
raise ValueError("not a valid mat struct")
if "c" in m.dtype.names:
c_vec = m["c"][0, 0]
else:
c_vec = None
warn("objective vector 'c' not found")
model = Model()
if model_id is not None:
model.id = model_id
elif "description" in m.dtype.names:
description = m["description"][0, 0][0]
if not isinstance(description, string_types) and len(description) > 1:
model.id = description[0]
warn("Several IDs detected, only using the first.")
else:
model.id = description
else:
model.id = "imported_model"
for i, name in enumerate(m["mets"][0, 0]):
new_metabolite = Metabolite()
new_metabolite.id = str(name[0][0])
if all(var in m.dtype.names for var in
['metComps', 'comps', 'compNames']):
comp_index = m["metComps"][0, 0][i][0] - 1
new_metabolite.compartment = m['comps'][0, 0][comp_index][0][0]
if new_metabolite.compartment not in model.compartments:
comp_name = m['compNames'][0, 0][comp_index][0][0]
model.compartments[new_metabolite.compartment] = comp_name
else:
new_metabolite.compartment = _get_id_compartment(new_metabolite.id)
if new_metabolite.compartment not in model.compartments:
model.compartments[
new_metabolite.compartment] = new_metabolite.compartment
try:
new_metabolite.name = str(m["metNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.formula = str(m["metFormulas"][0][0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_metabolite.charge = float(m["metCharge"][0, 0][i][0])
int_charge = int(new_metabolite.charge)
if new_metabolite.charge == int_charge:
new_metabolite.charge = int_charge
except (IndexError, ValueError):
pass
model.add_metabolites([new_metabolite])
new_reactions = []
coefficients = {}
for i, name in enumerate(m["rxns"][0, 0]):
new_reaction = Reaction()
new_reaction.id = str(name[0][0])
new_reaction.lower_bound = float(m["lb"][0, 0][i][0])
new_reaction.upper_bound = float(m["ub"][0, 0][i][0])
if isinf(new_reaction.lower_bound) and new_reaction.lower_bound < 0:
new_reaction.lower_bound = -inf
if isinf(new_reaction.upper_bound) and new_reaction.upper_bound > 0:
new_reaction.upper_bound = inf
if c_vec is not None:
coefficients[new_reaction] = float(c_vec[i][0])
try:
new_reaction.gene_reaction_rule = str(m['grRules'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.name = str(m["rxnNames"][0, 0][i][0][0])
except (IndexError, ValueError):
pass
try:
new_reaction.subsystem = str(m['subSystems'][0, 0][i][0][0])
except (IndexError, ValueError):
pass
new_reactions.append(new_reaction)
model.add_reactions(new_reactions)
set_objective(model, coefficients)
coo = scipy_sparse.coo_matrix(m["S"][0, 0])
for i, j, v in zip(coo.row, coo.col, coo.data):
model.reactions[j].add_metabolites({model.metabolites[i]: v})
return model
def _check(result):
"""ensure success of a pymatbridge operation"""
if result["success"] is not True:
raise RuntimeError(result["content"]["stdout"])
def model_to_pymatbridge(model, variable_name="model", matlab=None):
"""send the model to a MATLAB workspace through pymatbridge
This model can then be manipulated through the COBRA toolbox
Parameters
----------
variable_name : str
The variable name to which the model will be assigned in the
MATLAB workspace
matlab : None or pymatbridge.Matlab instance
The MATLAB workspace to which the variable will be sent. If
this is None, then this will be sent to the same environment
used in IPython magics.
"""
if scipy_sparse is None:
raise ImportError("`model_to_pymatbridge` requires scipy!")
if matlab is None: # assumed to be running an IPython magic
from IPython import get_ipython
matlab = get_ipython().magics_manager.registry["MatlabMagics"].Matlab
model_info = create_mat_dict(model)
S = model_info["S"].todok()
model_info["S"] = 0
temp_S_name = "cobra_pymatbridge_temp_" + uuid4().hex
_check(matlab.set_variable(variable_name, model_info))
_check(matlab.set_variable(temp_S_name, S))
_check(matlab.run_code("%s.S = %s;" % (variable_name, temp_S_name)))
# all vectors need to be transposed
for i in model_info.keys():
if i == "S":
continue
_check(matlab.run_code("{0}.{1} = {0}.{1}';".format(variable_name, i)))
_check(matlab.run_code("clear %s;" % temp_S_name))
| zakandrewking/cobrapy | cobra/io/mat.py | Python | lgpl-2.1 | 11,174 |
#!/usr/bin/env python
# coding: utf8
from nose.tools import assert_equal
from nose import SkipTest
#lines above are inserted automatically by pythoscope. Line below overrides them
from Goulib.tests import *
from Goulib.interval import *
class TestInInterval:
def test_in_interval(self):
assert_equal(in_interval([1,2], 1),True)
assert_equal(in_interval([2,1], 1),True) #interval might be unordered
assert_equal(in_interval((2,1), 1),True) #or defined by a tuple
assert_equal(in_interval([1,2], 2,closed=True),True)
assert_equal(in_interval([1,2], 2,closed=False),False)
class TestIntersect:
def test_intersect(self):
assert_equal(intersect([1,3],[2,4]),True)
assert_equal(intersect([3,1],(4,2)),True)
assert_equal(intersect((1,2),[2,4]),False)
assert_equal(intersect((5,1),(2,3)),True)
class TestIntersection:
def test_intersection(self):
assert_equal(intersection([1,3],(4,2)),(2,3))
assert_equal(intersection([1,5],(3,2)),(2,3))
assert_equal(intersection((1,2),[2,4]),(2,2))
assert_equal(intersection((1,2),[3,4]),None)
class TestIntersectlen:
def test_intersectlen(self):
assert_equal(intersectlen([1,5],(3,2)),1)
assert_equal(intersectlen((1,2),[2,4]),0)
assert_equal(intersectlen((1,2),[3,4],None),None)
class TestInterval:
@classmethod
def setup_class(self):
self.none = Interval(None,None) #required for Box, equivalent t
self.i12 = Interval(1,2)
self.i13 = Interval(1,3)
self.i23 = Interval(2,3)
self.i24 = Interval(2,4)
self.i25 = Interval(5,2)
assert_equal(self.i25,Interval(2,5)) #check order
self.i33 = Interval(3,3) #empty
self.i34 = Interval(3,4)
def test___init__(self):
pass #tested above
def test___repr__(self):
assert_equal(repr(self.i12),'[1,2)')
def test___str__(self):
assert_equal(str(self.i12),'[1,2)')
def test___hash__(self):
"""test that we can use an Interval as key in a dict and retrieve it with a different Interval with same values"""
dict={}
dict[self.i12]=self.i12
assert_equal(dict[Interval(2,1)],self.i12)
def test___lt__(self):
assert_equal(self.i12<self.i34,True)
assert_equal(self.i12>self.i34,False)
def test___contains__(self):
assert_true(2 in self.i13)
assert_false(3 in self.i13)
def test_empty(self):
assert_true(self.i33.empty())
assert_false(self.i13.empty())
def test_hull(self):
assert_equal(self.i12.hull(self.i34),Interval(1,4))
def test_intersection(self):
assert_equal(self.i12.intersection(self.i34),None)
assert_equal(self.i13.intersection(self.i25),self.i23)
assert_equal(self.i25.intersection(self.i13),self.i23)
def test_overlap(self):
assert_false(Interval(1,2).overlap(Interval(3,4)))
assert_true(Interval(1,3).overlap(Interval(2,5)))
def test_separation(self):
assert_equal(self.i12.separation(self.i23),0)
assert_equal(self.i12.separation(self.i34),3-2)
assert_equal(self.i34.separation(self.i12),3-2)
def test_subset(self):
assert_true(Interval(1,3).subset(Interval(1,3)))
assert_false(Interval(1,3).subset(Interval(1,2)))
assert_false(Interval(2,3).subset(Interval(1,2)))
def test_proper_subset(self):
assert_false(Interval(1,3).proper_subset(Interval(1,3)))
eps=1E-12
assert_true(Interval(1,3).proper_subset(Interval(1-eps,3+eps)))
def test_singleton(self):
assert_true(Interval(1,2).singleton())
assert_false(Interval(1,3).singleton())
def test___add__(self):
assert_equal(Interval(1,3)+Interval(2,4),Interval(1,4))
i24=Interval(2,3)+Interval(3,4)
assert_equal(i24,self.i24)
assert_equal(Interval(4,5)+Interval(2,3),Intervals([Interval(4,5),Interval(2,3)]))
a=Interval(5,6)+Interval(2,3)
a+=Interval(3,4)
b=Intervals([Interval(5,6),Interval(2,4)])
assert_equal(a,b)
def test___eq__(self):
pass #tested in other tests...
def test___iadd__(self):
pass #tested in other tests...
def test_center(self):
pass #tested in other tests...
def test_size(self):
pass #tested in other tests...
def test___call__(self):
# interval = Interval(start, end)
# assert_equal(expected, interval.__call__())
raise SkipTest
def test___nonzero__(self):
# interval = Interval(start, end)
# assert_equal(expected, interval.__nonzero__())
raise SkipTest
class TestIntervals:
@classmethod
def setup_class(self):
i12 = Interval(1,2)
i13 = Interval(1,3)
i24 = Interval(2,4)
i56 = Interval(5,6)
self.intervals=Intervals([i24,i13,i12,i56])
assert_equal(str(self.intervals),'[[1,4), [5,6)]')
def test___init__(self):
pass #tested above
def test___call__(self):
assert_equal(self.intervals(2),Interval(1,4))
assert_equal(self.intervals(4),None)
assert_equal(self.intervals(5),Interval(5,6))
def test_insert(self):
pass #tested above
def test_extend(self):
pass #tested above
def test___add__(self):
i=self.intervals+Interval(-1,-3)
assert_equal(str(i),'[[-3,-1), [1,4), [5,6)]')
def test___iadd__(self):
i=Intervals(self.intervals)
i+=Interval(-1,-3)
assert_equal(str(i),'[[-3,-1), [1,4), [5,6)]')
def test___repr__(self):
# intervals = Intervals()
# assert_equal(expected, intervals.__repr__())
raise SkipTest
class TestBox:
@classmethod
def setup_class(self):
self.empty=Box(2)
self.unit=Box(Interval(0,1),Interval(0,1))
self.box=Box((-1,4),[3,-2])
self.copy=Box(self.box)
assert_equal(self.box,self.copy)
def test___init__(self):
pass #tested in setup_class
def test___repr__(self):
assert_equal(repr(self.box),'[[-1,3), [-2,4)]')
def test_min(self):
assert_equal(self.unit.min, (0,0))
assert_equal(self.box.min, (-1,-2))
def test_max(self):
assert_equal(self.unit.max, (1,1))
assert_equal(self.box.max, (3,4))
def test_size(self):
assert_equal(self.box.size, (4,6))
def test_center(self):
assert_equal(self.box.center, (1,1))
def test___add__(self):
box=self.unit+(2,0)
assert_equal(repr(box),'[[0,2), [0,1)]')
box=box+Box((-2,-1),(.5,.5))
assert_equal(repr(box),'[[-2,2), [-1,1)]')
def test___iadd__(self):
box=Box(self.unit)
box+=(2,0)
assert_equal(repr(box),'[[0,2), [0,1)]')
box+=Box((-2,-1),(.5,.5))
assert_equal(repr(box),'[[-2,2), [-1,1)]')
def test_end(self):
pass #tested in other tests...
def test_start(self):
pass #tested in other tests...
def test___contains__(self):
# box = Box(*args)
# assert_equal(expected, box.__contains__(other))
raise SkipTest
def test___nonzero__(self):
# box = Box(*args)
# assert_equal(expected, box.__nonzero__())
raise SkipTest
def test_empty(self):
# box = Box(*args)
# assert_equal(expected, box.empty())
raise SkipTest
def test_corner(self):
# box = Box(*args)
# assert_equal(expected, box.corner(n))
raise SkipTest
def test___call__(self):
# box = Box(*args)
# assert_equal(expected, box.__call__())
raise SkipTest
if __name__ == "__main__":
runmodule()
|
goulu/Goulib
|
tests/test_Goulib_interval.py
|
Python
|
lgpl-3.0
| 8,146
|
'''
Created on Feb 26, 2015
@author: Akshat
'''
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.gridspec as mgrid
import ntpath
class PrbLogAgg:
    '''
    Aggregates probe resource-usage CSV logs into pandas DataFrames and renders
    a grid of time-series plots (CPU, memory, disk I/O) per log file.
    '''
dataFrame = {}
csv = []
def __init__(self, output_directory, csvFilePath):
self.output_directory = output_directory
self.csv = csvFilePath
    def plotGraph(self, ax, df, clm, column_name):
        """Scatter-plot the given column (clm) against time on the supplied axes."""
        ax.set_title(column_name + " vs. Time")
        ax.set_xlabel("Time")
        ax.set_ylabel(column_name)
ax.scatter(df.time, clm, alpha=0.5, edgecolors='none', s=5)
ax.set_ylim(1)
ax.set_xlim(df.time.max(), df.time.min())
ax.invert_xaxis()
def big_plot(self, i_df, file_name):
# Set some Pandas options
pd.set_option('display.notebook_repr_html', False)
pd.set_option('display.max_columns', 20)
pd.set_option('display.max_rows', 25)
# For Graph Plot
# df.plot(subplots=True, figsize=(20, 60)); plt.legend(loc='best')
plt.close('all')
fig, ((ax0, ax1, ax2), (ax3, ax4, ax5)) = plt.subplots(nrows=2, ncols=3)
fig.set_size_inches(15,5)
gs = mgrid.GridSpec(2, 3)
ax0 = plt.subplot(gs[0, 0])
ax1 = plt.subplot(gs[0, 1])
ax2 = plt.subplot(gs[0, 2])
ax3 = plt.subplot(gs[1, 0])
ax4 = plt.subplot(gs[1, 1])
ax5 = plt.subplot(gs[1, 2])
self.plotGraph(ax0, i_df, i_df.cpuperc, 'CPU(Mhz)')
self.plotGraph(ax1, i_df, i_df.memmb, 'RAM(Mb)')
self.plotGraph(ax2, i_df, i_df.readcount, 'Disk-read-count')
self.plotGraph(ax3, i_df, i_df.writecount, 'Disk-write-count')
self.plotGraph(ax4, i_df, i_df.readbytes, 'Disk-read-bytes')
self.plotGraph(ax5, i_df, i_df.writebyte, 'Disk-write-bytes')
# self.plotGraph(ax7, i_df, i_df.netConnCount, 'Net-connection-count')
# self.plotGraph(ax0, i_df, i_df.childProcCount, 'Child-proc-count')
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
fig.savefig(self.output_directory + file_name + '.png', bbox_inches='tight')
def loadDataFrame(self):
for path in self.csv:
df = pd.read_csv(path)
self.dataFrame[self.path_leaf(path)] = df
for f_name, i_df in self.dataFrame.items():
i_df.columns = ['time', 'cpuperc', 'memmb', 'readcount', 'writecount', 'readbytes', 'writebyte', 'netConnCount', 'childProcCount']
self.big_plot(i_df, 'big_plot')
def getDataFrame(self, logfile_name):
return self.dataFrame[logfile_name]
def getDataFrames(self):
return self.dataFrame
def path_leaf(self, path):
head, tail = ntpath.split(path)
return tail or ntpath.basename(head)
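# --- Illustrative usage sketch (editorial addition) ---
# Shows how PrbLogAgg might be driven end to end; the output directory and the
# probe CSV path below are placeholder assumptions, not files from this repo.
if __name__ == "__main__":
    aggregator = PrbLogAgg("./plots/", ["./logs/probe_run_1.csv"])
    aggregator.loadDataFrame()           # parses the CSVs and writes big_plot.png
    frames = aggregator.getDataFrames()  # mapping of log file name -> DataFrame
    print(list(frames.keys()))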
|
R-daneel-olivaw/CPET
|
module2/ProbeLogAggregator.py
|
Python
|
lgpl-3.0
| 3,096
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "downloadmusic.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
irvingprog/gmusic
|
manage.py
|
Python
|
lgpl-3.0
| 256
|
"""Contain the socket handler for players"""
from game import Game
from websocket import WebSocketHandler
from zone import Zone
import errcode
class AdminWs(WebSocketHandler):
"""The socket handler for websocket"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.callable_from_json = {"setParams": self.set_params,
"login": self.login,
"logout": self.logout,
"getParams": self.get_params,
"startGame": self.start_game}
def open(self, *args, **kwargs):
super().open(*args, **kwargs)
self.login()
@staticmethod
def start_game():
"""start the game"""
Game().start_game()
@staticmethod
def set_params(**kwargs):
"""set params of the game"""
params = Game().params
map_ = kwargs.get('map', None)
if map_:
params.map_center = (map_['lat'], map_['lng'])
zones = kwargs.get('zones', [])
for zone in zones:
Game().create_zone(zone['team'], tuple(zone['pos']), zone['radius'], zone['id'], Zone)
timeout = kwargs.get('time')
params.game_timeout = timeout
def get_params(self):
"""send to admin all params"""
pass
def login(self):
"""Login player and look if username and team are valids"""
if Game().admin:
self.send(errcode.USERNAME_ALREADY_SET)
self.close()
else:
Game().admin = self
self.logged = True
def logout(self):
"""logout player and remove it from game"""
self.close()
def on_close(self):
print("Admin is exiting...")
self.logged = False
Game().admin = None
def send(self, msg):
super().send(msg)
print('Send to Admin : {}'.format(msg))
def on_message(self, msg):
print('Send by Admin : {}'.format(msg))
super().on_message(msg)
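# --- Illustrative sketch (editorial addition, not part of the original module) ---
# Example of the keyword arguments set_params() expects once an admin "setParams"
# message has been decoded; the coordinates, zone and timeout values below are
# assumptions chosen purely for illustration.
EXAMPLE_SET_PARAMS_KWARGS = {
    "map": {"lat": 48.40, "lng": -4.48},
    "zones": [{"team": "red", "pos": [48.41, -4.49], "radius": 50, "id": 1}],
    "time": 1800,
}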
|
mdl29/tidutyzef
|
serveur/adminws.py
|
Python
|
lgpl-3.0
| 2,061
|
#!/usr/bin/env python
"""pypozyx.definitions.bitmasks - contains all bitmasks used in Pozyx functionality, such as interrupt flags."""
# Bit mask for POZYX_ST_RESULT
POZYX_ST_RESULT_ACC = 0x01
POZYX_ST_RESULT_MAGN = 0x02
POZYX_ST_RESULT_GYR = 0x04
POZYX_ST_RESULT_MCU = 0x08
POZYX_ST_RESULT_PRES = 0x10
POZYX_ST_RESULT_UWB = 0x20
# Bit mask for POZYX_INT_STATUS
POZYX_INT_STATUS_ERR = 0x01
POZYX_INT_STATUS_POS = 0x02
POZYX_INT_STATUS_IMU = 0x04
POZYX_INT_STATUS_RX_DATA = 0x08
POZYX_INT_STATUS_FUNC = 0x10
# Bit mask for POZYX_INT_MASK
POZYX_INT_MASK_ERR = 0x01
POZYX_INT_MASK_POS = 0x02
POZYX_INT_MASK_IMU = 0x04
POZYX_INT_MASK_RX_DATA = 0x08
POZYX_INT_MASK_FUNC = 0x10
POZYX_INT_MASK_TDMA = 0x40
POZYX_INT_MASK_PIN = 0x80
POZYX_INT_MASK_ALL = 0x1F
# Bit mask for POZYX_LED_CTRL
POZYX_LED_CTRL_LED1 = 0x01
POZYX_LED_CTRL_LED2 = 0x02
POZYX_LED_CTRL_LED3 = 0x04
POZYX_LED_CTRL_LED4 = 0x08
# Bit mask for device type
POZYX_TYPE = 0xE0
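# --- Illustrative sketch (editorial addition, not part of the original module) ---
# Shows how these masks are typically ANDed with a raw status byte; the sample
# value 0x0A below is an assumption used purely for demonstration.
if __name__ == "__main__":
    interrupt_status = 0x0A  # pretend value read from the POZYX_INT_STATUS register
    if interrupt_status & POZYX_INT_STATUS_POS:
        print("new position available")
    if interrupt_status & POZYX_INT_STATUS_RX_DATA:
        print("data received over UWB")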
|
laurentva/Pozyx-Python-library
|
pypozyx/definitions/bitmasks.py
|
Python
|
lgpl-3.0
| 939
|
#!/usr/bin/env python
"""
Tests for CANtact interfaces
"""
import unittest
import can
from can.interfaces import cantact
class CantactTest(unittest.TestCase):
def test_bus_creation(self):
bus = can.Bus(channel=0, bustype="cantact", _testing=True)
self.assertIsInstance(bus, cantact.CantactBus)
cantact.MockInterface.set_bitrate.assert_called()
cantact.MockInterface.set_bit_timing.assert_not_called()
cantact.MockInterface.set_enabled.assert_called()
cantact.MockInterface.set_monitor.assert_called()
cantact.MockInterface.start.assert_called()
def test_bus_creation_bittiming(self):
cantact.MockInterface.set_bitrate.reset_mock()
bt = can.BitTiming(tseg1=13, tseg2=2, brp=6, sjw=1)
bus = can.Bus(channel=0, bustype="cantact", bit_timing=bt, _testing=True)
self.assertIsInstance(bus, cantact.CantactBus)
cantact.MockInterface.set_bitrate.assert_not_called()
cantact.MockInterface.set_bit_timing.assert_called()
cantact.MockInterface.set_enabled.assert_called()
cantact.MockInterface.set_monitor.assert_called()
cantact.MockInterface.start.assert_called()
def test_transmit(self):
bus = can.Bus(channel=0, bustype="cantact", _testing=True)
msg = can.Message(
arbitration_id=0xC0FFEF, data=[1, 2, 3, 4, 5, 6, 7, 8], is_extended_id=True
)
bus.send(msg)
cantact.MockInterface.send.assert_called()
def test_recv(self):
bus = can.Bus(channel=0, bustype="cantact", _testing=True)
frame = bus.recv(timeout=0.5)
cantact.MockInterface.recv.assert_called()
self.assertIsInstance(frame, can.Message)
def test_recv_timeout(self):
bus = can.Bus(channel=0, bustype="cantact", _testing=True)
frame = bus.recv(timeout=0.0)
cantact.MockInterface.recv.assert_called()
self.assertIsNone(frame)
def test_shutdown(self):
bus = can.Bus(channel=0, bustype="cantact", _testing=True)
bus.shutdown()
cantact.MockInterface.stop.assert_called()
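# Editorial addition: allow this test module to be run directly with the standard
# unittest runner (an assumption; python-can normally collects it via pytest).
if __name__ == "__main__":
    unittest.main()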
|
hardbyte/python-can
|
test/test_cantact.py
|
Python
|
lgpl-3.0
| 2,119
|
# This file is generated from pydcs_export.lua
class Weapons:
AB_250_2___144_x_SD_2__250kg_CBU_with_HE_submunitions = {"clsid": "{AB_250_2_SD_2}", "name": "AB 250-2 - 144 x SD-2, 250kg CBU with HE submunitions", "weight": 280}
AB_250_2___17_x_SD_10A__250kg_CBU_with_10kg_Frag_HE_submunitions = {"clsid": "{AB_250_2_SD_10A}", "name": "AB 250-2 - 17 x SD-10A, 250kg CBU with 10kg Frag/HE submunitions", "weight": 220}
AB_500_1___34_x_SD_10A__500kg_CBU_with_10kg_Frag_HE_submunitions = {"clsid": "{AB_500_1_SD_10A}", "name": "AB 500-1 - 34 x SD-10A, 500kg CBU with 10kg Frag/HE submunitions", "weight": 470}
ADEN_GUNPOD = {"clsid": "{ADEN_GUNPOD}", "name": "ADEN GUNPOD", "weight": 87}
ADM_141A = {"clsid": "{BRU42_ADM141}", "name": "ADM_141A", "weight": 308}
ADM_141A_ = {"clsid": "{BRU3242_ADM141}", "name": "ADM_141A", "weight": 365.38}
ADM_141A_TALD = {"clsid": "{ADM_141A}", "name": "ADM-141A TALD", "weight": 180}
ADM_141B_TALD = {"clsid": "{ADM_141B}", "name": "ADM-141B TALD", "weight": 180}
AERO_1D_300_Gallons_Fuel_Tank_ = {"clsid": "{AV8BNA_AERO1D}", "name": "AERO 1D 300 Gallons Fuel Tank ", "weight": 1002.439}
AERO_1D_300_Gallons_Fuel_Tank__Empty_ = {"clsid": "{AV8BNA_AERO1D_EMPTY}", "name": "AERO 1D 300 Gallons Fuel Tank (Empty)", "weight": 93.89362}
AGM114x2_OH_58 = {"clsid": "AGM114x2_OH_58", "name": "AGM-114K * 2", "weight": 250}
AGM_114K = {"clsid": "{ee368869-c35a-486a-afe7-284beb7c5d52}", "name": "AGM-114K", "weight": 65}
AGM_114K___4 = {"clsid": "{88D18A5E-99C8-4B04-B40B-1C02F2018B6E}", "name": "AGM-114K * 4", "weight": 250}
AGM_119B_Penguin_ASM = {"clsid": "{7B8DCEB4-820B-4015-9B48-1028A4195692}", "name": "AGM-119B Penguin ASM", "weight": 300}
AGM_122_Sidearm = {"clsid": "{AGM_122_SIDEARM}", "name": "AGM-122 Sidearm", "weight": 92}
AGM_122_Sidearm_ = {"clsid": "{LAU_7_AGM_122_SIDEARM}", "name": "AGM-122 Sidearm", "weight": 107}
AGM_122_Sidearm___light_ARM = {"clsid": "{AGM_122}", "name": "AGM-122 Sidearm - light ARM", "weight": 88}
AGM_154A___JSOW_CEB__CBU_type_ = {"clsid": "{AGM-154A}", "name": "AGM-154A - JSOW CEB (CBU-type)", "weight": 485}
AGM_154B___JSOW_Anti_Armour = {"clsid": "{AGM-154B}", "name": "AGM-154B - JSOW Anti-Armour", "weight": 485}
AGM_154C___JSOW_Unitary_BROACH = {"clsid": "{9BCC2A2B-5708-4860-B1F1-053A18442067}", "name": "AGM-154C - JSOW Unitary BROACH", "weight": 484}
AGM_45A_Shrike_ARM = {"clsid": "{AGM_45A}", "name": "AGM-45A Shrike ARM", "weight": 177}
AGM_45B_Shrike_ARM__Imp_ = {"clsid": "{3E6B632D-65EB-44D2-9501-1C2D04515404}", "name": "AGM-45B Shrike ARM (Imp)", "weight": 177}
AGM_62_Walleye_II___Guided_Weapon_Mk_5__TV_Guided_ = {"clsid": "{C40A1E3A-DD05-40D9-85A4-217729E37FAE}", "name": "AGM-62 Walleye II - Guided Weapon Mk 5 (TV Guided)", "weight": 1061}
AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{444BA8AE-82A7-4345-842E-76154EFCCA47}", "name": "AGM-65D - Maverick D (IIR ASM)", "weight": 218}
AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{F16A4DE0-116C-4A71-97F0-2CF85B0313EF}", "name": "AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 286}
AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{69DC8AE7-8F77-427B-B8AA-B19D3F478B65}", "name": "AGM-65K - Maverick K (CCD Imp ASM)", "weight": 360}
AGM_84 = {"clsid": "AGM_84", "name": "AGM-84 HARPOON", "weight": None}
AGM_84A_Harpoon_ASM = {"clsid": "{8B7CADF9-4954-46B3-8CFB-93F2F5B90B03}", "name": "AGM-84A Harpoon ASM", "weight": 661.5}
AGM_84D_Harpoon_AShM = {"clsid": "{AGM_84D}", "name": "AGM-84D Harpoon AShM", "weight": 540}
AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile_ = {"clsid": "{AF42E6DF-9A60-46D8-A9A0-1708B241AADB}", "name": "AGM-84E Harpoon/SLAM (Stand-Off Land-Attack Missile)", "weight": 628}
AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile__ = {"clsid": "{AGM_84E}", "name": "AGM-84E Harpoon/SLAM (Stand-Off Land-Attack Missile)", "weight": 628}
AGM_84H_SLAM_ER__Expanded_Response_ = {"clsid": "{AGM_84H}", "name": "AGM-84H SLAM-ER (Expanded Response)", "weight": 675}
AGM_86C_ALCM = {"clsid": "{769A15DF-6AFB-439F-9B24-5B7A45C59D16}", "name": "AGM-86C ALCM", "weight": 1950}
AGM_88C_HARM___High_Speed_Anti_Radiation_Missile = {"clsid": "{B06DD79A-F21E-4EB9-BD9D-AB3844618C9C}", "name": "AGM-88C HARM - High Speed Anti-Radiation Missile", "weight": 361}
AGM_88C_HARM___High_Speed_Anti_Radiation_Missile_ = {"clsid": "{B06DD79A-F21E-4EB9-BD9D-AB3844618C93}", "name": "AGM-88C HARM - High Speed Anti-Radiation Missile", "weight": 406.4}
AIM_120B_AMRAAM___Active_Rdr_AAM = {"clsid": "{C8E06185-7CD6-4C90-959F-044679E90751}", "name": "AIM-120B AMRAAM - Active Rdr AAM", "weight": 156}
AIM_120C_5_AMRAAM___Active_Rdr_AAM = {"clsid": "{40EF17B7-F508-45de-8566-6FFECC0C1AB8}", "name": "AIM-120C-5 AMRAAM - Active Rdr AAM", "weight": 161.5}
AIM_54A_Mk47 = {"clsid": "{AIM_54A_Mk47}", "name": "AIM-54A-Mk47", "weight": 444}
AIM_54A_Mk47_ = {"clsid": "{SHOULDER AIM_54A_Mk47 L}", "name": "AIM-54A-Mk47", "weight": 489.36}
AIM_54A_Mk47__ = {"clsid": "{SHOULDER AIM_54A_Mk47 R}", "name": "AIM-54A-Mk47", "weight": 489.36}
AIM_54A_Mk60 = {"clsid": "{AIM_54A_Mk60}", "name": "AIM-54A-Mk60", "weight": 471.7}
AIM_54A_Mk60_ = {"clsid": "{SHOULDER AIM_54A_Mk60 L}", "name": "AIM-54A-Mk60", "weight": 517.06}
AIM_54A_Mk60__ = {"clsid": "{SHOULDER AIM_54A_Mk60 R}", "name": "AIM-54A-Mk60", "weight": 517.06}
AIM_54C_Mk47 = {"clsid": "{AIM_54C_Mk47}", "name": "AIM-54C-Mk47", "weight": 465.6}
AIM_54C_Mk47_ = {"clsid": "{SHOULDER AIM_54C_Mk47 L}", "name": "AIM-54C-Mk47", "weight": 510.96}
AIM_54C_Mk47_Phoenix_IN__Semi_Active_Radar = {"clsid": "{7575BA0B-7294-4844-857B-031A144B2595}", "name": "AIM-54C-Mk47 Phoenix IN & Semi-Active Radar", "weight": 463}
AIM_54C_Mk47__ = {"clsid": "{SHOULDER AIM_54C_Mk47 R}", "name": "AIM-54C-Mk47", "weight": 510.96}
AIM_7E_Sparrow_Semi_Active_Radar = {"clsid": "{AIM-7E}", "name": "AIM-7E Sparrow Semi-Active Radar", "weight": 230}
AIM_7F = {"clsid": "{SHOULDER AIM-7F}", "name": "AIM-7F", "weight": 284.4}
AIM_7F_ = {"clsid": "{BELLY AIM-7F}", "name": "AIM-7F", "weight": 230}
AIM_7F_Sparrow_Semi_Active_Radar = {"clsid": "{AIM-7F}", "name": "AIM-7F Sparrow Semi-Active Radar", "weight": 231}
AIM_7M = {"clsid": "{SHOULDER AIM-7M}", "name": "AIM-7M", "weight": 284.4}
AIM_7MH = {"clsid": "{SHOULDER AIM-7MH}", "name": "AIM-7MH", "weight": 284.4}
AIM_7MH_ = {"clsid": "{BELLY AIM-7MH}", "name": "AIM-7MH", "weight": 230}
AIM_7MH_Sparrow_Semi_Active_Radar = {"clsid": "{AIM-7H}", "name": "AIM-7MH Sparrow Semi-Active Radar", "weight": 231}
AIM_7M_ = {"clsid": "{BELLY AIM-7M}", "name": "AIM-7M", "weight": 230}
AIM_7M_Sparrow_Semi_Active_Radar = {"clsid": "{8D399DDA-FF81-4F14-904D-099B34FE7918}", "name": "AIM-7M Sparrow Semi-Active Radar", "weight": 231.1}
AIM_9B_Sidewinder_IR_AAM = {"clsid": "{AIM-9B}", "name": "AIM-9B Sidewinder IR AAM", "weight": 74.39}
AIM_9L_Sidewinder_IR_AAM = {"clsid": "{AIM-9L}", "name": "AIM-9L Sidewinder IR AAM", "weight": 85.73}
AIM_9M_Sidewinder_IR_AAM = {"clsid": "{6CEB49FC-DED8-4DED-B053-E1F033FF72D3}", "name": "AIM-9M Sidewinder IR AAM", "weight": 85.73}
AIM_9P5_Sidewinder_IR_AAM = {"clsid": "{AIM-9P5}", "name": "AIM-9P5 Sidewinder IR AAM", "weight": 85.5}
AIM_9P_Sidewinder_IR_AAM = {"clsid": "{9BFD8C90-F7AE-4e90-833B-BFD0CED0E536}", "name": "AIM-9P Sidewinder IR AAM", "weight": 86.18}
AIM_9X_Sidewinder_IR_AAM = {"clsid": "{5CE2FF2A-645A-4197-B48D-8720AC69394F}", "name": "AIM-9X Sidewinder IR AAM", "weight": 84.46}
AJS_External_tank_1013kg_fuel = {"clsid": "{VIGGEN_X-TANK}", "name": "AJS External-tank 1013kg fuel", "weight": 1208}
AKAN_M_55_Gunpod__150_rnds_MINGR55_HE = {"clsid": "{AKAN}", "name": "AKAN M/55 Gunpod, 150 rnds MINGR55-HE", "weight": 276}
ALARM = {"clsid": "{E6747967-B1F0-4C77-977B-AB2E6EB0C102}", "name": "ALARM", "weight": 268}
ALQ_131___ECM_Pod = {"clsid": "{6D21ECEA-F85B-4E8D-9D51-31DC9B8AA4EF}", "name": "ALQ-131 - ECM Pod", "weight": 305}
ALQ_184 = {"clsid": "ALQ_184", "name": "ALQ-184 - ECM Pod", "weight": 215}
ALQ_184_Long = {"clsid": "ALQ_184_Long", "name": "ALQ-184 Long - ECM Pod", "weight": 286}
AN_AAQ_28_LITENING___Targeting_Pod = {"clsid": "{A111396E-D3E8-4b9c-8AC9-2432489304D5}", "name": "AN/AAQ-28 LITENING - Targeting Pod", "weight": 208}
AN_AAQ_28_LITENING___Targeting_Pod_ = {"clsid": "{AAQ-28_LEFT}", "name": "AN/AAQ-28 LITENING - Targeting Pod", "weight": 208}
AN_ALQ_164_DECM_Pod = {"clsid": "{ALQ_164_RF_Jammer}", "name": "AN/ALQ-164 DECM Pod", "weight": 143.789}
AN_ASQ_173_Laser_Spot_Tracker_Strike_CAMera__LST_SCAM_ = {"clsid": "{1C2B16EB-8EB0-43de-8788-8EBB2D70B8BC}", "name": "AN/ASQ-173 Laser Spot Tracker/Strike CAMera (LST/SCAM)", "weight": 250}
AN_ASQ_213_HTS___HARM_Targeting_System = {"clsid": "{AN_ASQ_213}", "name": "AN/ASQ-213 HTS - HARM Targeting System", "weight": 57.2}
AN_ASQ_228_ATFLIR___Targeting_Pod = {"clsid": "{AN_ASQ_228}", "name": "AN/ASQ-228 ATFLIR - Targeting Pod", "weight": 195}
AN_ASQ_T50_TCTS_Pod___ACMI_Pod = {"clsid": "{AIS_ASQ_T50}", "name": "AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 62.6}
AN_M30A1___100lb_GP_Bomb_LD = {"clsid": "{AN_M30A1}", "name": "AN-M30A1 - 100lb GP Bomb LD", "weight": 45.8}
AN_M3___2_Browning_Machine_Guns_12_7mm = {"clsid": "{AN-M3}", "name": "AN-M3 - 2*Browning Machine Guns 12.7mm", "weight": 218}
AN_M57___250lb_GP_Bomb_LD = {"clsid": "{AN_M57}", "name": "AN-M57 - 250lb GP Bomb LD", "weight": 113}
AN_M64___500lb_GP_Bomb_LD = {"clsid": "{AN-M64}", "name": "AN-M64 - 500lb GP Bomb LD", "weight": 227}
AN_M64___500lb_GP_Bomb_LD_ = {"clsid": "{F86ANM64}", "name": "AN-M64 - 500lb GP Bomb LD", "weight": 227}
AN_M65___1000lb_GP_Bomb_LD = {"clsid": "{AN_M65}", "name": "AN-M65 - 1000lb GP Bomb LD", "weight": 475}
AN_M66___2000lb_GP_Bomb_LD = {"clsid": "{AN_M66}", "name": "AN-M66 - 2000lb GP Bomb LD", "weight": 977}
APU_60_1M_with_R_60M__AA_8_Aphid____Infra_Red = {"clsid": "{APU-60-1_R_60M}", "name": "APU-60-1M with R-60M (AA-8 Aphid) - Infra Red", "weight": 76}
APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red = {"clsid": "{B0DBC591-0F52-4F7D-AD7B-51E67725FB81}", "name": "APU-60-2M with 2 x R-60M (AA-8 Aphid) - Infra Red", "weight": 148}
APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red_ = {"clsid": "{275A2855-4A79-4B2D-B082-91EA2ADF4691}", "name": "APU-60-2M with 2 x R-60M (AA-8 Aphid) - Infra Red", "weight": 148}
APU_68___S_24B = {"clsid": "{APU_68_S-24}", "name": "APU-68 - S-24B", "weight": 273.5}
APU_6___6_9A4172_Vikhr = {"clsid": "{A6FD14D3-6D30-4C85-88A7-8D17BEE120E2}", "name": "APU-6 - 6 9A4172 Vikhr", "weight": 330}
APU_8___8_9A4172_Vikhr = {"clsid": "{F789E86A-EE2E-4E6B-B81E-D5E5F903B6ED}", "name": "APU-8 - 8 9A4172 Vikhr", "weight": 404}
ARAK_M_70B_AP_6x_135mm_UnGd_Rkts__Pshu70_HEAT = {"clsid": "{ARAKM70BAP}", "name": "ARAK M/70B AP 6x 135mm UnGd Rkts, Pshu70 HEAT", "weight": 372.2}
ARAK_M_70B_HE_6x_135mm_UnGd_Rkts__Shu70_HE_FRAG = {"clsid": "{ARAKM70BHE}", "name": "ARAK M/70B HE 6x 135mm UnGd Rkts, Shu70 HE/FRAG", "weight": 372.2}
ASO_2___countermeasures_pod = {"clsid": "{ASO-2}", "name": "ASO-2 - countermeasures pod", "weight": 22}
AUF2_BLG_66_AC_x_2 = {"clsid": "{M2KC_RAFAUT_BLG66}", "name": "AUF2 BLG-66-AC x 2", "weight": 685}
AUF2_GBU_12_x_2 = {"clsid": "{M2KC_RAFAUT_GBU12}", "name": "AUF2 GBU-12 x 2", "weight": 621}
AUF2_MK_82_Air_x_2 = {"clsid": "{M2KC_RAFAUT_MK82A}", "name": "AUF2 MK-82 Air x 2", "weight": 525}
AUF2_MK_82_Snakeyes_x_2 = {"clsid": "{M2KC_RAFAUT_MK82S}", "name": "AUF2 MK-82 Snakeyes x 2", "weight": 525}
AUF2_MK_82_x_2 = {"clsid": "{M2KC_RAFAUT_MK82}", "name": "AUF2 MK-82 x 2", "weight": 525}
AUF2_ROCKEYE_x_2 = {"clsid": "{M2KC_RAFAUT_ROCKEYE}", "name": "AUF2 ROCKEYE x 2", "weight": 525}
AWW_13_DATALINK_POD = {"clsid": "{AWW-13}", "name": "AWW-13 DATALINK POD", "weight": 200}
A_A_Training = {"clsid": "{M2KC_AAF}", "name": "A/A Training", "weight": 0}
A_G_Training = {"clsid": "{M2KC_AGF}", "name": "A/G Training", "weight": 0}
BAP_100_Anti_Runway = {"clsid": "{BAP_100}", "name": "BAP-100 Anti-Runway", "weight": None}
BAP_100_x_12 = {"clsid": "{M2KC_BAP100_12_RACK}", "name": "BAP-100 x 12", "weight": 465}
BAP_100_x_18 = {"clsid": "{M2KC_BAP100_18_RACK}", "name": "BAP-100 x 18", "weight": 660}
BAP_100_x_6 = {"clsid": "{M2KC_BAP100_6_RACK}", "name": "BAP-100 x 6", "weight": 270}
BDU_33___25lb_Practice_Bomb_LD = {"clsid": "{BDU-33}", "name": "BDU-33 - 25lb Practice Bomb LD", "weight": 11}
BDU_45 = {"clsid": "{BDU_45}", "name": "BDU-45", "weight": 232}
BDU_45B = {"clsid": "{BDU_45B}", "name": "BDU-45B", "weight": 232}
BDU_45B_ = {"clsid": "{BRU-32 BDU-45B}", "name": "BDU-45B", "weight": 298.38}
BDU_45_ = {"clsid": "{BRU-32 BDU-45}", "name": "BDU-45", "weight": 298.38}
BDU_45_LG = {"clsid": "{BDU_45LG}", "name": "BDU-45 LG", "weight": 277}
BDU_50HD___500lb_Inert_Practice_Bomb_HD = {"clsid": "{BDU-50HD}", "name": "BDU-50HD - 500lb Inert Practice Bomb HD", "weight": 232}
BDU_50LD___500lb_Inert_Practice_Bomb_LD = {"clsid": "{BDU-50LD}", "name": "BDU-50LD - 500lb Inert Practice Bomb LD", "weight": 232}
BDU_50LGB___500lb_Laser_Guided_Inert_Practice_Bomb_LD = {"clsid": "{BDU-50LGB}", "name": "BDU-50LGB - 500lb Laser Guided Inert Practice Bomb LD", "weight": 280}
BETAB_500M___479_kg__bomb__penetrating = {"clsid": "{BETAB-500M}", "name": "BETAB-500M - 479 kg, bomb, penetrating", "weight": 479}
BETAB_500S___425_kg__bomb__penetrating = {"clsid": "{BETAB-500S}", "name": "BETAB-500S - 425 kg, bomb, penetrating", "weight": 425}
Beer_Bomb = {"clsid": "{BEER_BOMB}", "name": "\"Beer Bomb\"", "weight": 100}
Beer_Bomb__D__on_LH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(D)_on_LH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Bitter Ale)", "weight": 104}
Beer_Bomb__D__on_RH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(D)_on_RH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Bitter Ale)", "weight": 104}
Beer_Bomb__L__on_LH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(L)_on_LH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Pale Ale)", "weight": 104}
Beer_Bomb__L__on_RH_Spitfire_Wing_Carrier = {"clsid": "Beer_Bomb_(L)_on_RH_Spitfire_Wing_Carrier", "name": "\"Beer Bomb\" (Pale Ale)", "weight": 104}
Belouga = {"clsid": "{BLG66_BELOUGA}", "name": "Belouga", "weight": 290}
BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bomb = {"clsid": "{BD289E34-DF84-4C5E-9220-4B14C346E79D}", "name": "BetAB-500ShP - 500kg Concrete Piercing HD w booster Bomb", "weight": 424}
BetAB_500___500kg_Concrete_Piercing_Bomb_LD = {"clsid": "{35B698AC-9FEF-4EC4-AD29-484A0085F62B}", "name": "BetAB-500 - 500kg Concrete Piercing Bomb LD", "weight": 430}
BF109K_4_FUEL_TANK = {"clsid": "BF109K_4_FUEL_TANK", "name": "300 liter Fuel Tank", "weight": 266}
BGM_109 = {"clsid": "BGM_109", "name": "BGM-109B Tomahawk", "weight": None}
BGM_109B = {"clsid": "BGM-109B", "name": "BGM-109B", "weight": None}
BIN_200 = {"clsid": "BIN_200", "name": "BIN-200 - 200kg Napalm Incendiary Bomb", "weight": 200}
BKF___12_x_AO_2_5RT = {"clsid": "{BKF_AO2_5RT}", "name": "BKF - 12 x AO-2.5RT", "weight": 76}
BKF___12_x_PTAB_2_5KO = {"clsid": "{BKF_PTAB2_5KO}", "name": "BKF - 12 x PTAB-2.5KO", "weight": 63.2}
BK_90_MJ12__12x_MJ2_HEAT___36x_MJ1_HE_FRAG_Bomblets_ = {"clsid": "{BK90}", "name": "BK-90 MJ1+2 (12x MJ2 HEAT / 36x MJ1 HE-FRAG Bomblets)", "weight": 605}
BK_90_MJ1__72_x_MJ1_HE_FRAG_Bomblets_ = {"clsid": "{BK90MJ1}", "name": "BK-90 MJ1 (72 x MJ1 HE-FRAG Bomblets)", "weight": 605}
BK_90_MJ2__24_x_MJ2_HEAT_Bomblets_ = {"clsid": "{BK90MJ2}", "name": "BK-90 MJ2 (24 x MJ2 HEAT Bomblets)", "weight": 605}
BLG_66_AC_Belouga = {"clsid": "{BLG66_BELOUGA_AC}", "name": "BLG-66-AC Belouga", "weight": 305}
BLG_66_Belouga___290kg_CBU__151_Frag_Pen_bomblets = {"clsid": "{BLG66_AC}", "name": "BLG-66 Belouga - 290kg CBU, 151 Frag/Pen bomblets", "weight": 305}
BLU_107___440lb_Anti_Runway_Penetrator_Bomb = {"clsid": "{752B9781-F962-11d5-9190-00A0249B6F00}", "name": "BLU-107 - 440lb Anti-Runway Penetrator Bomb", "weight": 185}
BL_755_CBU___450kg__147_Frag_Pen_bomblets = {"clsid": "{08164777-5E9C-4B08-B48E-5AA7AFB246E2}", "name": "BL-755 CBU - 450kg, 147 Frag/Pen bomblets", "weight": 264}
BOZ_107___Countermeasure_Dispenser = {"clsid": "{8C3F26A1-FA0F-11d5-9190-00A0249B6F00}", "name": "BOZ-107 - Countermeasure Dispenser", "weight": 200}
BRU_33_with_1_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = {"clsid": "{BRU33_LAU10}", "name": "BRU-33 with 1 x LAU-10 pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", "weight": 407.6}
BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{BRU33_LAU61}", "name": "BRU-33 with 1 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 364.4}
BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{BRU33_LAU61_M282}", "name": "BRU-33 with 1 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 400.88}
BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{BRU33_LAU68}", "name": "BRU-33 with 1 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 204.9}
BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{BRU33_LAU68_M282}", "name": "BRU-33 with 1 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 218.34}
BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = {"clsid": "{BRU33_LAU68_MK5}", "name": "BRU-33 with 1 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 193.1}
BRU_33_with_2_x_BDU_45B___500lb_Practice_Bomb = {"clsid": "{BRU33_2X_BDU-45B}", "name": "BRU-33 with 2 x BDU-45B - 500lb Practice Bomb", "weight": 555}
BRU_33_with_2_x_BDU_45_LG_500lb_Practice_Laser_Guided_Bomb = {"clsid": "{BRU33_2X_BDU_45LG}", "name": "BRU-33 with 2 x BDU-45 LG 500lb Practice Laser Guided Bomb", "weight": 645}
BRU_33_with_2_x_BDU_45___500lb_Practice_Bomb = {"clsid": "{BRU33_2X_BDU-45}", "name": "BRU-33 with 2 x BDU-45 - 500lb Practice Bomb", "weight": 555}
BRU_33_with_2_x_CBU_99___490lbs__247_x_HEAT_Bomblets = {"clsid": "{BRU33_2X_CBU-99}", "name": "BRU-33 with 2 x CBU-99 - 490lbs, 247 x HEAT Bomblets", "weight": 535}
BRU_33_with_2_x_GBU_12___500lb_Laser_Guided_Bomb = {"clsid": "{BRU33_2X_GBU-12}", "name": "BRU-33 with 2 x GBU-12 - 500lb Laser Guided Bomb", "weight": 645}
BRU_33_with_2_x_GBU_16___1000lb_Laser_Guided_Bomb = {"clsid": "{BRU33_2X_GBU-16}", "name": "BRU-33 with 2 x GBU-16 - 1000lb Laser Guided Bomb", "weight": 1117}
BRU_33_with_2_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = {"clsid": "{BRU33_2*LAU10}", "name": "BRU-33 with 2 x LAU-10 pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", "weight": 724.2}
BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{BRU33_2*LAU61}", "name": "BRU-33 with 2 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 637.8}
BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{BRU33_2*LAU61_M282}", "name": "BRU-33 with 2 x LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 710.76}
BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{BRU33_2*LAU68}", "name": "BRU-33 with 2 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 318.8}
BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{BRU33_2*LAU68_M282}", "name": "BRU-33 with 2 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 345.68}
BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = {"clsid": "{BRU33_2*LAU68_MK5}", "name": "BRU-33 with 2 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 295.2}
BRU_33_with_2_x_Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets = {"clsid": "{BRU33_2X_ROCKEYE}", "name": "BRU-33 with 2 x Mk-20 Rockeye - 490lbs CBU, 247 x HEAT Bomblets", "weight": 535}
BRU_33_with_2_x_Mk_82Y___500lb_GP_Chute_Retarded_HD = {"clsid": "{BRU33_2X_MK-82Y}", "name": "BRU-33 with 2 x Mk-82Y - 500lb GP Chute Retarded HD", "weight": 555}
BRU_33_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{BRU33_2X_MK-82_Snakeye}", "name": "BRU-33 with 2 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 590}
BRU_33_with_2_x_Mk_82___500lb_GP_Bomb_LD = {"clsid": "{BRU33_2X_MK-82}", "name": "BRU-33 with 2 x Mk-82 - 500lb GP Bomb LD", "weight": 547}
BRU_33_with_2_x_Mk_83___1000lb_GP_Bomb_LD = {"clsid": "{BRU33_2X_MK-83}", "name": "BRU-33 with 2 x Mk-83 - 1000lb GP Bomb LD", "weight": 999}
BRU_41A_with_6_x_BDU_33___25lb_Practice_Bomb_LD = {"clsid": "{BRU41_6X_BDU-33}", "name": "BRU-41A with 6 x BDU-33 - 25lb Practice Bomb LD", "weight": 195.713}
BRU_41A_with_6_x_Mk_82___500lb_GP_Bomb_LD = {"clsid": "{BRU41_6X_MK-82}", "name": "BRU-41A with 6 x Mk-82 - 500lb GP Bomb LD", "weight": 1495.913}
BRU_42_3_BDU_33 = {"clsid": "BRU-42_3*BDU-33", "name": "BRU-42 with 3 x BDU-33 - 25lb Practice Bombs LD", "weight": 90.15}
BRU_42_3_GBU_12 = {"clsid": "BRU-42_3*GBU-12", "name": "BRU-42 with 3 x GBU-12 - 500lb Laser Guided Bombs", "weight": 887.25}
BRU_42_LS = {"clsid": "BRU-42_LS", "name": "BRU-42 - Triple Ejector Rack (TER)", "weight": 56.25}
BRU_42_with_2_x_GBU_10___2000lb_Laser_Guided_Bombs = {"clsid": "{62BE78B1-9258-48AE-B882-279534C0D278}", "name": "BRU-42 with 2 x GBU-10 - 2000lb Laser Guided Bombs", "weight": 1974.25}
BRU_42_with_2_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs = {"clsid": "{EB969276-1922-4ED1-A5CB-18590F45D7FE}", "name": "BRU-42 with 2 x GBU-27 - 2000lb Laser Guided Penetrator Bombs", "weight": 2038.25}
BRU_42_with_3_x_GBU_16___1000lb_Laser_Guided_Bombs = {"clsid": "{88D49E04-78DF-4F08-B47E-B81247A9E3C5}", "name": "BRU-42 with 3 x GBU-16 - 1000lb Laser Guided Bombs", "weight": 1595.25}
BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS = {"clsid": "{LAU-131x3 - 7 AGR-20A}", "name": "BRU-42 with 3 x LAU-131 pods - 7 x 2.75\" Hydra, Laser Guided Rkts M151, HE APKWS", "weight": 454.3}
BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS = {"clsid": "{LAU-131x3 - 7 AGR-20 M282}", "name": "BRU-42 with 3 x LAU-131 pods - 7 x 2.75\" Hydra, Laser Guided Rkts M282, MPP APKWS", "weight": 496.3}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{64329ED9-B14C-4c0b-A923-A3C911DA1527}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 397.95}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{C2593383-3CA8-4b18-B73D-0E750BCA1C85}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 399.63}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum = {"clsid": "{E6966004-A525-4f47-AF94-BCFEDF8FDBDA}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 412.65}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk = {"clsid": "{4C044B08-886B-46c8-9B1F-AB05B3ED9C1D}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 395.85}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice = {"clsid": "{443364AE-D557-488e-9499-45EDB3BA6730}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 368.76}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = {"clsid": "{9BC82B3D-FE70-4910-B2B7-3E54EFE73262}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 362.46}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice = {"clsid": "{C0FA251E-B645-4ce5-926B-F4BC20822F8B}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 368.76}
BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice = {"clsid": "{A1853B38-2160-4ffe-B7E9-9BF81E6C3D77}", "name": "BRU-42 with 3 x LAU-68 pods - 21 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 395.85}
BRU_42_with_3_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{BRU_42_3xLAU68_M282}", "name": "BRU-42 with 3 x LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 438.27}
BRU_42_with_3_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{B83CB620-5BBE-4BEA-910C-EB605A327EF9}", "name": "BRU-42 with 3 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 722.25}
BRU_42_with_3_x_Mk_81___250lb_GP_Bombs_LD = {"clsid": "{7B34E0BB-E427-4C2A-A61A-8407CE18B54D}", "name": "BRU-42 with 3 x Mk-81 - 250lb GP Bombs LD", "weight": 396.45}
BRU_42_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bombs_HD = {"clsid": "{BRU-42_3*Mk-82AIR}", "name": "BRU-42 with 3 x Mk-82 AIR Ballute - 500lb GP Bombs HD", "weight": 782.25}
BRU_42_with_3_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{60CC734F-0AFA-4E2E-82B8-93B941AB11CF}", "name": "BRU-42 with 3 x Mk-82 - 500lb GP Bombs LD", "weight": 740.25}
BRU_42_with_3_x_SUU_25_x_8_LUU_2___Target_Marker_Flares = {"clsid": "{BRU-42_LS_3*SUU-25_8*LUU-2}", "name": "BRU-42 with 3 x SUU-25 x 8 LUU-2 - Target Marker Flares", "weight": 736.65}
BRU_55_with_2_x_AGM_154A___JSOW_CEB__CBU_type_ = {"clsid": "{BRU55_2*AGM-154A}", "name": "BRU-55 with 2 x AGM-154A - JSOW CEB (CBU-type)", "weight": 1057.5}
BRU_55_with_2_x_AGM_154C___JSOW_Unitary_BROACH = {"clsid": "{BRU55_2*AGM-154C}", "name": "BRU-55 with 2 x AGM-154C - JSOW Unitary BROACH", "weight": 1055.5}
BRU_55_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb = {"clsid": "{BRU55_2*GBU-38}", "name": "BRU-55 with 2 x GBU-38 - JDAM, 500lb GPS Guided Bomb", "weight": 573}
BRU_57_with_2_x_AGM_154A___JSOW_CEB__CBU_type_ = {"clsid": "{BRU57_2*AGM-154A}", "name": "BRU-57 with 2 x AGM-154A - JSOW CEB (CBU-type)", "weight": 1082}
BRU_57_with_2_x_AGM_154B___JSOW_Anti_Armour = {"clsid": "{BRU57_2*AGM-154B}", "name": "BRU-57 with 2 x AGM-154B - JSOW Anti-Armour", "weight": 1082}
BRU_57_with_2_x_CBU_103___202_x_CEM__CBU_with_WCMD = {"clsid": "{BRU57_2*CBU-103}", "name": "BRU-57 with 2 x CBU-103 - 202 x CEM, CBU with WCMD", "weight": 951}
BRU_57_with_2_x_CBU_105___10_x_SFW__CBU_with_WCMD = {"clsid": "{BRU57_2*CBU-105}", "name": "BRU-57 with 2 x CBU-105 - 10 x SFW, CBU with WCMD", "weight": 925}
BRU_57_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb = {"clsid": "{BRU57_2*GBU-38}", "name": "BRU-57 with 2 x GBU-38 - JDAM, 500lb GPS Guided Bomb", "weight": 573}
BR_250 = {"clsid": "BR_250", "name": "BR-250 - 250kg GP Bomb LD", "weight": 250}
BR_500 = {"clsid": "BR_500", "name": "BR-500 - 500kg GP Bomb LD", "weight": 500}
British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier = {"clsid": "British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier", "name": "250 lb GP Mk.I", "weight": 108.326}
British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier = {"clsid": "British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier", "name": "250 lb GP Mk.I", "weight": 108.326}
British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3 = {"clsid": "British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3", "name": "500 lb GP Mk.I", "weight": 225.188}
B_13L_pod___5_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag = {"clsid": "{FC56DF80-9B09-44C5-8976-DCFAFF219062}", "name": "B-13L pod - 5 x S-13-OF, 122mm UnGd Rkts, Blast/Frag", "weight": 510}
B_1B_Mk_84_8 = {"clsid": "B-1B_Mk-84*8", "name": "8 x Mk-84 - 2000lb GP Bombs LD", "weight": 7152}
B_8M1_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP = {"clsid": "{F72F47E5-C83A-4B85-96ED-D3E46671EE9A}", "name": "B-8M1 pod - 20 x S-8KOM, 80mm UnGd Rkts, HEAT/AP", "weight": 363.5}
B_8M1_pod___20_x_S_8TsM__80mm_UnGd_Rkts__Smk = {"clsid": "{3DFB7320-AB0E-11d7-9897-000476191836}", "name": "B-8M1 pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk", "weight": 359.5}
B_8M1___20_S_8OFP2 = {"clsid": "B-8M1 - 20 S-8OFP2", "name": "B-8M1 pod - 20 x S-8OFP2, 80mm UnGd Rkts, HE/Frag/AP", "weight": 471.5}
B_8V20A_CM = {"clsid": "B_8V20A_CM", "name": "B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, OG", "weight": 345}
B_8V20A_CM_BU = {"clsid": "B_8V20A_CM_BU", "name": "B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, BU", "weight": 345}
B_8V20A_CM_GN = {"clsid": "B_8V20A_CM_GN", "name": "B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, GN", "weight": 345}
B_8V20A_CM_RD = {"clsid": "B_8V20A_CM_RD", "name": "B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, RD", "weight": 345}
B_8V20A_CM_VT = {"clsid": "B_8V20A_CM_VT", "name": "B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, VT", "weight": 345}
B_8V20A_CM_WH = {"clsid": "B_8V20A_CM_WH", "name": "B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, WH", "weight": 345}
B_8V20A_CM_YE = {"clsid": "B_8V20A_CM_YE", "name": "B-8V20A pod - 20 x S-8TsM, 80mm UnGd Rkts, Smk, YE", "weight": 345}
B_8V20A_OFP2 = {"clsid": "B_8V20A_OFP2", "name": "B-8V20A pod - 20 x S-8OFP2, 80mm UnGd Rkts, HE/Frag/AP", "weight": 457}
B_8V20A_OM = {"clsid": "B_8V20A_OM", "name": "B-8V20A pod - 20 x S-8OM, 80mm UnGd Rkts, Illum", "weight": 365}
B_8V20A_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP = {"clsid": "{6A4B9E69-64FE-439a-9163-3A87FB6A4D81}", "name": "B-8V20A pod - 20 x S-8KOM, 80mm UnGd Rkts, HEAT/AP", "weight": 349}
CATM_9M = {"clsid": "CATM-9M", "name": "Captive AIM-9M for ACM", "weight": 85.73}
CBLS_200 = {"clsid": "CBLS-200", "name": "4*BDU-33 - AF/B37K Rack with 4*25lb Practice Bomb LD", "weight": 84.4}
CBU87_10 = {"clsid": "CBU87*10", "name": "10 x CBU-87 - 202 x CEM Cluster Bombs", "weight": 4300}
CBU97_10 = {"clsid": "CBU97*10", "name": "10 x CBU-97 - 10 x SFW Cluster Bombs", "weight": 4170}
CBU_103___202_x_CEM__CBU_with_WCMD = {"clsid": "{CBU_103}", "name": "CBU-103 - 202 x CEM, CBU with WCMD", "weight": 430}
CBU_105___10_x_SFW__CBU_with_WCMD = {"clsid": "{CBU_105}", "name": "CBU-105 - 10 x SFW, CBU with WCMD", "weight": 417}
CBU_52B___220_x_HE_Frag_bomblets = {"clsid": "{CBU-52B}", "name": "CBU-52B - 220 x HE/Frag bomblets", "weight": 356}
CBU_87___202_x_CEM_Cluster_Bomb = {"clsid": "{CBU-87}", "name": "CBU-87 - 202 x CEM Cluster Bomb", "weight": 430}
CBU_97___10_x_SFW_Cluster_Bomb = {"clsid": "{5335D97A-35A5-4643-9D9B-026C75961E52}", "name": "CBU-97 - 10 x SFW Cluster Bomb", "weight": 417}
CBU_99___490lbs__247_x_HEAT_Bomblets = {"clsid": "{CBU_99}", "name": "CBU-99 - 490lbs, 247 x HEAT Bomblets", "weight": 222}
CM_802AKG = {"clsid": "{CM_802AKG}", "name": "CM-802AKG", "weight": None}
C_802AK = {"clsid": "{C_802AK}", "name": "C-802AK", "weight": 600}
DEFA_553___30mm_Revolver_Cannon = {"clsid": "{C-101-DEFA553}", "name": "DEFA-553 - 30mm Revolver Cannon", "weight": 218}
DIS_AKD_10 = {"clsid": "DIS_AKD-10", "name": "AKD-10", "weight": 58}
DIS_AKG_DLPOD = {"clsid": "DIS_AKG_DLPOD", "name": "DATA-LINK POD", "weight": 295}
DIS_BOMB_250_2 = {"clsid": "DIS_BOMB_250_2", "name": "250-2 - 250kg GP Bombs HD", "weight": 250}
DIS_BOMB_250_3 = {"clsid": "DIS_BOMB_250_3", "name": "250-3 - 250kg GP Bombs LD", "weight": 250}
DIS_BRM1_90 = {"clsid": "DIS_BRM1_90", "name": "BRM-1_90MM", "weight": 462.5}
DIS_CM_802AKG = {"clsid": "DIS_CM-802AKG", "name": "CM-802AKG", "weight": 765}
DIS_C_701IR = {"clsid": "DIS_C-701IR", "name": "C-701IR", "weight": 170}
DIS_C_701T = {"clsid": "DIS_C-701T", "name": "C-701T", "weight": 170}
DIS_C_802AK = {"clsid": "DIS_C-802AK", "name": "C-802AK", "weight": 765}
DIS_DF4A_KD20 = {"clsid": "DIS_DF4A_KD20", "name": "KD-20", "weight": 1750}
DIS_DF4B_YJ12 = {"clsid": "DIS_DF4B_YJ12", "name": "YJ-12", "weight": 2550}
DIS_GB6 = {"clsid": "DIS_GB6", "name": "GB-6", "weight": 672}
DIS_GB6_HE = {"clsid": "DIS_GB6_HE", "name": "GB-6-HE", "weight": 672}
DIS_GB6_TSP = {"clsid": "DIS_GB6_TSP", "name": "GB-6-SFW", "weight": 672}
DIS_GBU_10 = {"clsid": "DIS_GBU_10", "name": "GBU-10", "weight": 1162}
DIS_GBU_12 = {"clsid": "DIS_GBU_12", "name": "GBU-12", "weight": 275}
DIS_GBU_12_DUAL_GDJ_II19_L = {"clsid": "DIS_GBU_12_DUAL_GDJ_II19_L", "name": "GDJ-II19 - 2 x GBU-12", "weight": 629}
DIS_GBU_12_DUAL_GDJ_II19_R = {"clsid": "DIS_GBU_12_DUAL_GDJ_II19_R", "name": "GDJ-II19 - 2 x GBU-12", "weight": 629}
DIS_GBU_16 = {"clsid": "DIS_GBU_16", "name": "GBU-16", "weight": 564}
DIS_GDJ_KD63 = {"clsid": "DIS_GDJ_KD63", "name": "KD-63", "weight": 2050}
DIS_GDJ_KD63B = {"clsid": "DIS_GDJ_KD63B", "name": "KD-63B", "weight": 2050}
DIS_GDJ_YJ83K = {"clsid": "DIS_GDJ_YJ83K", "name": "YJ-83K", "weight": 765}
DIS_H6_250_2_N12 = {"clsid": "DIS_H6_250_2_N12", "name": "12 x 250-2 - 250kg GP Bombs HD", "weight": 3000}
DIS_H6_250_2_N24 = {"clsid": "DIS_H6_250_2_N24", "name": "24 x 250-2 - 250kg GP Bombs HD", "weight": 6000}
DIS_KD20 = {"clsid": "DIS_KD20", "name": "KD-20", "weight": 1700}
DIS_KD63 = {"clsid": "DIS_KD63", "name": "KD-63", "weight": 2000}
DIS_KD63B = {"clsid": "DIS_KD63B", "name": "KD-63B", "weight": 2000}
DIS_LAU68_MK5_DUAL_GDJ_II19_L = {"clsid": "DIS_LAU68_MK5_DUAL_GDJ_II19_L", "name": "GDJ-II19 - 2 x LAU68 MK5", "weight": 261.06}
DIS_LAU68_MK5_DUAL_GDJ_II19_R = {"clsid": "DIS_LAU68_MK5_DUAL_GDJ_II19_R", "name": "GDJ-II19 - 2 x LAU68 MK5", "weight": 261.06}
DIS_LD_10 = {"clsid": "DIS_LD-10", "name": "LD-10", "weight": 289}
DIS_LD_10_DUAL_L = {"clsid": "DIS_LD-10_DUAL_L", "name": "LD-10 x 2", "weight": 558}
DIS_LD_10_DUAL_R = {"clsid": "DIS_LD-10_DUAL_R", "name": "LD-10 x 2", "weight": 558}
DIS_LS_6_500 = {"clsid": "DIS_LS_6_500", "name": "LS-6-500", "weight": 570}
DIS_MER6_250_3_N6 = {"clsid": "DIS_MER6_250_3_N6", "name": "MER6 - 6 x 250-3 - 250kg GP Bombs LD", "weight": 1550}
DIS_MK_20 = {"clsid": "DIS_MK_20", "name": "Mk-20", "weight": 222}
DIS_MK_20_DUAL_GDJ_II19_L = {"clsid": "DIS_MK_20_DUAL_GDJ_II19_L", "name": "GDJ-II19 - 2 x Mk-20", "weight": 523}
DIS_MK_20_DUAL_GDJ_II19_R = {"clsid": "DIS_MK_20_DUAL_GDJ_II19_R", "name": "GDJ-II19 - 2 x Mk-20", "weight": 523}
DIS_MK_82S_DUAL_GDJ_II19_L = {"clsid": "DIS_MK_82S_DUAL_GDJ_II19_L", "name": "GDJ-II19 - 2 x Mk-82 SnakeEye", "weight": 543}
DIS_MK_82S_DUAL_GDJ_II19_R = {"clsid": "DIS_MK_82S_DUAL_GDJ_II19_R", "name": "GDJ-II19 - 2 x Mk-82 SnakeEye", "weight": 543}
DIS_MK_82_DUAL_GDJ_II19_L = {"clsid": "DIS_MK_82_DUAL_GDJ_II19_L", "name": "GDJ-II19 - 2 x Mk-82", "weight": 561}
DIS_MK_82_DUAL_GDJ_II19_R = {"clsid": "DIS_MK_82_DUAL_GDJ_II19_R", "name": "GDJ-II19 - 2 x Mk-82", "weight": 561}
DIS_PL_12 = {"clsid": "DIS_PL-12", "name": "PL-12", "weight": 199}
DIS_PL_5EII = {"clsid": "DIS_PL-5EII", "name": "PL-5EII", "weight": 153}
DIS_PL_8A = {"clsid": "DIS_PL-8A", "name": "PL-8A", "weight": 115}
DIS_PL_8B = {"clsid": "DIS_PL-8B", "name": "PL-8B", "weight": 115}
DIS_RKT_90_UG = {"clsid": "DIS_RKT_90_UG", "name": "UG_90MM", "weight": 382.5}
DIS_SD_10 = {"clsid": "DIS_SD-10", "name": "SD-10", "weight": 289}
DIS_SD_10_DUAL_L = {"clsid": "DIS_SD-10_DUAL_L", "name": "SD-10 x 2", "weight": 558}
DIS_SD_10_DUAL_R = {"clsid": "DIS_SD-10_DUAL_R", "name": "SD-10 x 2", "weight": 558}
DIS_SMOKE_GENERATOR_B = {"clsid": "DIS_SMOKE_GENERATOR_B", "name": "Smoke Generator - blue", "weight": 0}
DIS_SMOKE_GENERATOR_G = {"clsid": "DIS_SMOKE_GENERATOR_G", "name": "Smoke Generator - green", "weight": 0}
DIS_SMOKE_GENERATOR_O = {"clsid": "DIS_SMOKE_GENERATOR_O", "name": "Smoke Generator - orange", "weight": 0}
DIS_SMOKE_GENERATOR_R = {"clsid": "DIS_SMOKE_GENERATOR_R", "name": "Smoke Generator - red", "weight": 0}
DIS_SMOKE_GENERATOR_W = {"clsid": "DIS_SMOKE_GENERATOR_W", "name": "Smoke Generator - white", "weight": 0}
DIS_SMOKE_GENERATOR_Y = {"clsid": "DIS_SMOKE_GENERATOR_Y", "name": "Smoke Generator - yellow", "weight": 0}
DIS_SPJ_POD = {"clsid": "DIS_SPJ_POD", "name": "KG-600", "weight": 270}
DIS_TANK1100 = {"clsid": "DIS_TANK1100", "name": "1100L Tank", "weight": 1064}
DIS_TANK1100_EMPTY = {"clsid": "DIS_TANK1100_EMPTY", "name": "1100L Tank Empty", "weight": 75}
DIS_TANK800 = {"clsid": "DIS_TANK800", "name": "800L Tank", "weight": 730}
DIS_TANK800_EMPTY = {"clsid": "DIS_TANK800_EMPTY", "name": "800L Tank Empty", "weight": 45}
DIS_TYPE200 = {"clsid": "DIS_TYPE200", "name": "TYPE-200A", "weight": 200}
DIS_TYPE200_DUAL_L = {"clsid": "DIS_TYPE200_DUAL_L", "name": "TYPE-200A Dual", "weight": 400}
DIS_TYPE200_DUAL_R = {"clsid": "DIS_TYPE200_DUAL_R", "name": "TYPE-200A Dual", "weight": 400}
DIS_WMD7 = {"clsid": "DIS_WMD7", "name": "WMD7 POD", "weight": 295}
DIS_YJ12 = {"clsid": "DIS_YJ12", "name": "YJ-12", "weight": 2500}
DIS_YJ83K = {"clsid": "DIS_YJ83K", "name": "YJ-83K", "weight": 715}
DWS39_MJ1 = {"clsid": "{DWS39_MJ1}", "name": "DWS39 MJ1", "weight": 605}
DWS39_MJ1_MJ2 = {"clsid": "{DWS39_MJ1_MJ2}", "name": "DWS39 MJ1-MJ2", "weight": 605}
DWS39_MJ2 = {"clsid": "{DWS39_MJ2}", "name": "DWS39 MJ2", "weight": 605}
Eclair = {"clsid": "{Eclair}", "name": "Eclair", "weight": 20}
ER_4_SC50 = {"clsid": "ER_4_SC50", "name": "4 x SC 50 - 50kg GP Bomb LD", "weight": 220}
ETHER = {"clsid": "{0519A261-0AB6-11d6-9193-00A0249B6F00}", "name": "ETHER", "weight": 200}
FAB_100M = {"clsid": "FAB_100M", "name": "FAB-100M - 100kg GP Bomb LD", "weight": 100}
FAB_100M_ = {"clsid": "FAB_100M", "name": "FAB-100M", "weight": 100}
FAB_100_x_4 = {"clsid": "{FAB-100-4}", "name": "FAB-100 x 4", "weight": 465}
FAB_100___100kg_GP_Bomb_LD = {"clsid": "{FB3CE165-BF07-4979-887C-92B87F13276B}", "name": "FAB-100 - 100kg GP Bomb LD", "weight": 100}
FAB_1500_M_54___1500kg_GP_Bomb_LD = {"clsid": "{40AA4ABE-D6EB-4CD6-AEFE-A1A0477B24AB}", "name": "FAB-1500 M-54 - 1500kg GP Bomb LD", "weight": 1392}
FAB_250_M54_TU___235_kg__bomb__parachute = {"clsid": "{FAB-250-M54-TU}", "name": "FAB-250 M54 TU - 235 kg, bomb, parachute", "weight": 235}
FAB_250_M54___235_kg__bomb__parachute = {"clsid": "{FAB-250-M54}", "name": "FAB-250 M54 - 235 kg, bomb, parachute", "weight": 235}
FAB_250_M62___250kg_GP_Bomb_LD = {"clsid": "{FAB_250_M62}", "name": "FAB-250-M62 - 250kg GP Bomb LD", "weight": 227}
FAB_250___250kg_GP_Bomb_LD = {"clsid": "{3C612111-C7AD-476E-8A8E-2485812F4E5C}", "name": "FAB-250 - 250kg GP Bomb LD", "weight": 250}
FAB_50 = {"clsid": "FAB_50", "name": "FAB-50 - 50kg GP Bomb LD", "weight": 50}
FAB_500_M54_TU___480_kg__bomb__parachute = {"clsid": "{FAB-500-M54-TU}", "name": "FAB-500 M54 TU - 480 kg, bomb, parachute", "weight": 480}
FAB_500_M54___474_kg__bomb__free_fall = {"clsid": "{FAB-500-M54}", "name": "FAB-500 M54 - 474 kg, bomb, free-fall", "weight": 474}
FAB_500_M_62___500kg_GP_Bomb_LD = {"clsid": "{37DCC01E-9E02-432F-B61D-10C166CA2798}", "name": "FAB-500 M-62 - 500kg GP Bomb LD", "weight": 506}
FAB_500_SL___515_kg__bomb__parachute = {"clsid": "{FAB-500-SL}", "name": "FAB-500 SL - 515 kg, bomb, parachute", "weight": 515}
FAB_500_TA___477_kg__bomb__free_fall = {"clsid": "{FAB-500-TA}", "name": "FAB-500 TA - 477 kg, bomb, free-fall", "weight": 477}
FAB_50_ = {"clsid": "FAB_50", "name": "FAB-50", "weight": 50}
FIM_92 = {"clsid": "FIM_92", "name": "STINGER", "weight": None}
FPU_8A_Fuel_Tank_330_gallons = {"clsid": "{FPU_8A_FUEL_TANK}", "name": "FPU-8A Fuel Tank 330 gallons", "weight": 1150}
Fuel_Tank_120_gallons = {"clsid": "{PTB_120_F86F35}", "name": "Fuel Tank 120 gallons", "weight": 413.36}
Fuel_Tank_150_liters = {"clsid": "{PTB_150L_L39}", "name": "Fuel Tank 150 liters", "weight": 124.25}
Fuel_Tank_200_gallons = {"clsid": "{PTB_200_F86F35}", "name": "Fuel Tank 200 gallons", "weight": 675.6}
Fuel_Tank_350_liters = {"clsid": "{PTB_350L_L39}", "name": "Fuel Tank 350 liters", "weight": 283.25}
Fuel_Tank_490_L_Central__21_ = {"clsid": "{PTB_490C_MIG21}", "name": "Fuel Tank 490 L Central (21)", "weight": 434}
Fuel_Tank_490_L__21_ = {"clsid": "{PTB_490_MIG21}", "name": "Fuel Tank 490 L (21)", "weight": 434}
Fuel_Tank_800_L__21_ = {"clsid": "{PTB_800_MIG21}", "name": "Fuel Tank 800 L (21)", "weight": 682}
Fuel_Tank_FT600 = {"clsid": "Fuel_Tank_FT600", "name": "Fuel tank FT600", "weight": 1925}
Fuel_tank_1150L = {"clsid": "{414E383A-59EB-41BC-8566-2B5E0788ED1F}", "name": "Fuel tank 1150L", "weight": 975.25}
Fuel_tank_1150L_MiG_29 = {"clsid": "{C0FF4842-FBAC-11d5-9190-00A0249B6F00}", "name": "Fuel tank 1150L MiG-29", "weight": 975.25}
Fuel_tank_1400L = {"clsid": "{2BEC576B-CDF5-4B7F-961F-B0FA4312B841}", "name": "Fuel tank 1400L", "weight": 1262.5}
Fuel_tank_2000L = {"clsid": "{16602053-4A12-40A2-B214-AB60D481B20E}", "name": "Fuel tank 2000L", "weight": 1700}
Fuel_tank_3000L = {"clsid": "{7D7EC917-05F6-49D4-8045-61FC587DD019}", "name": "Fuel tank 3000L", "weight": 2550}
Fuel_tank_300_gal = {"clsid": "{8A0BE8AE-58D4-4572-9263-3144C0D06364}", "name": "Fuel tank 300 gal", "weight": 1083.5076415}
Fuel_tank_300_gal_ = {"clsid": "{F14-300gal}", "name": "Fuel tank 300 gal", "weight": 958.4}
Fuel_tank_300_gal__empty_ = {"clsid": "{F14-300gal-empty}", "name": "Fuel tank 300 gal (empty)", "weight": 70}
Fuel_tank_330_gal = {"clsid": "{EFEC8200-B922-11d7-9897-000476191836}", "name": "Fuel tank 330 gal", "weight": 1067.750921}
Fuel_tank_330_gal_ = {"clsid": "{EFEC8201-B922-11d7-9897-000476191836}", "name": "Fuel tank 330 gal", "weight": 1067.750921}
Fuel_tank_367_gal = {"clsid": "{82364E69-5564-4043-A866-E13032926C3E}", "name": "Fuel tank 367 gal", "weight": 1181.8623879}
Fuel_tank_370_gal = {"clsid": "{F376DBEE-4CAE-41BA-ADD9-B2910AC95DEC}", "name": "Fuel tank 370 gal", "weight": 1338.1101068}
Fuel_tank_5000L = {"clsid": "{0855A3A1-FA50-4C89-BDBB-5D5360ABA071}", "name": "Fuel tank 5000L", "weight": 4420}
Fuel_tank_610_gal = {"clsid": "{E1F29B21-F291-4589-9FD8-3272EEC69506}", "name": "Fuel tank 610 gal", "weight": 2010.8766885}
Fuel_tank_800L = {"clsid": "{A5BAEAB7-6FAF-4236-AF72-0FD900F493F9}", "name": "Fuel tank 800L", "weight": 680}
Fuel_tank_800L_Wing = {"clsid": "{E8D4652F-FD48-45B7-BA5B-2AE05BB5A9CF}", "name": "Fuel tank 800L Wing", "weight": 760}
Fuel_tank_PTB_450 = {"clsid": "{B99EE8A8-99BC-4a8d-89AC-A26831920DCE}", "name": "Fuel tank PTB-450", "weight": 550}
Fuel_tank_PTB_450_ = {"clsid": "{PTB_450}", "name": "Fuel tank PTB-450", "weight": 550}
Fuel_tank_S_3 = {"clsid": "{A504D93B-4E80-4B4F-A533-0D9B65F2C55F}", "name": "Fuel tank S-3", "weight": 964}
FW109_FUEL_TANK = {"clsid": "FW109_FUEL_TANK", "name": "300 liter Fuel Tank Type E2", "weight": 266}
F_4_Fuel_tank_C = {"clsid": "{8B9E3FD0-F034-4A07-B6CE-C269884CC71B}", "name": "F-4 Fuel tank-C", "weight": 2345}
F_4_Fuel_tank_W = {"clsid": "{7B4B122D-C12C-4DB4-834E-4D8BB4D863A8}", "name": "F-4 Fuel tank-W", "weight": 1420}
F_5_150Gal_Fuel_tank = {"clsid": "{PTB-150GAL}", "name": "F-5 150Gal Fuel tank", "weight": 509}
F_5_275Gal_Fuel_tank = {"clsid": "{0395076D-2F77-4420-9D33-087A4398130B}", "name": "F-5 275Gal Fuel tank", "weight": 909}
GAU_12_Gunpod_w_AP_M79 = {"clsid": "{GAU_12_Equalizer_AP}", "name": "GAU 12 Gunpod w/AP M79", "weight": 283.9}
GAU_12_Gunpod_w_HE_M792 = {"clsid": "{GAU_12_Equalizer_HE}", "name": "GAU 12 Gunpod w/HE M792", "weight": 283.9}
GAU_12_Gunpod_w_SAPHEI_T = {"clsid": "{GAU_12_Equalizer}", "name": "GAU 12 Gunpod w/SAPHEI-T", "weight": 283.9}
GBU_10 = {"clsid": "{BRU-32 GBU-10}", "name": "GBU-10", "weight": 997.38}
GBU_10___2000lb_Laser_Guided_Bomb = {"clsid": "{51F9AAE5-964F-4D21-83FB-502E3BFE5F8A}", "name": "GBU-10 - 2000lb Laser Guided Bomb", "weight": 959}
GBU_12 = {"clsid": "{BRU-32 GBU-12}", "name": "GBU-12", "weight": 332.38}
GBU_12___500lb_Laser_Guided_Bomb = {"clsid": "{DB769D48-67D7-42ED-A2BE-108D566C8B1E}", "name": "GBU-12 - 500lb Laser Guided Bomb", "weight": 277}
GBU_16 = {"clsid": "{BRU-32 GBU-16}", "name": "GBU-16", "weight": 621.38}
GBU_16___1000lb_Laser_Guided_Bomb = {"clsid": "{0D33DDAE-524F-4A4E-B5B8-621754FE3ADE}", "name": "GBU-16 - 1000lb Laser Guided Bomb", "weight": 513}
GBU_24 = {"clsid": "{BRU-32 GBU-24}", "name": "GBU-24", "weight": 1107.38}
GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb = {"clsid": "{34759BBC-AF1E-4AEE-A581-498FF7A6EBCE}", "name": "GBU-24 Paveway III - 2000lb Laser Guided Bomb", "weight": 1087}
GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb_ = {"clsid": "{GBU-24}", "name": "GBU-24 Paveway III - 2000lb Laser Guided Bomb", "weight": 1087}
GBU_27___2000lb_Laser_Guided_Penetrator_Bomb = {"clsid": "{EF0A9419-01D6-473B-99A3-BEBDB923B14D}", "name": "GBU-27 - 2000lb Laser Guided Penetrator Bomb", "weight": 1200}
GBU_28___5000lb_Laser_Guided_Penetrator_Bomb = {"clsid": "{F06B775B-FC70-44B5-8A9F-5B5E2EB839C7}", "name": "GBU-28 - 5000lb Laser Guided Penetrator Bomb", "weight": 2130}
GBU_31V3B_8 = {"clsid": "GBU-31V3B*8", "name": "8 x GBU-31(V)3/B - JDAM, 2000lb GPS Guided Penetrator Bombs", "weight": 7848}
GBU_31_8 = {"clsid": "GBU-31*8", "name": "8 x GBU-31(V)1/B - JDAM, 2000lb GPS Guided Bombs", "weight": 7152}
GBU_31_V_1_B___JDAM__2000lb_GPS_Guided_Bomb = {"clsid": "{GBU-31}", "name": "GBU-31(V)1/B - JDAM, 2000lb GPS Guided Bomb", "weight": 934}
GBU_31_V_2_B___JDAM__2000lb_GPS_Guided_Bomb = {"clsid": "{GBU_31_V_2B}", "name": "GBU-31(V)2/B - JDAM, 2000lb GPS Guided Bomb", "weight": 934}
GBU_31_V_3_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb = {"clsid": "{GBU-31V3B}", "name": "GBU-31(V)3/B - JDAM, 2000lb GPS Guided Penetrator Bomb", "weight": 981}
GBU_31_V_4_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb = {"clsid": "{GBU_31_V_4B}", "name": "GBU-31(V)4/B - JDAM, 2000lb GPS Guided Penetrator Bomb", "weight": 970}
GBU_32_V_2_B___JDAM__1000lb_GPS_Guided_Bomb = {"clsid": "{GBU_32_V_2B}", "name": "GBU-32(V)2/B - JDAM, 1000lb GPS Guided Bomb", "weight": 467}
GBU_38_16 = {"clsid": "GBU-38*16", "name": "16 x GBU-38 - JDAM, 500lb GPS Guided Bombs", "weight": 3856}
GBU_38___JDAM__500lb_GPS_Guided_Bomb = {"clsid": "{GBU-38}", "name": "GBU-38 - JDAM, 500lb GPS Guided Bomb", "weight": 241}
GBU_54B___LJDAM__500lb_Laser__GPS_Guided_Bomb_LD = {"clsid": "{GBU_54_V_1B}", "name": "GBU-54B - LJDAM, 500lb Laser & GPS Guided Bomb LD", "weight": 253}
GUV_VOG = {"clsid": "GUV_VOG", "name": "GUV-8700 w AP-30 - 30mm Grenade Launcher", "weight": 274}
GUV_YakB_GSHP = {"clsid": "GUV_YakB_GSHP", "name": "GUV-8700 w 1x12.7 mm & 2x7.62 mm Rotary HMG", "weight": 452}
HOT3 = {"clsid": "{HOT3G}", "name": "HOT3", "weight": 32}
HOT3_ = {"clsid": "{HOT3D}", "name": "HOT3", "weight": 32}
HSAB_with_9_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{4CD2BB0F-5493-44EF-A927-9760350F7BA1}", "name": "HSAB with 9 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 2050}
HSAB_with_9_x_Mk_83___1000lb_GP_Bombs_LD = {"clsid": "{696CFFC4-0BDE-42A8-BE4B-0BE3D9DD723C}", "name": "HSAB with 9 x Mk-83 - 1000lb GP Bombs LD", "weight": 8100}
HVAR_SMOKE__UnGd_Rkt = {"clsid": "{HVAR_SMOKE_2}", "name": "HVAR SMOKE, UnGd Rkt", "weight": 100}
HVAR_Smoke_Generator = {"clsid": "{HVAR_SMOKE_GENERATOR}", "name": "HVAR Smoke Generator", "weight": 64}
HVAR__UnGd_Rkt = {"clsid": "{HVAR}", "name": "HVAR, UnGd Rkt", "weight": 64}
I16_DROP_FUEL_TANK = {"clsid": "I16_DROP_FUEL_TANK", "name": "I-16 External Fuel Tank", "weight": 73}
I16_FAB_100SV = {"clsid": "I16_FAB_100SV", "name": "FAB-100SV", "weight": 100}
I16_RS_82 = {"clsid": "I16_RS_82", "name": "RS-82", "weight": 9.7}
IAB_500___470_kg__bomb__free_fall = {"clsid": "{IAB-500}", "name": "IAB-500 - 470 kg, bomb, free fall", "weight": 470}
IR_Deflector = {"clsid": "{IR_Deflector}", "name": "IR Deflector", "weight": 5}
KAB_1500Kr___1500kg_TV_Guided_Bomb = {"clsid": "{KAB_1500Kr_LOADOUT}", "name": "KAB-1500Kr - 1500kg TV Guided Bomb", "weight": 1525}
KAB_1500LG_Pr___1500kg_Laser_Guided_Penetrator_Bomb = {"clsid": "{KAB_1500LG_LOADOUT}", "name": "KAB-1500LG-Pr - 1500kg Laser Guided Penetrator Bomb", "weight": 1525}
KAB_1500L___1500kg_Laser_Guided_Bomb = {"clsid": "{39821727-F6E2-45B3-B1F0-490CC8921D1E}", "name": "KAB-1500L - 1500kg Laser Guided Bomb", "weight": 1560}
KAB_500Kr___500kg_TV_Guided_Bomb = {"clsid": "{E2C426E3-8B10-4E09-B733-9CDC26520F48}", "name": "KAB-500Kr - 500kg TV Guided Bomb", "weight": 560}
KAB_500LG___500kg_Laser_Guided_Bomb = {"clsid": "{BA565F89-2373-4A84-9502-A0E017D3A44A}", "name": "KAB-500LG - 500kg Laser Guided Bomb", "weight": 534}
KAB_500S___500kg_GPS_Guided_Bomb = {"clsid": "{KAB_500S_LOADOUT}", "name": "KAB-500S - 500kg GPS Guided Bomb", "weight": 500}
KB_Flare_Chaff_dispenser_pod = {"clsid": "{KB}", "name": "KB Flare/Chaff dispenser pod", "weight": 296}
Kh_22__AS_4_Kitchen____1000kg__AShM__IN__Act_Pas_Rdr = {"clsid": "{12429ECF-03F0-4DF6-BCBD-5D38B6343DE1}", "name": "Kh-22 (AS-4 Kitchen) - 1000kg, AShM, IN & Act/Pas Rdr", "weight": 6800}
Kh_23L_Grom__AS_7_Kerry____286kg__ASM__Laser_Guided = {"clsid": "{9F390892-E6F9-42C9-B84E-1136A881DCB2}", "name": "Kh-23L Grom (AS-7 Kerry) - 286kg, ASM, Laser Guided", "weight": 288}
Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser = {"clsid": "{6DADF342-D4BA-4D8A-B081-BA928C4AF86D}", "name": "Kh-25ML (AS-10 Karen) - 300kg, ASM, Semi-Act Laser", "weight": 360}
Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser_ = {"clsid": "{79D73885-0801-45a9-917F-C90FE1CE3DFC}", "name": "Kh-25ML (AS-10 Karen) - 300kg, ASM, Semi-Act Laser", "weight": 360}
Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser__ = {"clsid": "{X-25ML}", "name": "Kh-25ML (AS-10 Karen) - 300kg, ASM, Semi-Act Laser", "weight": 360}
Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr = {"clsid": "{E86C5AA5-6D49-4F00-AD2E-79A62D6DDE26}", "name": "Kh-25MPU (Updated AS-12 Kegler) - 320kg, ARM, IN & Pas Rdr", "weight": 370}
Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr_ = {"clsid": "{752AF1D2-EBCC-4bd7-A1E7-2357F5601C70}", "name": "Kh-25MPU (Updated AS-12 Kegler) - 320kg, ARM, IN & Pas Rdr", "weight": 370}
Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr__ = {"clsid": "{X-25MPU}", "name": "Kh-25MPU (Updated AS-12 Kegler) - 320kg, ARM, IN & Pas Rdr", "weight": 370}
Kh_25MP__AS_12_Kegler____320kg__ARM__Pas_Rdr = {"clsid": "{Kh-25MP}", "name": "Kh-25MP (AS-12 Kegler) - 320kg, ARM, Pas Rdr", "weight": 355}
Kh_25MR__AS_10_Karen____300kg__ASM__10km__RC_Guided = {"clsid": "{292960BB-6518-41AC-BADA-210D65D5073C}", "name": "Kh-25MR (AS-10 Karen) - 300kg, ASM, 10km, RC Guided", "weight": 360}
Kh_25MR__AS_10_Karen____300kg__ASM__RC_Guided = {"clsid": "{X-25MR}", "name": "Kh-25MR (AS-10 Karen) - 300kg, ASM, RC Guided", "weight": 360}
Kh_28__AS_9_Kyle____720kg__ARM__Pas_Rdr = {"clsid": "{Kh-28}", "name": "Kh-28 (AS-9 Kyle) - 720kg, ARM, Pas Rdr", "weight": 715}
Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser = {"clsid": "{3468C652-E830-4E73-AFA9-B5F260AB7C3D}", "name": "Kh-29L (AS-14 Kedge) - 657kg, ASM, Semi-Act Laser", "weight": 747}
Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser_ = {"clsid": "{D4A8D9B9-5C45-42e7-BBD2-0E54F8308432}", "name": "Kh-29L (AS-14 Kedge) - 657kg, ASM, Semi-Act Laser", "weight": 747}
Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser__ = {"clsid": "{X-29L}", "name": "Kh-29L (AS-14 Kedge) - 657kg, ASM, Semi-Act Laser", "weight": 747}
Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided = {"clsid": "{B4FC81C9-B861-4E87-BBDC-A1158E648EBF}", "name": "Kh-29T (AS-14 Kedge) - 670kg, ASM, TV Guided", "weight": 760}
Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided_ = {"clsid": "{601C99F7-9AF3-4ed7-A565-F8B8EC0D7AAC}", "name": "Kh-29T (AS-14 Kedge) - 670kg, ASM, TV Guided", "weight": 760}
Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided__ = {"clsid": "{X-29T}", "name": "Kh-29T (AS-14 Kedge) - 670kg, ASM, TV Guided", "weight": 760}
Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr = {"clsid": "{4D13E282-DF46-4B23-864A-A9423DFDE504}", "name": "Kh-31A (AS-17 Krypton) - 610kg, AShM, IN & Act Rdr", "weight": 690}
Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr_ = {"clsid": "{4D13E282-DF46-4B23-864A-A9423DFDE50A}", "name": "Kh-31A (AS-17 Krypton) - 610kg, AShM, IN & Act Rdr", "weight": 690}
Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr__ = {"clsid": "{X-31A}", "name": "Kh-31A (AS-17 Krypton) - 610kg, AShM, IN & Act Rdr", "weight": 690}
Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr = {"clsid": "{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF03}", "name": "Kh-31P (AS-17 Krypton) - 600kg, ARM, IN & Pas Rdr", "weight": 690}
Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr_ = {"clsid": "{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF0A}", "name": "Kh-31P (AS-17 Krypton) - 600kg, ARM, IN & Pas Rdr", "weight": 690}
Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr__ = {"clsid": "{X-31P}", "name": "Kh-31P (AS-17 Krypton) - 600kg, ARM, IN & Pas Rdr", "weight": 690}
Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr = {"clsid": "{2234F529-1D57-4496-8BB0-0150F9BDBBD2}", "name": "Kh-35 (AS-20 Kayak) - 520kg, AShM, IN & Act Rdr", "weight": 520}
Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr_ = {"clsid": "{2234F529-1D57-4496-8BB0-0150F9BDBBD3}", "name": "Kh-35 (AS-20 Kayak) - 520kg, AShM, IN & Act Rdr", "weight": 570}
Kh_41__SS_N_22_Sunburn____4500kg__AShM__IN__Act_Rdr = {"clsid": "{3F26D9C5-5CC3-4E42-BC79-82FAA54E9F26}", "name": "Kh-41 (SS-N-22-Sunburn) - 4500kg, AShM, IN & Act Rdr", "weight": 4500}
Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr = {"clsid": "{FE382A68-8620-4AC0-BDF5-709BFE3977D7}", "name": "Kh-58U (AS-11 Kilter) - 640kg, ARM, IN & Pas Rdr", "weight": 730}
Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr_ = {"clsid": "{B5CA9846-776E-4230-B4FD-8BCC9BFB1676}", "name": "Kh-58U (AS-11 Kilter) - 640kg, ARM, IN & Pas Rdr", "weight": 730}
Kh_59M__AS_18_Kazoo____930kg__ASM__IN = {"clsid": "{40AB87E8-BEFB-4D85-90D9-B2753ACF9514}", "name": "Kh-59M (AS-18 Kazoo) - 930kg, ASM, IN", "weight": 1115}
Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC = {"clsid": "{BADAF2DE-68B5-472A-8AAC-35BAEFF6B4A1}", "name": "Kh-65 (AS-15B Kent) - 1250kg, ASM, IN & MCC", "weight": 1250}
Kh_66_Grom__21____AGM__radar_guided_APU_68 = {"clsid": "{Kh-66_Grom}", "name": "Kh-66 Grom (21) - AGM, radar guided APU-68", "weight": 300}
KMGU_2___96_x_AO_2_5RT_Dispenser__CBU__HE_Frag = {"clsid": "{96A7F676-F956-404A-AD04-F33FB2C74884}", "name": "KMGU-2 - 96 x AO-2.5RT Dispenser (CBU) HE/Frag", "weight": 778}
KMGU_2___96_x_PTAB_2_5KO_Dispenser__CBU__HEAT_AP = {"clsid": "{96A7F676-F956-404A-AD04-F33FB2C74881}", "name": "KMGU-2 - 96 x PTAB-2.5KO Dispenser (CBU) HEAT/AP", "weight": 675.6}
KORD_12_7 = {"clsid": "KORD_12_7", "name": "Kord 12.7mm HMG", "weight": 95}
Kopyo_radar_pod = {"clsid": "{F4920E62-A99A-11d8-9897-000476191836}", "name": "Kopyo radar pod", "weight": 115}
Kormoran___ASM = {"clsid": "{7210496B-7B81-4B52-80D6-8529ECF847CD}", "name": "Kormoran - ASM", "weight": 660}
K_13A = {"clsid": "{K-13A}", "name": "K-13A", "weight": 90}
L005_Sorbtsiya_ECM_pod__left_ = {"clsid": "{44EE8698-89F9-48EE-AF36-5FD31896A82F}", "name": "L005 Sorbtsiya ECM pod (left)", "weight": 150}
L005_Sorbtsiya_ECM_pod__right_ = {"clsid": "{44EE8698-89F9-48EE-AF36-5FD31896A82A}", "name": "L005 Sorbtsiya ECM pod (right)", "weight": 150}
L175V_Khibiny_ECM_pod = {"clsid": "{ECM_POD_L_175V}", "name": "L175V Khibiny ECM pod", "weight": 150}
LANTIRN_Targeting_Pod = {"clsid": "{F14-LANTIRN-TP}", "name": "LANTIRN Targeting Pod", "weight": 342}
LAU3_HE151 = {"clsid": "LAU3_HE151", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 234}
LAU3_HE5 = {"clsid": "LAU3_HE5", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 234}
LAU3_WP156 = {"clsid": "LAU3_WP156", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 234}
LAU3_WP1B = {"clsid": "LAU3_WP1B", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 234}
LAU3_WP61 = {"clsid": "LAU3_WP61", "name": "LAU-3 pod - 19 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 234}
LAU_105 = {"clsid": "LAU-105", "name": "LAU-105", "weight": 18}
LAU_105_1_AIM_9L_L = {"clsid": "LAU-105_1*AIM-9L_L", "name": "LAU-105 with 1 x AIM-9L Sidewinder IR AAM", "weight": 115.73}
LAU_105_1_AIM_9L_R = {"clsid": "LAU-105_1*AIM-9L_R", "name": "LAU-105 with 1 x AIM-9L Sidewinder IR AAM", "weight": 115.73}
LAU_105_1_AIM_9M_L = {"clsid": "LAU-105_1*AIM-9M_L", "name": "LAU-105 with 1 x AIM-9M Sidewinder IR AAM", "weight": 115.73}
LAU_105_1_AIM_9M_R = {"clsid": "LAU-105_1*AIM-9M_R", "name": "LAU-105 with 1 x AIM-9M Sidewinder IR AAM", "weight": 115.73}
LAU_105_1_CATM_9M_L = {"clsid": "LAU-105_1*CATM-9M_L", "name": "LAU-105 with 1 x Captive AIM-9M for ACM", "weight": 115.73}
LAU_105_1_CATM_9M_R = {"clsid": "LAU-105_1*CATM-9M_R", "name": "LAU-105 with 1 x Captive AIM-9M for ACM", "weight": 115.73}
LAU_105_2_AIM_9L = {"clsid": "LAU-105_2*AIM-9L", "name": "LAU-105 with 2 x AIM-9L Sidewinder IR AAM", "weight": 201.46}
LAU_105_2_AIM_9P5 = {"clsid": "LAU-105_2*AIM-9P5", "name": "LAU-105 with 2 x AIM-9P5 Sidewinder IR AAM", "weight": 201}
LAU_105_2_CATM_9M = {"clsid": "LAU-105_2*CATM-9M", "name": "LAU-105 with 2 x Captive AIM-9M for ACM", "weight": 201.46}
LAU_105_AIS_ASQ_T50_L = {"clsid": "LAU-105_AIS_ASQ_T50_L", "name": "LAU-105 with 1 x AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 92.6}
LAU_105_AIS_ASQ_T50_R = {"clsid": "LAU-105_AIS_ASQ_T50_R", "name": "LAU-105 with 1 x AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 92.6}
LAU_105_with_2_x_AIM_9M_Sidewinder_IR_AAM = {"clsid": "{DB434044-F5D0-4F1F-9BA9-B73027E18DD3}", "name": "LAU-105 with 2 x AIM-9M Sidewinder IR AAM", "weight": 201.46}
LAU_105_with_2_x_AIM_9P_Sidewinder_IR_AAM = {"clsid": "{3C0745ED-8B0B-42eb-B907-5BD5C1717447}", "name": "LAU-105 with 2 x AIM-9P Sidewinder IR AAM", "weight": 202.36}
LAU_10R_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = {"clsid": "{LAU_10R}", "name": "LAU-10R pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", "weight": 316.6}
LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG = {"clsid": "{F3EFE0AB-E91A-42D8-9CA2-B63C91ED570A}", "name": "LAU-10 pod - 4 x 127mm ZUNI, UnGd Rkts Mk71, HE/FRAG", "weight": 316.6}
LAU_10___4_ZUNI_MK_71 = {"clsid": "{BRU42_LAU10}", "name": "LAU-10 - 4 ZUNI MK 71", "weight": 568}
LAU_10___4_ZUNI_MK_71_ = {"clsid": "{BRU3242_LAU10}", "name": "LAU-10 - 4 ZUNI MK 71", "weight": 625.38}
LAU_115C_with_AIM_7E_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7E}", "name": "LAU-115C with AIM-7E Sparrow Semi-Active Radar", "weight": 284.4}
LAU_115C_with_AIM_7F_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7F}", "name": "LAU-115C with AIM-7F Sparrow Semi-Active Radar", "weight": 285.4}
LAU_115C_with_AIM_7MH_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7H}", "name": "LAU-115C with AIM-7MH Sparrow Semi-Active Radar", "weight": 285.4}
LAU_115_2_LAU_127_AIM_120B = {"clsid": "LAU-115_2*LAU-127_AIM-120B", "name": "LAU-115 with 2 x LAU-127 AIM-120B AMRAAM - Active Rdr AAM", "weight": 457}
LAU_115_2_LAU_127_AIM_120C = {"clsid": "LAU-115_2*LAU-127_AIM-120C", "name": "LAU-115 with 2 x LAU-127 AIM-120C-5 AMRAAM - Active Rdr AAM", "weight": 468}
LAU_115_2_LAU_127_AIM_9L = {"clsid": "LAU-115_2*LAU-127_AIM-9L", "name": "LAU-115 with 2 x LAU-127 AIM-9L Sidewinder IR AAM", "weight": 316.46}
LAU_115_2_LAU_127_AIM_9M = {"clsid": "LAU-115_2*LAU-127_AIM-9M", "name": "LAU-115 with 2 x LAU-127 AIM-9M Sidewinder IR AAM", "weight": 316.46}
LAU_115_2_LAU_127_AIM_9X = {"clsid": "LAU-115_2*LAU-127_AIM-9X", "name": "LAU-115 with 2 x LAU-127 AIM-9X Sidewinder IR AAM", "weight": 313.92}
LAU_115_2_LAU_127_CATM_9M = {"clsid": "LAU-115_2*LAU-127_CATM-9M", "name": "LAU-115 with 2 x LAU-127 Captive AIM-9M for ACM", "weight": 316.46}
LAU_115_LAU_127_AIM_9L = {"clsid": "LAU-115_LAU-127_AIM-9L", "name": "LAU-115 with 1 x LAU-127 AIM-9L Sidewinder IR AAM", "weight": 230.73}
LAU_115_LAU_127_AIM_9L_R = {"clsid": "LAU-115_LAU-127_AIM-9L_R", "name": "LAU-115 with 1 x LAU-127 AIM-9L Sidewinder IR AAM", "weight": 230.73}
LAU_115_LAU_127_AIM_9M = {"clsid": "LAU-115_LAU-127_AIM-9M", "name": "LAU-115 with 1 x LAU-127 AIM-9M Sidewinder IR AAM", "weight": 230.73}
LAU_115_LAU_127_AIM_9M_R = {"clsid": "LAU-115_LAU-127_AIM-9M_R", "name": "LAU-115 with 1 x LAU-127 AIM-9M Sidewinder IR AAM", "weight": 230.73}
LAU_115_LAU_127_AIM_9X = {"clsid": "LAU-115_LAU-127_AIM-9X", "name": "LAU-115 with 1 x LAU-127 AIM-9X Sidewinder IR AAM", "weight": 229.46}
LAU_115_LAU_127_AIM_9X_R = {"clsid": "LAU-115_LAU-127_AIM-9X_R", "name": "LAU-115 with 1 x LAU-127 AIM-9X Sidewinder IR AAM", "weight": 229.46}
LAU_115_LAU_127_CATM_9M = {"clsid": "LAU-115_LAU-127_CATM-9M", "name": "LAU-115 with 1 x LAU-127 Captive AIM-9M for ACM", "weight": 230.73}
LAU_115_LAU_127_CATM_9M_R = {"clsid": "LAU-115_LAU-127_CATM-9M_R", "name": "LAU-115 with 1 x LAU-127 Captive AIM-9M for ACM", "weight": 230.73}
LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM = {"clsid": "{LAU-115 - AIM-120B}", "name": "LAU-115 with 1 x LAU-127 AIM-120B AMRAAM - Active Rdr AAM", "weight": 301}
LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM_ = {"clsid": "{LAU-115 - AIM-120B_R}", "name": "LAU-115 with 1 x LAU-127 AIM-120B AMRAAM - Active Rdr AAM", "weight": 301}
LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM = {"clsid": "{LAU-115 - AIM-120C}", "name": "LAU-115 with 1 x LAU-127 AIM-120C-5 AMRAAM - Active Rdr AAM", "weight": 306.5}
LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM_ = {"clsid": "{LAU-115 - AIM-120C_R}", "name": "LAU-115 with 1 x LAU-127 AIM-120C-5 AMRAAM - Active Rdr AAM", "weight": 306.5}
LAU_115_with_AIM_7M_Sparrow_Semi_Active_Radar = {"clsid": "{LAU-115 - AIM-7M}", "name": "LAU-115 with AIM-7M Sparrow Semi-Active Radar", "weight": 285.5}
LAU_117_AGM_65A = {"clsid": "LAU_117_AGM_65A", "name": "LAU-117 with AGM-65A - Maverick A (TV Guided)", "weight": 269.5}
LAU_117_AGM_65B = {"clsid": "LAU_117_AGM_65B", "name": "LAU-117 with AGM-65B - Maverick B (TV Guided)", "weight": 269.5}
LAU_117_AGM_65F = {"clsid": "LAU_117_AGM_65F", "name": "LAU-117 with AGM-65F - Maverick F (IIR ASM)", "weight": 360}
LAU_117_AGM_65G = {"clsid": "LAU_117_AGM_65G", "name": "LAU-117 with AGM-65G - Maverick G (IIR ASM - Lg Whd)", "weight": 360}
LAU_117_AGM_65H = {"clsid": "LAU_117_AGM_65H", "name": "LAU-117 with AGM-65H - Maverick H (CCD Imp ASM)", "weight": 267}
LAU_117_AGM_65L = {"clsid": "LAU_117_AGM_65L", "name": "LAU-117 with AGM-65L - Maverick E2/L (CCD Laser ASM)", "weight": 351}
LAU_117_CATM_65K = {"clsid": "LAU_117_CATM_65K", "name": "LAU-117 with CATM-65K - Captive Trg Round for Mav K (CCD)", "weight": 356}
LAU_117_TGM_65D = {"clsid": "LAU_117_TGM_65D", "name": "LAU-117 with TGM-65D - Trg Round for Mav D (IIR)", "weight": 277}
LAU_117_TGM_65G = {"clsid": "LAU_117_TGM_65G", "name": "LAU-117 with TGM-65G - Trg Round for Mav G (IIR)", "weight": 360}
LAU_117_TGM_65H = {"clsid": "LAU_117_TGM_65H", "name": "LAU-117 with TGM-65H - Trg Round for Mav H (CCD)", "weight": 267}
LAU_117_with_AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{444BA8AE-82A7-4345-842E-76154EFCCA46}", "name": "LAU-117 with AGM-65D - Maverick D (IIR ASM)", "weight": 277}
LAU_117_with_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{F16A4DE0-116C-4A71-97F0-2CF85B0313EC}", "name": "LAU-117 with AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 345}
LAU_117_with_AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{69DC8AE7-8F77-427B-B8AA-B19D3F478B66}", "name": "LAU-117 with AGM-65K - Maverick K (CCD Imp ASM)", "weight": 356}
LAU_118a_with_AGM_45B_Shrike_ARM__Imp_ = {"clsid": "{3E6B632D-65EB-44D2-9501-1C2D04515405}", "name": "LAU-118a with AGM-45B Shrike ARM (Imp)", "weight": 222.4}
LAU_127_AIM_9L = {"clsid": "LAU-127_AIM-9L", "name": "LAU-127 AIM-9L Sidewinder IR AAM", "weight": 131.03}
LAU_127_AIM_9M = {"clsid": "LAU-127_AIM-9M", "name": "LAU-127 AIM-9M Sidewinder IR AAM", "weight": 131.03}
LAU_127_AIM_9X = {"clsid": "LAU-127_AIM-9X", "name": "LAU-127 AIM-9X Sidewinder IR AAM", "weight": 129.76}
LAU_127_CATM_9M = {"clsid": "LAU-127_CATM-9M", "name": "LAU-127 Captive AIM-9M for ACM", "weight": 131.03}
LAU_131x3_HYDRA_70_M151 = {"clsid": "LAU_131x3_HYDRA_70_M151", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 406.65}
LAU_131x3_HYDRA_70_M156 = {"clsid": "LAU_131x3_HYDRA_70_M156", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 410.43}
LAU_131x3_HYDRA_70_M257 = {"clsid": "LAU_131x3_HYDRA_70_M257", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 423.45}
LAU_131x3_HYDRA_70_M274 = {"clsid": "LAU_131x3_HYDRA_70_M274", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 406.65}
LAU_131x3_HYDRA_70_MK1 = {"clsid": "LAU_131x3_HYDRA_70_MK1", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 379.56}
LAU_131x3_HYDRA_70_MK5 = {"clsid": "LAU_131x3_HYDRA_70_MK5", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 373.26}
LAU_131x3_HYDRA_70_MK61 = {"clsid": "LAU_131x3_HYDRA_70_MK61", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 379.56}
LAU_131x3_HYDRA_70_WTU1B = {"clsid": "LAU_131x3_HYDRA_70_WTU1B", "name": "BRU-42 with 3 x LAU-131 pods - 21 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 406.65}
LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS = {"clsid": "{LAU-131 - 7 AGR-20A}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, Laser Guided Rkts M151, HE APKWS", "weight": 134.5}
LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS = {"clsid": "{LAU-131 - 7 AGR-20 M282}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, Laser Guided Rkts M282, MPP APKWS", "weight": 148.5}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{69926055-0DA8-4530-9F2F-C86B157EA9F6}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 102.3}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{2AF2EC3F-9065-4de5-93E1-1739C9A71EF7}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 103.56}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum = {"clsid": "{DAD45FE5-CFF0-4a2b-99D4-5D044D3BC22F}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 107.9}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk = {"clsid": "{6D6D5C07-2A90-4a68-9A74-C5D0CFFB05D9}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 102.3}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice = {"clsid": "{D22C2D63-E5C9-4247-94FB-5E8F3DE22B71}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 93.27}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = {"clsid": "{319293F2-392C-4617-8315-7C88C22AF7C4}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 91.17}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice = {"clsid": "{1CA5E00B-D545-4ff9-9B53-5970E292F14D}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 93.27}
LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice = {"clsid": "{DDCE7D70-5313-4181-8977-F11018681662}", "name": "LAU-131 pod - 7 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 102.3}
LAU_138_AIM_9L = {"clsid": "{LAU-138 wtip - AIM-9L}", "name": "LAU-138 AIM-9L", "weight": 85.5}
LAU_138_AIM_9M = {"clsid": "{LAU-138 wtip - AIM-9M}", "name": "LAU-138 AIM-9M", "weight": 86.64}
LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{LAU3_FFAR_WP156}", "name": "LAU-3 pod - 19 x 2.75\" FFAR, UnGd Rkts M156, Wht Phos", "weight": 312.8707256}
LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk1__HE = {"clsid": "{LAU3_FFAR_MK1HE}", "name": "LAU-3 pod - 19 x 2.75\" FFAR, UnGd Rkts Mk1, HE", "weight": 285.292332}
LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT = {"clsid": "{LAU3_FFAR_MK5HEAT}", "name": "LAU-3 pod - 19 x 2.75\" FFAR, UnGd Rkts Mk5, HEAT", "weight": 286.1541568}
LAU_61R_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{LAU_61R}", "name": "LAU-61R pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 271.5}
LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{FD90A1DC-9147-49FA-BF56-CB83EF0BD32B}", "name": "LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 273.4}
LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{3DFB7321-AB0E-11d7-9897-000476191836}", "name": "LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 274.92}
LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{LAU_61_M282}", "name": "LAU-61 pod - 19 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 309.88}
LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{LAU68_FFAR_WP156}", "name": "LAU-68 pod - 7 x 2.75\" FFAR, UnGd Rkts M156, Wht Phos", "weight": 120.1560568}
LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk1__HE = {"clsid": "{LAU68_FFAR_MK1HE}", "name": "LAU-68 pod - 7 x 2.75\" FFAR, UnGd Rkts Mk1, HE", "weight": 109.995596}
LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT = {"clsid": "{LAU68_FFAR_MK5HEAT}", "name": "LAU-68 pod - 7 x 2.75\" FFAR, UnGd Rkts Mk5, HEAT", "weight": 110.3131104}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{A021F29D-18AB-4d3e-985C-FC9C60E35E9E}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 113.9}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos = {"clsid": "{4F977A2A-CD25-44df-90EF-164BFA2AE72F}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 114.46}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum = {"clsid": "{647C5F26-BDD1-41e6-A371-8DE1E4CC0E94}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 118.8}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk = {"clsid": "{0877B74B-5A00-4e61-BA8A-A56450BA9E27}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 113.2}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP = {"clsid": "{LAU_68_M282}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts M282, HEDP", "weight": 127.34}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice = {"clsid": "{FC85D2ED-501A-48ce-9863-49D468DDD5FC}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 104.17}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT = {"clsid": "{174C6E6D-0C3D-42ff-BCB3-0853CB371F5C}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 102.07}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice = {"clsid": "{65396399-9F5C-4ec3-A7D2-5A8F4C1D90C4}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk61, Practice", "weight": 104.17}
LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice = {"clsid": "{1F7136CB-8120-4e77-B97B-945FF01FB67C}", "name": "LAU-68 pod - 7 x 2.75\" Hydra, UnGd Rkts WTU-1/B, Practice", "weight": 113.2}
LAU_7_AIM_9L = {"clsid": "{LAU-7 - AIM-9L}", "name": "LAU-7 AIM-9L", "weight": 100.5}
LAU_7_AIM_9M = {"clsid": "{LAU-7 - AIM-9M}", "name": "LAU-7 AIM-9M", "weight": 101.64}
LAU_7_with_2_x_AIM_9B_Sidewinder_IR_AAM = {"clsid": "{F4-2-AIM9B}", "name": "LAU-7 with 2 x AIM-9B Sidewinder IR AAM", "weight": 178.78}
LAU_7_with_2_x_AIM_9L_Sidewinder_IR_AAM = {"clsid": "{F4-2-AIM9L}", "name": "LAU-7 with 2 x AIM-9L Sidewinder IR AAM", "weight": 201.46}
LAU_7_with_2_x_AIM_9M_Sidewinder_IR_AAM = {"clsid": "{9DDF5297-94B9-42FC-A45E-6E316121CD85}", "name": "LAU-7 with 2 x AIM-9M Sidewinder IR AAM", "weight": 201.46}
LAU_7_with_2_x_AIM_9P5_Sidewinder_IR_AAM = {"clsid": "{F4-2-AIM9P5}", "name": "LAU-7 with 2 x AIM-9P5 Sidewinder IR AAM", "weight": 201}
LAU_7_with_2_x_AIM_9P_Sidewinder_IR_AAM = {"clsid": "{773675AB-7C29-422f-AFD8-32844A7B7F17}", "name": "LAU-7 with 2 x AIM-9P Sidewinder IR AAM", "weight": 202.36}
LAU_7_with_AIM_9B_Sidewinder_IR_AAM = {"clsid": "{GAR-8}", "name": "LAU-7 with AIM-9B Sidewinder IR AAM", "weight": 89.39}
LAU_7_with_AIM_9M_Sidewinder_IR_AAM = {"clsid": "{AIM-9M-ON-ADAPTER}", "name": "LAU-7 with AIM-9M Sidewinder IR AAM", "weight": 100.73}
LAU_7_with_AIM_9P5_Sidewinder_IR_AAM = {"clsid": "{AIM-9P5-ON-ADAPTER}", "name": "LAU-7 with AIM-9P5 Sidewinder IR AAM", "weight": 100.5}
LAU_7_with_AIM_9P_Sidewinder_IR_AAM = {"clsid": "{AIM-9P-ON-ADAPTER}", "name": "LAU-7 with AIM-9P Sidewinder IR AAM", "weight": 101.18}
LAU_7_with_AIM_9X_Sidewinder_IR_AAM = {"clsid": "{AIM-9X-ON-ADAPTER}", "name": "LAU-7 with AIM-9X Sidewinder IR AAM", "weight": 99.46}
LAU_7_with_AN_ASQ_T50_TCTS_Pod___ACMI_Pod = {"clsid": "{LAU-7_AIS_ASQ_T50}", "name": "LAU-7 with AN/ASQ-T50 TCTS Pod - ACMI Pod", "weight": 92.6}
LAU_88_AGM_65D_ONE = {"clsid": "LAU_88_AGM_65D_ONE", "name": "LAU-88 with 1 x AGM-65D - Maverick D (IIR ASM)", "weight": 429}
LAU_88_AGM_65H = {"clsid": "LAU_88_AGM_65H", "name": "LAU-88 with 1 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 419}
LAU_88_AGM_65H_2_L = {"clsid": "LAU_88_AGM_65H_2_L", "name": "LAU-88 with 2 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 627}
LAU_88_AGM_65H_2_R = {"clsid": "LAU_88_AGM_65H_2_R", "name": "LAU-88 with 2 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 627}
LAU_88_AGM_65H_3 = {"clsid": "LAU_88_AGM_65H_3", "name": "LAU-88 with 3 x AGM-65H - Maverick H (CCD Imp ASM)", "weight": 835}
LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{E6A6262A-CA08-4B3D-B030-E1A993B98452}", "name": "LAU-88 with 2 x AGM-65D - Maverick D (IIR ASM)", "weight": 647}
LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM__ = {"clsid": "{E6A6262A-CA08-4B3D-B030-E1A993B98453}", "name": "LAU-88 with 2 x AGM-65D - Maverick D (IIR ASM)", "weight": 647}
LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{2CC29C7A-E863-411C-8A6E-BD6F0E730548}", "name": "LAU-88 with 2 x AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 783}
LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd__ = {"clsid": "{2CC29C7A-E863-411C-8A6E-BD6F0E730547}", "name": "LAU-88 with 2 x AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 783}
LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{D7670BC7-881B-4094-906C-73879CF7EB28}", "name": "LAU-88 with 2 x AGM-65K - Maverick K (CCD Imp ASM)", "weight": 805}
LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM__ = {"clsid": "{D7670BC7-881B-4094-906C-73879CF7EB27}", "name": "LAU-88 with 2 x AGM-65K - Maverick K (CCD Imp ASM)", "weight": 805}
LAU_88_with_3_x_AGM_65D___Maverick_D__IIR_ASM_ = {"clsid": "{DAC53A2F-79CA-42FF-A77A-F5649B601308}", "name": "LAU-88 with 3 x AGM-65D - Maverick D (IIR ASM)", "weight": 865}
LAU_88_with_3_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_ = {"clsid": "{71AAB9B8-81C1-4925-BE50-1EF8E9899271}", "name": "LAU-88 with 3 x AGM-65E - Maverick E (Laser ASM - Lg Whd)", "weight": 1069}
LAU_88_with_3_x_AGM_65K___Maverick_K__CCD_Imp_ASM_ = {"clsid": "{907D835F-E650-4154-BAFD-C656882555C0}", "name": "LAU-88 with 3 x AGM-65K - Maverick K (CCD Imp ASM)", "weight": 1102}
LAU_SNEB68G___8xSNEB68_EAP = {"clsid": "{LAU_SNEB68G}", "name": "LAU_SNEB68G - 8xSNEB68_EAP", "weight": 50.08}
LAU_SNEB68G___8xSNEB68_WP = {"clsid": "{LAU_SNEB68_WP}", "name": "LAU_SNEB68G - 8xSNEB68_WP", "weight": 50.08}
Lantirn_F_16 = {"clsid": "{CAAC1CFD-6745-416B-AFA4-CB57414856D0}", "name": "Lantirn F-16", "weight": 445}
Lantirn_Target_Pod = {"clsid": "{D1744B93-2A8A-4C4D-B004-7A09CD8C8F3F}", "name": "Lantirn Target Pod", "weight": 200}
LR_25___25_x_ARF_8_M3_API = {"clsid": "{LR25_ARF8M3_API}", "name": "LR-25 - 25 x ARF-8/M3 API", "weight": 141}
LR_25___25_x_ARF_8_M3_HEI = {"clsid": "{LR25_ARF8M3_HEI}", "name": "LR-25 - 25 x ARF-8/M3 HEI", "weight": 161}
LR_25___25_x_ARF_8_M3_TP_SM = {"clsid": "{LR25_ARF8M3_TPSM}", "name": "LR-25 - 25 x ARF-8/M3 TP-SM", "weight": 141}
L_081_Fantasmagoria_ELINT_pod = {"clsid": "{0519A264-0AB6-11d6-9193-00A0249B6F00}", "name": "L-081 Fantasmagoria ELINT pod", "weight": 300}
M10_Smoke_Tank___blue = {"clsid": "{US_M10_SMOKE_TANK_BLUE}", "name": "M10 Smoke Tank - blue", "weight": 266.7}
M10_Smoke_Tank___green = {"clsid": "{US_M10_SMOKE_TANK_GREEN}", "name": "M10 Smoke Tank - green", "weight": 266.7}
M10_Smoke_Tank___orange = {"clsid": "{US_M10_SMOKE_TANK_ORANGE}", "name": "M10 Smoke Tank - orange", "weight": 266.7}
M10_Smoke_Tank___red = {"clsid": "{US_M10_SMOKE_TANK_RED}", "name": "M10 Smoke Tank - red", "weight": 266.7}
M10_Smoke_Tank___white = {"clsid": "{US_M10_SMOKE_TANK_WHITE}", "name": "M10 Smoke Tank - white", "weight": 266.7}
M10_Smoke_Tank___yellow = {"clsid": "{US_M10_SMOKE_TANK_YELLOW}", "name": "M10 Smoke Tank - yellow", "weight": 266.7}
M117___750lb_GP_Bomb_LD = {"clsid": "{00F5DAC4-0466-4122-998F-B1A298E34113}", "name": "M117 - 750lb GP Bomb LD", "weight": 340}
M134_L = {"clsid": "M134_L", "name": "M134 - 6 x 7.62mm MiniGun left", "weight": 146.4}
M134_R = {"clsid": "M134_R", "name": "M134 - 6 x 7.62mm MiniGun right", "weight": 146.4}
M134_SIDE_L = {"clsid": "M134_SIDE_L", "name": "M134 - 6 x 7.62mm MiniGun left door", "weight": 270.4}
M134_SIDE_R = {"clsid": "M134_SIDE_R", "name": "M134 - 6 x 7.62mm MiniGun right door", "weight": 270.4}
M2000_Fuel_tank = {"clsid": "{414DA830-B61A-4F9E-B71B-C2F6832E1D7A}", "name": "M2000 Fuel tank", "weight": 1050}
M260_HYDRA = {"clsid": "M260_HYDRA", "name": "M260 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 112}
M260_HYDRA_WP = {"clsid": "M260_HYDRA_WP", "name": "M260 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 112}
M261_MK151 = {"clsid": "M261_MK151", "name": "M261 pod - 19 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 234}
M261_MK156 = {"clsid": "M261_MK156", "name": "M261 pod - 19 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 234}
M60_SIDE_L = {"clsid": "M60_SIDE_L", "name": "M60 - 7.62mm MG left door", "weight": 141.4}
M60_SIDE_R = {"clsid": "M60_SIDE_R", "name": "M60 - 7.62mm MG right door", "weight": 141.4}
MAK79_2_MK_20 = {"clsid": "{MAK79_MK20 2L}", "name": "MAK79 2 MK-20", "weight": 464}
MAK79_2_MK_20_ = {"clsid": "{MAK79_MK20 2R}", "name": "MAK79 2 MK-20", "weight": 464}
MAK79_3_BDU_33 = {"clsid": "{MAK79_BDU33 3L}", "name": "MAK79 3 BDU-33", "weight": 63}
MAK79_3_BDU_33_ = {"clsid": "{MAK79_BDU33 3R}", "name": "MAK79 3 BDU-33", "weight": 63}
MAK79_3_BDU_45 = {"clsid": "{MAK79_BDU45 3L}", "name": "MAK79 3 BDU-45", "weight": 726}
MAK79_3_BDU_45B = {"clsid": "{MAK79_BDU45B 3L}", "name": "MAK79 3 BDU-45B", "weight": 726}
MAK79_3_BDU_45B_ = {"clsid": "{MAK79_BDU45B 3R}", "name": "MAK79 3 BDU-45B", "weight": 726}
MAK79_3_BDU_45_ = {"clsid": "{MAK79_BDU45 3R}", "name": "MAK79 3 BDU-45", "weight": 726}
MAK79_3_Mk_81 = {"clsid": "{MAK79_MK81 3L}", "name": "MAK79 3 Mk-81", "weight": 384}
MAK79_3_Mk_81_ = {"clsid": "{MAK79_MK81 3R}", "name": "MAK79 3 Mk-81", "weight": 384}
MAK79_3_Mk_82 = {"clsid": "{MAK79_MK82 3L}", "name": "MAK79 3 Mk-82", "weight": 753}
MAK79_3_Mk_82AIR = {"clsid": "{MAK79_MK82AIR 3L}", "name": "MAK79 3 Mk-82AIR", "weight": 753}
MAK79_3_Mk_82AIR_ = {"clsid": "{MAK79_MK82AIR 3R}", "name": "MAK79 3 Mk-82AIR", "weight": 753}
MAK79_3_Mk_82_ = {"clsid": "{MAK79_MK82 3R}", "name": "MAK79 3 Mk-82", "weight": 753}
MAK79_3_Mk_82_SnakeEye = {"clsid": "{MAK79_MK82SE 3L}", "name": "MAK79 3 Mk-82 SnakeEye", "weight": 753}
MAK79_3_Mk_82_SnakeEye_ = {"clsid": "{MAK79_MK82SE 3R}", "name": "MAK79 3 Mk-82 SnakeEye", "weight": 753}
MAK79_3_Mk_83 = {"clsid": "{MAK79_MK83 3L}", "name": "MAK79 3 Mk-83", "weight": 1371}
MAK79_3_Mk_83_ = {"clsid": "{MAK79_MK83 3R}", "name": "MAK79 3 Mk-83", "weight": 1371}
MAK79_4_BDU_33 = {"clsid": "{MAK79_BDU33 4}", "name": "MAK79 4 BDU-33", "weight": 84}
MAK79_4_BDU_45 = {"clsid": "{MAK79_BDU45 4}", "name": "MAK79 4 BDU-45", "weight": 968}
MAK79_4_BDU_45B = {"clsid": "{MAK79_BDU45B 4}", "name": "MAK79 4 BDU-45B", "weight": 968}
MAK79_4_Mk_81 = {"clsid": "{MAK79_MK81 4}", "name": "MAK79 4 Mk-81", "weight": 512}
MAK79_4_Mk_82 = {"clsid": "{MAK79_MK82 4}", "name": "MAK79 4 Mk-82", "weight": 1004}
MAK79_4_Mk_82AIR = {"clsid": "{MAK79_MK82AIR 4}", "name": "MAK79 4 Mk-82AIR", "weight": 1004}
MAK79_4_Mk_82_SnakeEye = {"clsid": "{MAK79_MK82SE 4}", "name": "MAK79 4 Mk-82 SnakeEye", "weight": 1004}
MAK79_MK_20 = {"clsid": "{MAK79_MK20 1R}", "name": "MAK79 MK-20", "weight": 232}
MAK79_MK_20_ = {"clsid": "{MAK79_MK20 1L}", "name": "MAK79 MK-20", "weight": 232}
MAK79_Mk_83 = {"clsid": "{MAK79_MK83 1R}", "name": "MAK79 Mk-83", "weight": 457}
MAK79_Mk_83_ = {"clsid": "{MAK79_MK83 1L}", "name": "MAK79 Mk-83", "weight": 457}
Matra_Magic_II = {"clsid": "{MMagicII}", "name": "Matra Magic II", "weight": 85}
Matra_Super_530D = {"clsid": "{Matra_S530D}", "name": "Matra Super 530D", "weight": 350}
Matra_Type_155_Rocket_Pod = {"clsid": "{Matra155RocketPod}", "name": "Matra Type 155 Rocket Pod", "weight": 190}
MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{5A1AC2B4-CA4B-4D09-A1AF-AC52FBC4B60B}", "name": "MBD2-67U with 4 x FAB-100 - 100kg GP Bombs LD", "weight": 465}
MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD_ = {"clsid": "{29A828E2-C6BB-11d8-9897-000476191836}", "name": "MBD2-67U with 4 x FAB-100 - 100kg GP Bombs LD", "weight": 465}
MBD3_U2T_with_2_x_FAB_1500_M_54___1500kg_GP_Bombs_LD = {"clsid": "{7C5F0F5F-0A0B-46E8-937C-8922303E39A8}", "name": "MBD3-U2T with 2 x FAB-1500 M-54 - 1500kg GP Bombs LD", "weight": 3100}
MBD3_U4T_with_4_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{6A367BB4-327F-4A04-8D9E-6D86BDC98E7E}", "name": "MBD3-U4T with 4 x FAB-250 - 250kg GP Bombs LD", "weight": 1060}
MBD3_U4T_with_4_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP = {"clsid": "{02B81892-7E24-4795-84F9-B8110C641AF0}", "name": "MBD3-U4T with 4 x RBK-250 - 42 x PTAB-2.5M, 250kg CBUs Medium HEAT/AP", "weight": 1126.4}
MBD3_U6_68_with_2_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{E659C4BE-2CD8-4472-8C08-3F28ACB61A8A}", "name": "MBD3-U6-68 with 2 x FAB-250 - 250kg GP Bombs LD", "weight": 550}
MBD3_U6_68_with_3_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{MBD3_U6_3*FAB-250_fwd}", "name": "MBD3-U6-68 with 3 x FAB-250 - 250kg GP Bombs LD", "weight": 810}
MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{3E35F8C1-052D-11d6-9191-00A0249B6F00}", "name": "MBD3-U6-68 with 4 x FAB-250 - 250kg GP Bombs LD", "weight": 1060}
MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD_ = {"clsid": "{MBD3_U6_4*FAB-250_fwd}", "name": "MBD3-U6-68 with 4 x FAB-250 - 250kg GP Bombs LD", "weight": 1060}
MBD3_U6_68_with_5_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{MBD3_U6_5*FAB-250}", "name": "MBD3-U6-68 with 5 x FAB-250 - 250kg GP Bombs LD", "weight": 1310}
MBD3_U6_68_with_6_x_BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bombs = {"clsid": "{E96E1EDD-FF3F-47CF-A959-576C3B682955}", "name": "MBD3-U6-68 with 6 x BetAB-500ShP - 500kg Concrete Piercing HD w booster Bombs", "weight": 3060}
MBD3_U6_68_with_6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{436C6FB9-8BF2-46B6-9DC4-F55ABF3CD1EC}", "name": "MBD3-U6-68 with 6 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 3060}
MBD3_U6_68_with_6_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{F99BEC1A-869D-4AC7-9730-FBA0E3B1F5FC}", "name": "MBD3-U6-68 with 6 x FAB-100 - 100kg GP Bombs LD", "weight": 660}
MBD3_U6_68_with_6_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{53BE25A4-C86C-4571-9BC0-47D668349595}", "name": "MBD3-U6-68 with 6 x FAB-250 - 250kg GP Bombs LD", "weight": 1560}
MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{FA673F4C-D9E4-4993-AA7A-019A92F3C005}", "name": "MBD3-U6-68 with 6 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 3060}
MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD_ = {"clsid": "{0D945D78-542C-4E9B-9A17-9B5008CC8D39}", "name": "MBD3-U6-68 with 6 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 3060}
MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP = {"clsid": "{F503C276-FE15-4C54-B310-17B50B735A84}", "name": "MBD3-U6-68 with 6 x RBK-500-255 - 30 x PTAB-10-5, 500kg CBUs Heavy HEAT/AP", "weight": 3060}
MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP_ = {"clsid": "{4D459A95-59C0-462F-8A57-34E80697F38B}", "name": "MBD3-U6-68 with 6 x RBK-500-255 - 30 x PTAB-10-5, 500kg CBUs Heavy HEAT/AP", "weight": 3060}
MBD3_U9M_with_9_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{5F1C54C0-0ABD-4868-A883-B52FF9FCB422}", "name": "MBD3-U9M with 9 x FAB-100 - 100kg GP Bombs LD", "weight": 960}
MBD3_U9M_with_9_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{E1AAE713-5FC3-4CAA-9FF5-3FDCFB899E33}", "name": "MBD3-U9M with 9 x FAB-250 - 250kg GP Bombs LD", "weight": 2310}
MBD3_U9M_with_9_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP = {"clsid": "{BF83E8FD-E7A2-40D2-9608-42E13AFE2193}", "name": "MBD3-U9M with 9 x RBK-250 - 42 x PTAB-2.5M, 250kg CBUs Medium HEAT/AP", "weight": 2535}
MBD3_with_3_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{005E70F5-C3EA-4E95-A148-C1044C42D845}", "name": "MBD3 with 3 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 1566}
MBD3_with_3_x_FAB_100___100kg_GP_Bombs_LD = {"clsid": "{CEE04106-B9AA-46B4-9CD1-CD3FDCF0CE78}", "name": "MBD3 with 3 x FAB-100 - 100kg GP Bombs LD", "weight": 360}
MBD3_with_3_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{D109EE9C-A1B7-4F1C-8D87-631C293A1D26}", "name": "MBD3 with 3 x FAB-250 - 250kg GP Bombs LD", "weight": 810}
MBD3_with_3_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{A1E85991-B58E-4E92-AE91-DED6DC85B2E7}", "name": "MBD3 with 3 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 1560}
MBD3_with_3_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP = {"clsid": "{EAD9B2C1-F3BA-4A7B-A2A5-84E2AF8A1975}", "name": "MBD3 with 3 x RBK-250 - 42 x PTAB 2.5M, 250kg CBUs Medium HEAT/AP", "weight": 885}
MBD3_with_3_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP = {"clsid": "{919CE839-9390-4629-BAF7-229DE19B8523}", "name": "MBD3 with 3 x RBK-500-255 - 30 x PTAB-10-5, 500kg CBUs Heavy HEAT/AP", "weight": 1560}
MER12_with_12_x_M117___750lb_GP_Bombs_LD = {"clsid": "{574EDEDF-20DE-4942-B2A2-B2EDFD621562}", "name": "MER12 with 12 x M117 - 750lb GP Bombs LD", "weight": 4250}
MER12_with_12_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{585D626E-7F42-4073-AB70-41E728C333E2}", "name": "MER12 with 12 x Mk-82 - 500lb GP Bombs LD", "weight": 3000}
MER2_with_2_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{0B9ABA77-93B8-45FC-9C63-82AFB2CB50A4}", "name": "MER2 with 2 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 553}
MER2_with_2_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{D5D51E24-348C-4702-96AF-97A714E72697}", "name": "MER2 with 2 x Mk-82 - 500lb GP Bombs LD", "weight": 565}
MER2_with_2_x_Mk_83___1000lb_GP_Bombs_LD = {"clsid": "{18617C93-78E7-4359-A8CE-D754103EDF63}", "name": "MER2 with 2 x Mk-83 - 1000lb GP Bombs LD", "weight": 1017}
MER3_with_3_x_M117___750lb_GP_Bombs_LD = {"clsid": "{82F90BEC-0E2E-4CE5-A66E-1E4ADA2B5D1E}", "name": "MER3 with 3 x M117 - 750lb GP Bombs LD", "weight": 1060}
MER6_with_6_x_BLU_107___440lb_Anti_Runway_Penetrator_Bombs = {"clsid": "{752B9782-F962-11d5-9190-00A0249B6F00}", "name": "MER6 with 6 x BLU-107 - 440lb Anti-Runway Penetrator Bombs", "weight": 1800}
MER6_with_6_x_M117___750lb_GP_Bombs_LD = {"clsid": "{6CDB6B36-7165-47D0-889F-6625FB333561}", "name": "MER6 with 6 x M117 - 750lb GP Bombs LD", "weight": 2100}
MER6_with_6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{3C7CD675-7D39-41C5-8735-0F4F537818A8}", "name": "MER6 with 6 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 1392}
MER6_with_6_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{1C97B4A0-AA3B-43A8-8EE7-D11071457185}", "name": "MER6 with 6 x Mk-82 - 500lb GP Bombs LD", "weight": 1506}
Mercury_LLTV_Pod = {"clsid": "{B1EF6B0E-3D91-4047-A7A5-A99E7D8B4A8B}", "name": "Mercury LLTV Pod", "weight": 230}
MICA_IR = {"clsid": "{0DA03783-61E4-40B2-8FAE-6AEE0A5C5AAE}", "name": "MICA IR", "weight": 110}
MICA_RF = {"clsid": "{6D778860-7BB8-4ACB-9E95-BA772C6BBC2C}", "name": "MICA RF", "weight": 110}
MIM_104 = {"clsid": "MIM_104", "name": "M901 PATRIOT", "weight": None}
MIM_72 = {"clsid": "MIM_72", "name": "M48 CHAPARRAL", "weight": None}
Mistral = {"clsid": "{MBDA_MistralG}", "name": "Mistral", "weight": 27.2}
Mistral_ = {"clsid": "{MBDA_MistralD}", "name": "Mistral", "weight": 27.2}
MK_82_28 = {"clsid": "MK_82*28", "name": "28 x Mk-82 - 500lb GP Bombs LD", "weight": 6748}
Mk_20 = {"clsid": "{BRU-32 MK-20}", "name": "Mk-20", "weight": 279.38}
Mk_20_18 = {"clsid": "{ACADB374-6D6C-45A0-BA7C-B22B2E108AE4}", "name": "Mk 20*18", "weight": 3996}
Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets = {"clsid": "{ADD3FAE1-EBF6-4EF9-8EFC-B36B5DDF1E6B}", "name": "Mk-20 Rockeye - 490lbs CBU, 247 x HEAT Bomblets", "weight": 222}
Mk_81___250lb_GP_Bomb_LD = {"clsid": "{90321C8E-7ED1-47D4-A160-E074D5ABD902}", "name": "Mk-81 - 250lb GP Bomb LD", "weight": 118}
Mk_82 = {"clsid": "{BRU-32 MK-82}", "name": "Mk-82", "weight": 298.38}
Mk_82AIR = {"clsid": "{BRU-32 MK-82AIR}", "name": "Mk-82AIR", "weight": 298.38}
Mk_82Y___500lb_GP_Chute_Retarded_HD = {"clsid": "{Mk_82Y}", "name": "Mk-82Y - 500lb GP Chute Retarded HD", "weight": 232}
Mk_82_AIR_Ballute___500lb_GP_Bomb_HD = {"clsid": "{Mk82AIR}", "name": "Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 242}
Mk_82_SnakeEye = {"clsid": "{BRU-32 MK-82SE}", "name": "Mk-82 SnakeEye", "weight": 298.38}
Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{Mk82SNAKEYE}", "name": "Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 249.5}
Mk_82___500lb_GP_Bomb_LD = {"clsid": "{BCE4E030-38E9-423E-98ED-24BE3DA87C32}", "name": "Mk-82 - 500lb GP Bomb LD", "weight": 228}
Mk_83 = {"clsid": "{BRU-32 MK-83}", "name": "Mk-83", "weight": 504.38}
Mk_83CT = {"clsid": "{Mk_83CT}", "name": "Mk-83CT", "weight": 454}
Mk_83_ = {"clsid": "{BRU42_MK83 RS}", "name": "Mk-83", "weight": 575}
Mk_83__ = {"clsid": "{BRU3242_MK83 RS}", "name": "Mk-83", "weight": 632.38}
Mk_83___ = {"clsid": "{PHXBRU3242_MK83 RS}", "name": "Mk-83", "weight": 632.38}
Mk_83___1000lb_GP_Bomb_LD = {"clsid": "{7A44FF09-527C-4B7E-B42B-3F111CFE50FB}", "name": "Mk-83 - 1000lb GP Bomb LD", "weight": 454}
Mk_83____ = {"clsid": "{BRU42_MK83 LS}", "name": "Mk-83", "weight": 575}
Mk_83_____ = {"clsid": "{BRU3242_MK83 LS}", "name": "Mk-83", "weight": 632.38}
Mk_83______ = {"clsid": "{PHXBRU3242_MK83 LS}", "name": "Mk-83", "weight": 632.38}
Mk_84 = {"clsid": "{BRU-32 MK-84}", "name": "Mk-84", "weight": 951.38}
Mk_84_18 = {"clsid": "{F092B80C-BB54-477E-9408-66DEEF740008}", "name": "Mk 84*18", "weight": 16092}
Mk_84_28 = {"clsid": "{D3ABF208-FA56-4D56-BB31-E0D931D57AE3}", "name": "Mk 84*28", "weight": 25032}
Mk_84___2000lb_GP_Bomb_LD = {"clsid": "{AB8B8299-F1CC-4359-89B5-2172E0CF4A5A}", "name": "Mk-84 - 2000lb GP Bomb LD", "weight": 894}
MPS_410 = {"clsid": "{44EE8698-89F9-48EE-AF36-5FD31896A82D}", "name": "MPS-410", "weight": 150}
MPS_410_ = {"clsid": "{44EE8698-89F9-48EE-AF36-5FD31896A82C}", "name": "MPS-410", "weight": 150}
MXU_648_TP = {"clsid": "MXU-648-TP", "name": "MXU-648 Travel Pod", "weight": 300}
ODAB_500PM___525_kg__bomb__parachute__simulated_aerosol = {"clsid": "{ODAB-500PM}", "name": "ODAB-500PM - 525 kg, bomb, parachute, simulated aerosol", "weight": 520}
OFAB_100_120_TU_x_4 = {"clsid": "{OFAB-100-120-TU}", "name": "OFAB-100-120-TU x 4", "weight": 557}
OFAB_100_Jupiter___100kg_GP_Bomb_LD = {"clsid": "{OFAB_100_Jupiter}", "name": "OFAB-100 Jupiter - 100kg GP Bomb LD", "weight": 121}
ORO_57K___S_5M1_HE_FRAG_FFAR_x_8 = {"clsid": "{ORO57K_S5M1_HEFRAG}", "name": "ORO-57K - S-5M1 HE-FRAG FFAR x 8", "weight": 63.88}
ORO_57K___S_5MO_HE_FRAG_FFAR_x_8 = {"clsid": "{ORO57K_S5MO_HEFRAG}", "name": "ORO-57K - S-5MO HE-FRAG FFAR x 8", "weight": 63.88}
ORO_57K___S_5M_x_8 = {"clsid": "{ORO57K_S5M_HEFRAG}", "name": "ORO-57K - S-5M x 8", "weight": 64.92}
oh_58_brauning = {"clsid": "oh-58-brauning", "name": "OH-58D Brauning", "weight": 290}
Pavetack_F_111 = {"clsid": "{199D6D51-1764-497E-9AE5-7D07C8D4D87E}", "name": "Pavetack F-111", "weight": 200}
PKT_7_62 = {"clsid": "PKT_7_62", "name": "PKT 7.62mm MMG", "weight": 90}
PK_3___7_62mm_GPMG = {"clsid": "{PK-3}", "name": "PK-3 - 7.62mm GPMG", "weight": 218}
PTB300_MIG15 = {"clsid": "PTB300_MIG15", "name": "Fuel Tank 300 liters", "weight": 271}
PTB400_MIG15 = {"clsid": "PTB400_MIG15", "name": "Fuel Tank 400 liters", "weight": 364}
PTB400_MIG19 = {"clsid": "PTB400_MIG19", "name": "Fuel Tank 400 liters", "weight": 364}
PTB600_MIG15 = {"clsid": "PTB600_MIG15", "name": "Fuel Tank 600 liters", "weight": 531}
PTB760_MIG19 = {"clsid": "PTB760_MIG19", "name": "Fuel Tank 760 liters", "weight": 663.8}
P_50T___50kg_Practice_Bomb_LD = {"clsid": "{P-50T}", "name": "P-50T - 50kg Practice Bomb LD", "weight": 50}
RBK_250_275___150_x_AO_1SCh__250kg_CBU_HE_Frag = {"clsid": "{RBK_250_275_AO_1SCH}", "name": "RBK-250-275 - 150 x AO-1SCh, 250kg CBU HE/Frag", "weight": 244.6}
RBK_250___42_x_PTAB_2_5M__250kg_CBU_Medium_HEAT_AP = {"clsid": "{4203753F-8198-4E85-9924-6F8FF679F9FF}", "name": "RBK-250 - 42 x PTAB-2.5M, 250kg CBU Medium HEAT/AP", "weight": 244.6}
RBK_500U___126_x_OAB_2_5RT__500kg_CBU_HE_Frag = {"clsid": "{RBK_500U_OAB_2_5RT}", "name": "RBK-500U - 126 x OAB-2.5RT, 500kg CBU HE/Frag", "weight": 427}
RBK_500_255___30_x_PTAB_10_5__500kg_CBU_Heavy_HEAT_AP = {"clsid": "{D5435F26-F120-4FA3-9867-34ACE562EF1B}", "name": "RBK-500-255 - 30 x PTAB-10-5, 500kg CBU Heavy HEAT/AP", "weight": 253}
RBK_500___268_x_PTAB_1M__500kg_CBU_Light_HEAT_AP = {"clsid": "{7AEC222D-C523-425e-B714-719C0D1EB14D}", "name": "RBK-500 - 268 x PTAB-1M, 500kg CBU Light HEAT/AP", "weight": 427}
RB_04E__for_A_I___with_launcher = {"clsid": "{Rb04AI}", "name": "RB-04E (for A.I.) with launcher", "weight": 661}
RB_15F__for_A_I___with_launcher = {"clsid": "{Rb15AI}", "name": "RB-15F (for A.I.) with launcher", "weight": 610}
Rb_04E_Anti_ship_Missile = {"clsid": "{Rb04}", "name": "Rb-04E Anti-ship Missile", "weight": 661}
Rb_05A_MCLOS_ASM_AShM_AAM = {"clsid": "{Robot05}", "name": "Rb-05A MCLOS ASM/AShM/AAM", "weight": 341}
Rb_15F_Programmable_Anti_ship_Missile = {"clsid": "{Rb15}", "name": "Rb-15F Programmable Anti-ship Missile", "weight": 610}
Rb_24J__AIM_9P__Sidewinder_IR_AAM = {"clsid": "{Robot24J}", "name": "Rb-24J (AIM-9P) Sidewinder IR AAM", "weight": 140}
Rb_24__AIM_9B__Sidewinder_IR_AAM = {"clsid": "{Robot24}", "name": "Rb-24 (AIM-9B) Sidewinder IR AAM", "weight": 132}
Rb_74__AIM_9L__Sidewinder_IR_AAM = {"clsid": "{Robot74}", "name": "Rb-74 (AIM-9L) Sidewinder IR AAM", "weight": 144}
Rb_75A__AGM_65A_Maverick___TV_ASM_ = {"clsid": "{RB75}", "name": "Rb-75A (AGM-65A Maverick) (TV ASM)", "weight": 269.5}
Rb_75B__AGM_65B_Maverick___TV_ASM_ = {"clsid": "{RB75B}", "name": "Rb-75B (AGM-65B Maverick) (TV ASM)", "weight": 269.5}
Rb_75T__AGM_65A_Maverick___TV_ASM_Lg_HE_Whd_ = {"clsid": "{RB75T}", "name": "Rb-75T (AGM-65A Maverick) (TV ASM Lg HE Whd)", "weight": 354}
REFLEX_9M119 = {"clsid": "REFLEX_9M119", "name": "AT-11 SNIPER (Reflex)", "weight": None}
RKL609_ECM_Pod__Left_ = {"clsid": "{RKL609_L}", "name": "RKL609 ECM Pod (Left)", "weight": 150}
RKL609_ECM_Pod__Right_ = {"clsid": "{RKL609_R}", "name": "RKL609 ECM Pod (Right)", "weight": 150}
RN_24___470kg__nuclear_bomb__free_fall = {"clsid": "{RN-24}", "name": "RN-24 - 470kg, nuclear bomb, free fall", "weight": 470}
RN_28___260_kg__nuclear_bomb__free_fall = {"clsid": "{RN-28}", "name": "RN-28 - 260 kg, nuclear bomb, free fall", "weight": 260}
ROLAND = {"clsid": "ROLAND", "name": "ROLAND", "weight": None}
RPL_522_1300_liters_Fuel_Tank = {"clsid": "{M2KC_RPL_522}", "name": "RPL 522 1300 liters Fuel Tank", "weight": 1170}
RPL_522_1300_liters_Fuel_Tank__Empty_ = {"clsid": "{M2KC_RPL_522_EMPTY}", "name": "RPL 522 1300 liters Fuel Tank (Empty)", "weight": 180}
RPL_541_2000_liters_Fuel_Tank_ = {"clsid": "{M2KC_02_RPL541}", "name": "RPL 541 2000 liters Fuel Tank ", "weight": 1837}
RPL_541_2000_liters_Fuel_Tank__ = {"clsid": "{M2KC_08_RPL541}", "name": "RPL 541 2000 liters Fuel Tank ", "weight": 1837}
RPL_541_2000_liters_Fuel_Tank__Empty_ = {"clsid": "{M2KC_02_RPL541_EMPTY}", "name": "RPL 541 2000 liters Fuel Tank (Empty)", "weight": 257}
RPL_541_2000_liters_Fuel_Tank__Empty__ = {"clsid": "{M2KC_08_RPL541_EMPTY}", "name": "RPL 541 2000 liters Fuel Tank (Empty)", "weight": 257}
RP_3_25lb_AP_Mk_I = {"clsid": "{British_AP_25LBNo1_3INCHNo1}", "name": "RP-3 25lb AP Mk.I", "weight": 22}
RP_3_60lb_F_No1_Mk_I = {"clsid": "{British_HE_60LBFNo1_3INCHNo1}", "name": "RP-3 60lb F No1 Mk.I", "weight": 31.6}
RP_3_60lb_SAP_No2_Mk_I = {"clsid": "{British_HE_60LBSAPNo2_3INCHNo1}", "name": "RP-3 60lb SAP No2 Mk.I", "weight": 38.1}
RS2US___AAM__beam_rider = {"clsid": "{RS-2US}", "name": "RS2US - AAM, beam-rider", "weight": 105.2}
R_13M1___AAM__IR_guided = {"clsid": "{R-13M1}", "name": "R-13M1 - AAM, IR guided", "weight": 122.4}
R_13M___AAM__IR_guided = {"clsid": "{R-13M}", "name": "R-13M - AAM, IR guided", "weight": 119.7}
R_24R__AA_7_Apex_SA____Semi_Act_Rdr = {"clsid": "{CCF898C9-5BC7-49A4-9D1E-C3ED3D5166A1}", "name": "R-24R (AA-7 Apex SA) - Semi-Act Rdr", "weight": 215}
R_24T__AA_7_Apex_IR____Infra_Red = {"clsid": "{6980735A-44CC-4BB9-A1B5-591532F1DC69}", "name": "R-24T (AA-7 Apex IR) - Infra Red", "weight": 215}
R_27ER__AA_10_Alamo_C____Semi_Act_Extended_Range = {"clsid": "{E8069896-8435-4B90-95C0-01A03AE6E400}", "name": "R-27ER (AA-10 Alamo C) - Semi-Act Extended Range", "weight": 350}
R_27ET__AA_10_Alamo_D____IR_Extended_Range = {"clsid": "{B79C379A-9E87-4E50-A1EE-7F7E29C2E87A}", "name": "R-27ET (AA-10 Alamo D) - IR Extended Range", "weight": 343}
R_27R__AA_10_Alamo_A____Semi_Act_Rdr = {"clsid": "{9B25D316-0434-4954-868F-D51DB1A38DF0}", "name": "R-27R (AA-10 Alamo A) - Semi-Act Rdr", "weight": 253}
R_27T__AA_10_Alamo_B____Infra_Red = {"clsid": "{88DAC840-9F75-4531-8689-B46E64E42E53}", "name": "R-27T (AA-10 Alamo B) - Infra Red", "weight": 254}
R_33__AA_9_Amos____Semi_Act_Rdr = {"clsid": "{F1243568-8EF0-49D4-9CB5-4DA90D92BC1D}", "name": "R-33 (AA-9 Amos) - Semi-Act Rdr", "weight": 490}
R_3R___AAM__radar_guided = {"clsid": "{R-3R}", "name": "R-3R - AAM, radar guided", "weight": 111.5}
R_3S___AAM__IR_guided = {"clsid": "{R-3S}", "name": "R-3S - AAM, IR guided", "weight": 103.3}
R_40R__AA_6_Acrid____Semi_Act_Rdr = {"clsid": "{4EDBA993-2E34-444C-95FB-549300BF7CAF}", "name": "R-40R (AA-6 Acrid) - Semi-Act Rdr", "weight": 475}
R_40T__AA_6_Acrid____Infra_Red = {"clsid": "{5F26DBC2-FB43-4153-92DE-6BBCE26CB0FF}", "name": "R-40T (AA-6 Acrid) - Infra Red", "weight": 475}
R_550_Magic_2 = {"clsid": "{FC23864E-3B80-48E3-9C03-4DA8B1D7497B}", "name": "R.550 Magic 2", "weight": 89}
R_55___AAM__IR_guided = {"clsid": "{R-55}", "name": "R-55 - AAM, IR guided", "weight": 113}
R_60 = {"clsid": "{R-60}", "name": "R-60", "weight": 58.5}
R_60M = {"clsid": "{R-60M}", "name": "R-60M", "weight": 58.5}
R_60M_x_2 = {"clsid": "{R-60M 2L}", "name": "R-60M x 2", "weight": 122}
R_60M_x_2_ = {"clsid": "{R-60M 2R}", "name": "R-60M x 2", "weight": 122}
R_60M__AA_8_Aphid____Infra_Red = {"clsid": "{682A481F-0CB5-4693-A382-D00DD4A156D7}", "name": "R-60M (AA-8 Aphid) - Infra Red", "weight": 44}
R_60_x_2 = {"clsid": "{R-60 2L}", "name": "R-60 x 2", "weight": 122}
R_60_x_2_ = {"clsid": "{R-60 2R}", "name": "R-60 x 2", "weight": 122}
R_73__AA_11_Archer____Infra_Red = {"clsid": "{FBC29BFE-3D24-4C64-B81D-941239D12249}", "name": "R-73 (AA-11 Archer) - Infra Red", "weight": 110}
R_73__AA_11_Archer____Infra_Red_ = {"clsid": "{CBC29BFE-3D24-4C64-B81D-941239D12249}", "name": "R-73 (AA-11 Archer) - Infra Red", "weight": 110}
R_77__AA_12_Adder____Active_Rdr = {"clsid": "{B4C01D60-A8A3-4237-BD72-CA7655BC0FE9}", "name": "R-77 (AA-12 Adder) - Active Rdr", "weight": 175}
R_77__AA_12_Adder____Active_Rdr_ = {"clsid": "{B4C01D60-A8A3-4237-BD72-CA7655BC0FEC}", "name": "R-77 (AA-12 Adder) - Active Rdr", "weight": 250}
SAB_100___100kg_flare_illumination_Bomb = {"clsid": "{0511E528-EA28-4caf-A212-00D1408DF10A}", "name": "SAB-100 - 100kg flare/illumination Bomb", "weight": 100}
Sand_Filter = {"clsid": "{FAS}", "name": "Sand Filter", "weight": 15}
SC_250_Type_1_L2___250kg_GP_Bomb_LD = {"clsid": "{SC_250_T1_L2}", "name": "SC 250 Type 1 L2 - 250kg GP Bomb LD", "weight": 250}
SC_250_Type_3_J___250kg_GP_Bomb_LD = {"clsid": "{Schloss500XIIC1_SC_250_T3_J}", "name": "SC 250 Type 3 J - 250kg GP Bomb LD", "weight": 270}
SC_500_L2___500kg_GP_Bomb_LD = {"clsid": "{SC_500_L2}", "name": "SC 500 L2 - 500kg GP Bomb LD", "weight": 500}
SC_501_SC250 = {"clsid": "SC_501_SC250", "name": "SC 250 Type 3 J - 250kg GP Bomb LD", "weight": 250}
SC_501_SC500 = {"clsid": "SC_501_SC500", "name": "SC 500 J - 500kg GP Bomb LD", "weight": 500}
SC_50___50kg_GP_Bomb_LD = {"clsid": "{SC_50}", "name": "SC 50 - 50kg GP Bomb LD", "weight": 50}
SD_250_Stg___250kg_GP_Bomb_LD = {"clsid": "{SD_250_Stg}", "name": "SD 250 Stg - 250kg GP Bomb LD", "weight": 250}
SD_500_A___500kg_GP_Bomb_LD = {"clsid": "{SD_500_A}", "name": "SD 500 A - 500kg GP Bomb LD", "weight": 500}
SEASPARROW = {"clsid": "SEASPARROW", "name": "SEASPARROW", "weight": None}
Sea_Eagle___ASM = {"clsid": "{1461CD18-429A-42A9-A21F-4C621ECD4573}", "name": "Sea Eagle - ASM", "weight": 600}
Shpil_2_Laser_Recon__Intel_Pod = {"clsid": "{0519A263-0AB6-11d6-9193-00A0249B6F00}", "name": "Shpil-2 Laser Recon & Intel Pod", "weight": 200}
Sky_Shadow_ECM_Pod = {"clsid": "{8C3F26A2-FA0F-11d5-9190-00A0249B6F00}", "name": "Sky-Shadow ECM Pod", "weight": 200}
SM2 = {"clsid": "SM2", "name": "SM2", "weight": None}
Smokewinder___blue = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E743}", "name": "Smokewinder - blue", "weight": 200}
Smokewinder___green = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E742}", "name": "Smokewinder - green", "weight": 200}
Smokewinder___orange = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E746}", "name": "Smokewinder - orange", "weight": 200}
Smokewinder___red = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E741}", "name": "Smokewinder - red", "weight": 200}
Smokewinder___white = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E744}", "name": "Smokewinder - white", "weight": 200}
Smokewinder___yellow = {"clsid": "{A4BCC903-06C8-47bb-9937-A30FEDB4E745}", "name": "Smokewinder - yellow", "weight": 200}
Smoke_for_Christen_Eagle_II__white = {"clsid": "{CE2_SMOKE_WHITE}", "name": "Smoke for Christen Eagle II, white", "weight": 7}
Smoke_Generator___blue = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B3}", "name": "Smoke Generator - blue", "weight": 220}
Smoke_Generator___blue_ = {"clsid": "{INV-SMOKE-BLUE}", "name": "Smoke Generator - blue", "weight": 0}
Smoke_Generator___green = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B2}", "name": "Smoke Generator - green", "weight": 220}
Smoke_Generator___green_ = {"clsid": "{INV-SMOKE-GREEN}", "name": "Smoke Generator - green", "weight": 0}
Smoke_Generator___orange = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B6}", "name": "Smoke Generator - orange", "weight": 220}
Smoke_Generator___orange_ = {"clsid": "{INV-SMOKE-ORANGE}", "name": "Smoke Generator - orange", "weight": 0}
Smoke_Generator___red = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B1}", "name": "Smoke Generator - red", "weight": 220}
Smoke_Generator___red_ = {"clsid": "{INV-SMOKE-RED}", "name": "Smoke Generator - red", "weight": 0}
Smoke_Generator___white = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B4}", "name": "Smoke Generator - white", "weight": 220}
Smoke_Generator___white_ = {"clsid": "{INV-SMOKE-WHITE}", "name": "Smoke Generator - white", "weight": 0}
Smoke_Generator___yellow = {"clsid": "{D3F65166-1AB8-490f-AF2F-2FB6E22568B5}", "name": "Smoke Generator - yellow", "weight": 220}
Smoke_Generator___yellow_ = {"clsid": "{INV-SMOKE-YELLOW}", "name": "Smoke Generator - yellow", "weight": 0}
Smoke_System_red_colorant = {"clsid": "{SMOKE-RED-AVIOJET}", "name": "Smoke System red colorant", "weight": 32.6}
Smoke_System_yellow_colorant = {"clsid": "{SMOKE-YELLOW-AVIOJET}", "name": "Smoke System yellow colorant", "weight": 32.6}
Smoke_System__White_Smoke_ = {"clsid": "{SMOKE-SYSTEM-AVIOJET}", "name": "Smoke System (White Smoke)", "weight": 1}
Smoke___red___21__t = {"clsid": "{MIG21_SMOKE_RED}", "name": "Smoke - red - 21 /t", "weight": 30}
Smoke___white___21 = {"clsid": "{SMOKE_WHITE}", "name": "Smoke - white - 21", "weight": 30}
Smoke___white___21_ = {"clsid": "{MIG21_SMOKE_WHITE}", "name": "Smoke - white - 21", "weight": 30}
SPITFIRE_45GAL_SLIPPER_TANK = {"clsid": "SPITFIRE_45GAL_SLIPPER_TANK", "name": "45 gal. Slipper Tank", "weight": 138.647}
SPITFIRE_45GAL_TORPEDO_TANK = {"clsid": "SPITFIRE_45GAL_TORPEDO_TANK", "name": "45 gal. Torpedo Tank", "weight": 144.647}
SPPU_22_1___2_x_23mm__GSh_23L_Autocannon_Pod = {"clsid": "{E92CBFE5-C153-11d8-9897-000476191836}", "name": "SPPU-22-1 - 2 x 23mm GSh-23L Autocannon Pod", "weight": 290}
SPRD_99_takeoff_rocket = {"clsid": "{SPRD}", "name": "SPRD-99 takeoff rocket", "weight": 500}
SPS_141_100__21____jamming_and_countermeasures_pod = {"clsid": "{SPS-141-100}", "name": "SPS-141-100 (21) - jamming and countermeasures pod", "weight": 150}
SPS_141___ECM_Jamming_Pod = {"clsid": "{F75187EF-1D9E-4DA9-84B4-1A1A14A3973A}", "name": "SPS-141 - ECM Jamming Pod", "weight": 150}
SUU_25_x_8_LUU_2___Target_Marker_Flares = {"clsid": "{CAE48299-A294-4bad-8EE6-89EFC5DCDF00}", "name": "SUU-25 x 8 LUU-2 - Target Marker Flares", "weight": 130}
SUU_25___8_LUU_2 = {"clsid": "{BRU42_SUU25}", "name": "SUU-25 * 8 LUU-2", "weight": 258}
SUU_25___8_LUU_2_ = {"clsid": "{BRU3242_SUU25}", "name": "SUU-25 * 8 LUU-2", "weight": 315.38}
Super_530D = {"clsid": "{FD21B13E-57F3-4C2A-9F78-C522D0B5BCE1}", "name": "Super 530D", "weight": 270}
SVIR_9M119 = {"clsid": "SVIR_9M119", "name": "AT-11 SNIPER (Svir')", "weight": None}
S_24A__21____180_kg__cumulative_unguided_rocket = {"clsid": "{S-24A}", "name": "S-24A (21) - 180 kg, cumulative unguided rocket", "weight": 235}
S_24B__21____180_kg__fragmented_unguided_rocket = {"clsid": "{S-24B}", "name": "S-24B (21) - 180 kg, fragmented unguided rocket", "weight": 235}
S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk_ = {"clsid": "{1FA14DEA-8CDB-45AD-88A8-EC068DF1E65A}", "name": "S-24B - 240mm UnGd Rkt, 235kg, HE/Frag, (Low Smk)", "weight": 235}
S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk__ = {"clsid": "{3858707D-F5D5-4bbb-BDD8-ABB0530EBC7C}", "name": "S-24B - 240mm UnGd Rkt, 235kg, HE/Frag, (Low Smk)", "weight": 295}
S_25L___320Kg__340mm_Laser_Guided_Rkt = {"clsid": "{0180F983-C14A-11d8-9897-000476191836}", "name": "S-25L - 320Kg, 340mm Laser Guided Rkt", "weight": 500}
S_25_OFM___340mm_UnGd_Rkt__480kg_Penetrator = {"clsid": "{A0648264-4BC0-4EE8-A543-D119F6BA4257}", "name": "S-25-OFM - 340mm UnGd Rkt, 480kg Penetrator", "weight": 495}
S_25_O___420mm_UnGd_Rkt__380kg_Frag = {"clsid": "{S_25_O}", "name": "S-25-O - 420mm UnGd Rkt, 380kg Frag", "weight": 445}
Tangazh_ELINT_pod = {"clsid": "{0519A262-0AB6-11d6-9193-00A0249B6F00}", "name": "Tangazh ELINT pod", "weight": 200}
TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb = {"clsid": "{TER_9A_2L*CBU-87}", "name": "TER-9A with 2 x CBU-87 - 202 x CEM Cluster Bomb", "weight": 913}
TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb_ = {"clsid": "{TER_9A_2R*CBU-87}", "name": "TER-9A with 2 x CBU-87 - 202 x CEM Cluster Bomb", "weight": 913}
TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb = {"clsid": "{TER_9A_2L*CBU-97}", "name": "TER-9A with 2 x CBU-97 - 10 x SFW Cluster Bomb", "weight": 887}
TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb_ = {"clsid": "{TER_9A_2R*CBU-97}", "name": "TER-9A with 2 x CBU-97 - 10 x SFW Cluster Bomb", "weight": 887}
TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb = {"clsid": "{TER_9A_2L*GBU-12}", "name": "TER-9A with 2 x GBU-12 - 500lb Laser Guided Bomb", "weight": 607}
TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb_ = {"clsid": "{TER_9A_2R*GBU-12}", "name": "TER-9A with 2 x GBU-12 - 500lb Laser Guided Bomb", "weight": 607}
TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_2L*MK-82AIR}", "name": "TER-9A with 2 x Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 537}
TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD_ = {"clsid": "{TER_9A_2R*MK-82AIR}", "name": "TER-9A with 2 x Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 537}
TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_2L*MK-82_Snakeye}", "name": "TER-9A with 2 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 552}
TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD_ = {"clsid": "{TER_9A_2R*MK-82_Snakeye}", "name": "TER-9A with 2 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 552}
TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD = {"clsid": "{TER_9A_2L*MK-82}", "name": "TER-9A with 2 x Mk-82 - 500lb GP Bomb LD", "weight": 509}
TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD_ = {"clsid": "{TER_9A_2R*MK-82}", "name": "TER-9A with 2 x Mk-82 - 500lb GP Bomb LD", "weight": 509}
TER_9A_with_3_x_BDU_33___25lb_Practice_Bomb_LD = {"clsid": "{TER_9A_3*BDU-33}", "name": "TER-9A with 3 x BDU-33 - 25lb Practice Bomb LD", "weight": 86.9}
TER_9A_with_3_x_CBU_87___202_x_CEM_Cluster_Bomb = {"clsid": "{TER_9A_3*CBU-87}", "name": "TER-9A with 3 x CBU-87 - 202 x CEM Cluster Bomb", "weight": 1343}
TER_9A_with_3_x_CBU_97___10_x_SFW_Cluster_Bomb = {"clsid": "{TER_9A_3*CBU-97}", "name": "TER-9A with 3 x CBU-97 - 10 x SFW Cluster Bomb", "weight": 1304}
TER_9A_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_3*MK-82AIR}", "name": "TER-9A with 3 x Mk-82 AIR Ballute - 500lb GP Bomb HD", "weight": 779}
TER_9A_with_3_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{TER_9A_3*MK-82_Snakeye}", "name": "TER-9A with 3 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 801.5}
TER_9A_with_3_x_Mk_82___500lb_GP_Bomb_LD = {"clsid": "{TER_9A_3*MK-82}", "name": "TER-9A with 3 x Mk-82 - 500lb GP Bomb LD", "weight": 737}
TEST_ROTARY_LAUNCHER_MK82 = {"clsid": "TEST_ROTARY_LAUNCHER_MK82", "name": "TEST ROTARY LAUNCHER MK82", "weight": 6748}
TGM_65H = {"clsid": "TGM_65H", "name": "TGM-65H - Trg Round for Mav H (CCD)", "weight": 208}
TORNADO_Fuel_tank = {"clsid": "{EF124821-F9BB-4314-A153-E0E2FE1162C4}", "name": "TORNADO Fuel tank", "weight": 1275}
TOW = {"clsid": "TOW", "name": "BGM-71D TOW ATGM", "weight": None}
U22_A_Jammer = {"clsid": "{U22A}", "name": "U22/A Jammer", "weight": 348}
UB_16UM_pod___16_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag = {"clsid": "{UB-16-57UMP}", "name": "UB-16UM pod - 16 x S-5KO, 57mm UnGd Rkts, HEAT/Frag", "weight": 138}
UB_16UM___16_S_5M = {"clsid": "{UB-16_S5M}", "name": "UB-16UM - 16 S-5M", "weight": 119.76}
UB_32A_24_pod___32_x_S_5KO = {"clsid": "{UB-32A-24}", "name": "UB-32A-24 pod - 32 x S-5KO", "weight": 275}
UB_32A_pod___32_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag = {"clsid": "{637334E4-AB5A-47C0-83A6-51B7F1DF3CD5}", "name": "UB-32A pod - 32 x S-5KO, 57mm UnGd Rkts, HEAT/Frag", "weight": 275}
UB_32M___32_S_5M = {"clsid": "{UB-32_S5M}", "name": "UB-32M - 32 S-5M", "weight": 228.52}
UPK_23_250___2_x_23mm__GSh_23L_Autocannon_Pod = {"clsid": "{05544F1A-C39C-466b-BC37-5BD1D52E57BB}", "name": "UPK-23-250 - 2 x 23mm GSh-23L Autocannon Pod", "weight": 218}
UPK_23_250___gun_pod = {"clsid": "{UPK-23-250 MiG-21}", "name": "UPK-23-250 - gun pod", "weight": 218}
U_22_Jammer_pod = {"clsid": "{U22}", "name": "U/22 Jammer pod", "weight": 348}
Werfer_Granate_21___21_cm_UnGd_air_to_air_rocket = {"clsid": "{WGr21}", "name": "Werfer-Granate 21 - 21 cm UnGd air-to-air rocket", "weight": 121}
XM158_M151 = {"clsid": "XM158_M151", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 112}
XM158_M156 = {"clsid": "XM158_M156", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M156, Wht Phos", "weight": 112}
XM158_M257 = {"clsid": "XM158_M257", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M257, Para Illum", "weight": 112}
XM158_M274 = {"clsid": "XM158_M274", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts M274, Practice Smk", "weight": 112}
XM158_MK1 = {"clsid": "XM158_MK1", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk1, Practice", "weight": 112}
XM158_MK5 = {"clsid": "XM158_MK5", "name": "XM158 pod - 7 x 2.75\" Hydra, UnGd Rkts Mk5, HEAT", "weight": 112}
_100_gal__Drop_Tank = {"clsid": "{MOSQUITO_100GAL_SLIPPER_TANK}", "name": "100 gal. Drop Tank", "weight": 375.3}
_108_US_gal__Paper_Fuel_Tank = {"clsid": "{US_108GAL_PAPER_FUEL_TANK}", "name": "108 US gal. Paper Fuel Tank", "weight": 319}
_110_US_gal__Fuel_Tank = {"clsid": "{US_110GAL_FUEL_TANK}", "name": "110 US gal. Fuel Tank", "weight": 349}
_12_AN_M64___500lb_GP_Bomb_LD = {"clsid": "{12xM64}", "name": "12 AN-M64 - 500lb GP Bomb LD", "weight": 2744}
_12_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{D6A0441E-6794-4FEB-87F7-E68E2290DFAB}", "name": "12 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 478}
_12_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{E70446B7-C7E6-4B95-B685-DEA10CAD1A0E}", "name": "12 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 6000}
_13_R4M_3_2kg_UnGd_air_to_air_rocket = {"clsid": "{FW_190_R4M_LEFT_WING}", "name": "13 R4M 3.2kg UnGd air-to-air rocket", "weight": 70.05}
_13_R4M_3_2kg_UnGd_air_to_air_rocket_ = {"clsid": "{FW_190_R4M_RGHT_WING}", "name": "13 R4M 3.2kg UnGd air-to-air rocket", "weight": 70.05}
_150_US_gal__Fuel_Tank = {"clsid": "{US_150GAL_FUEL_TANK}", "name": "150 US gal. Fuel Tank", "weight": 458.8}
_20_x_AGM_86C_ALCM = {"clsid": "{22906569-A97F-404B-BA4F-D96DBF94D05E}", "name": "20 x AGM-86C ALCM", "weight": 39000}
_24_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{B0241BD2-5628-47E0-954C-A8675B7E698E}", "name": "24 x FAB-250 - 250kg GP Bombs LD", "weight": 6000}
_250_lb_GP_Mk_I = {"clsid": "{British_GP_250LB_Bomb_Mk1}", "name": "250 lb GP Mk.I", "weight": 104.326}
_250_lb_GP_Mk_IV = {"clsid": "{British_GP_250LB_Bomb_Mk4}", "name": "250 lb GP Mk.IV", "weight": 104.326}
_250_lb_GP_Mk_IV_ = {"clsid": "{British_GP_250LB_Bomb_Mk4_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb GP Mk.IV", "weight": 109.626}
_250_lb_GP_Mk_V = {"clsid": "{British_GP_250LB_Bomb_Mk5}", "name": "250 lb GP Mk.V", "weight": 104.326}
_250_lb_GP_Mk_V_ = {"clsid": "{British_GP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb GP Mk.V", "weight": 109.626}
_250_lb_MC_Mk_I = {"clsid": "{British_MC_250LB_Bomb_Mk1}", "name": "250 lb MC Mk.I", "weight": 102}
_250_lb_MC_Mk_II = {"clsid": "{British_MC_250LB_Bomb_Mk2}", "name": "250 lb MC Mk.II", "weight": 102}
_250_lb_MC_Mk_II_ = {"clsid": "{British_MC_250LB_Bomb_Mk2_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb MC Mk.II", "weight": 107.3}
_250_lb_MC_Mk_I_ = {"clsid": "{British_MC_250LB_Bomb_Mk1_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb MC Mk.I", "weight": 107.3}
_250_lb_S_A_P_ = {"clsid": "{British_SAP_250LB_Bomb_Mk5}", "name": "250 lb S.A.P.", "weight": 111.13}
_250_lb_S_A_P__ = {"clsid": "{British_SAP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}", "name": "250 lb S.A.P.", "weight": 116.43}
_27_x_M117___750lb_GP_Bombs_LD = {"clsid": "{B58F99BA-5480-4572-8602-28B0449F5260}", "name": "27 x M117 - 750lb GP Bombs LD", "weight": 9180}
_27_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{6C47D097-83FF-4FB2-9496-EAB36DDF0B05}", "name": "27 x Mk-82 - 500lb GP Bombs LD", "weight": 6507}
_2xGBU_12___500lb_Laser_Guided_Bomb = {"clsid": "{89D000B0-0360-461A-AD83-FB727E2ABA98}", "name": "2xGBU-12 - 500lb Laser Guided Bomb", "weight": 610.25}
_2xGBU_12___500lb_Laser_Guided_Bomb_ = {"clsid": "{BRU-42_2xGBU-12_right}", "name": "2xGBU-12 - 500lb Laser Guided Bomb", "weight": 610.25}
_2x_80kg_LYSB_71_Illumination_Bomb = {"clsid": "{LYSBOMB}", "name": "2x 80kg LYSB-71 Illumination Bomb", "weight": 220}
_2_BDU_45 = {"clsid": "{BRU42_2*BDU45 RS}", "name": "2 BDU-45", "weight": 592}
_2_BDU_45B = {"clsid": "{BRU42_2*BDU45B RS}", "name": "2 BDU-45B", "weight": 592}
_2_BDU_45B_ = {"clsid": "{BRU3242_2*BDU45B RS}", "name": "2 BDU-45B", "weight": 649.38}
_2_BDU_45B__ = {"clsid": "{PHXBRU3242_2*BDU45B RS}", "name": "2 BDU-45B", "weight": 649.38}
_2_BDU_45B___ = {"clsid": "{BRU42_2*BDU45B LS}", "name": "2 BDU-45B", "weight": 592}
_2_BDU_45B____ = {"clsid": "{BRU3242_2*BDU45B LS}", "name": "2 BDU-45B", "weight": 649.38}
_2_BDU_45B_____ = {"clsid": "{PHXBRU3242_2*BDU45B LS}", "name": "2 BDU-45B", "weight": 649.38}
_2_BDU_45_ = {"clsid": "{BRU3242_2*BDU45 RS}", "name": "2 BDU-45", "weight": 649.38}
_2_BDU_45__ = {"clsid": "{PHXBRU3242_2*BDU45 RS}", "name": "2 BDU-45", "weight": 649.38}
_2_BDU_45___ = {"clsid": "{BRU42_2*BDU45 LS}", "name": "2 BDU-45", "weight": 592}
_2_BDU_45____ = {"clsid": "{BRU3242_2*BDU45 LS}", "name": "2 BDU-45", "weight": 649.38}
_2_BDU_45_____ = {"clsid": "{PHXBRU3242_2*BDU45 LS}", "name": "2 BDU-45", "weight": 649.38}
_2_CBU_99 = {"clsid": "{BRU-70_2*CBU-99_LEFT}", "name": "2 CBU-99", "weight": 541}
_2_CBU_99_ = {"clsid": "{BRU-70_2*CBU-99_RIGHT}", "name": "2 CBU-99", "weight": 541}
_2_GBU_12 = {"clsid": "{BRU-42_2*GBU-12_LEFT}", "name": "2 GBU-12", "weight": 547}
_2_GBU_12_ = {"clsid": "{BRU-42_2*GBU-12_RIGHT}", "name": "2 GBU-12", "weight": 547}
_2_GBU_16 = {"clsid": "{BRU-42_2*GBU-16_LEFT}", "name": "2 GBU-16", "weight": 1005}
_2_GBU_16_ = {"clsid": "{BRU-42_2*GBU-16_RIGHT}", "name": "2 GBU-16", "weight": 1005}
_2_GBU_38 = {"clsid": "{BRU-42_2*GBU-38_LEFT}", "name": "2 GBU-38", "weight": 579}
_2_GBU_38_ = {"clsid": "{BRU-42_2*GBU-38_RIGHT}", "name": "2 GBU-38", "weight": 579}
_2_GBU_54_V_1_B = {"clsid": "{BRU-70A_2*GBU-54_LEFT}", "name": "2 GBU-54(V)1/B", "weight": 603}
_2_GBU_54_V_1_B_ = {"clsid": "{BRU-70A_2*GBU-54_RIGHT}", "name": "2 GBU-54(V)1/B", "weight": 603}
_2_LAU_10___4_ZUNI_MK_71 = {"clsid": "{BRU42_2*LAU10 L}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1008}
_2_LAU_10___4_ZUNI_MK_71_ = {"clsid": "{BRU3242_2*LAU10 L}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38}
_2_LAU_10___4_ZUNI_MK_71__ = {"clsid": "{BRU42_2*LAU10 R}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1008}
_2_LAU_10___4_ZUNI_MK_71___ = {"clsid": "{BRU3242_2*LAU10 R}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38}
_2_LAU_10___4_ZUNI_MK_71____ = {"clsid": "{BRU42_2*LAU10 RS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1008}
_2_LAU_10___4_ZUNI_MK_71_____ = {"clsid": "{BRU3242_2*LAU10 RS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38}
_2_LAU_10___4_ZUNI_MK_71______ = {"clsid": "{PHXBRU3242_2*LAU10 RS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38}
_2_LAU_10___4_ZUNI_MK_71_______ = {"clsid": "{BRU42_2*LAU10 LS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1008}
_2_LAU_10___4_ZUNI_MK_71________ = {"clsid": "{BRU3242_2*LAU10 LS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38}
_2_LAU_10___4_ZUNI_MK_71_________ = {"clsid": "{PHXBRU3242_2*LAU10 LS}", "name": "2 LAU-10 - 4 ZUNI MK 71", "weight": 1065.38}
_2_LUU_2 = {"clsid": "{BRU42_2*LUU2 R}", "name": "2 LUU-2", "weight": 155.2}
_2_LUU_2_ = {"clsid": "{BRU3242_2*LUU2 R}", "name": "2 LUU-2", "weight": 212.58}
_2_LUU_2__ = {"clsid": "{BRU42_2*LUU2 L}", "name": "2 LUU-2", "weight": 155.2}
_2_LUU_2___ = {"clsid": "{BRU3242_2*LUU2 L}", "name": "2 LUU-2", "weight": 212.58}
_2_MK_20 = {"clsid": "{BRU42_2*MK20 RS}", "name": "2 MK-20", "weight": 572}
_2_MK_20_ = {"clsid": "{BRU3242_2*MK20 RS}", "name": "2 MK-20", "weight": 629.38}
_2_MK_20__ = {"clsid": "{PHXBRU3242_2*MK20 RS}", "name": "2 MK-20", "weight": 629.38}
_2_MK_20___ = {"clsid": "{BRU42_2*MK20 LS}", "name": "2 MK-20", "weight": 572}
_2_MK_20____ = {"clsid": "{BRU3242_2*MK20 LS}", "name": "2 MK-20", "weight": 629.38}
_2_MK_20_____ = {"clsid": "{PHXBRU3242_2*MK20 LS}", "name": "2 MK-20", "weight": 629.38}
_2_Mk_20_Rockeye = {"clsid": "{BRU-42_2*MK-20_LEFT}", "name": "2 Mk-20 Rockeye", "weight": 541}
_2_Mk_20_Rockeye_ = {"clsid": "{BRU-42_2*MK-20_RIGHT}", "name": "2 Mk-20 Rockeye", "weight": 541}
_2_Mk_81 = {"clsid": "{BRU42_2*MK81 RS}", "name": "2 Mk-81", "weight": 364}
_2_Mk_81_ = {"clsid": "{BRU3242_2*MK81 RS}", "name": "2 Mk-81", "weight": 421.38}
_2_Mk_81__ = {"clsid": "{PHXBRU3242_2*MK81 RS}", "name": "2 Mk-81", "weight": 421.38}
_2_Mk_81___ = {"clsid": "{BRU42_2*MK81 LS}", "name": "2 Mk-81", "weight": 364}
_2_Mk_81____ = {"clsid": "{BRU3242_2*MK81 LS}", "name": "2 Mk-81", "weight": 421.38}
_2_Mk_81_____ = {"clsid": "{PHXBRU3242_2*MK81 LS}", "name": "2 Mk-81", "weight": 421.38}
_2_Mk_82 = {"clsid": "{BRU-42_2*Mk-82_LEFT}", "name": "2 Mk-82", "weight": 579}
_2_Mk_82AIR = {"clsid": "{BRU42_2*MK82AIR RS}", "name": "2 Mk-82AIR", "weight": 610}
_2_Mk_82AIR_ = {"clsid": "{BRU3242_2*MK82AIR RS}", "name": "2 Mk-82AIR", "weight": 667.38}
_2_Mk_82AIR__ = {"clsid": "{PHXBRU3242_2*MK82AIR RS}", "name": "2 Mk-82AIR", "weight": 667.38}
_2_Mk_82AIR___ = {"clsid": "{BRU42_2*MK82AIR LS}", "name": "2 Mk-82AIR", "weight": 610}
_2_Mk_82AIR____ = {"clsid": "{BRU3242_2*MK82AIR LS}", "name": "2 Mk-82AIR", "weight": 667.38}
_2_Mk_82AIR_____ = {"clsid": "{PHXBRU3242_2*MK82AIR LS}", "name": "2 Mk-82AIR", "weight": 667.38}
_2_Mk_82_ = {"clsid": "{BRU-42_2*Mk-82_RIGHT}", "name": "2 Mk-82", "weight": 579}
_2_Mk_82_AIR = {"clsid": "{BRU-42_2*Mk-82AIR_LEFT}", "name": "2 Mk-82 AIR", "weight": 579}
_2_Mk_82_AIR_ = {"clsid": "{BRU-42_2*Mk-82AIR_RIGHT}", "name": "2 Mk-82 AIR", "weight": 579}
_2_Mk_82_SnakeEye = {"clsid": "{BRU42_2*MK82SE RS}", "name": "2 Mk-82 SnakeEye", "weight": 610}
_2_Mk_82_SnakeEye_ = {"clsid": "{BRU3242_2*MK82SE RS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38}
_2_Mk_82_SnakeEye__ = {"clsid": "{PHXBRU3242_2*MK82SE RS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38}
_2_Mk_82_SnakeEye___ = {"clsid": "{BRU42_2*MK82SE LS}", "name": "2 Mk-82 SnakeEye", "weight": 610}
_2_Mk_82_SnakeEye____ = {"clsid": "{BRU3242_2*MK82SE LS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38}
_2_Mk_82_SnakeEye_____ = {"clsid": "{PHXBRU3242_2*MK82SE LS}", "name": "2 Mk-82 SnakeEye", "weight": 667.38}
_2_Mk_82_Snakeye = {"clsid": "{BRU-42_2*Mk-82SNAKEYE_LEFT}", "name": "2 Mk-82 Snakeye", "weight": 579}
_2_Mk_82_Snakeye_ = {"clsid": "{BRU-42_2*Mk-82SNAKEYE_RIGHT}", "name": "2 Mk-82 Snakeye", "weight": 579}
_2_Mk_82__ = {"clsid": "{BRU42_2*MK82 RS}", "name": "2 Mk-82", "weight": 610}
_2_Mk_82___ = {"clsid": "{BRU3242_2*MK82 RS}", "name": "2 Mk-82", "weight": 667.38}
_2_Mk_82____ = {"clsid": "{PHXBRU3242_2*MK82 RS}", "name": "2 Mk-82", "weight": 667.38}
_2_Mk_82_____ = {"clsid": "{BRU42_2*MK82 LS}", "name": "2 Mk-82", "weight": 610}
_2_Mk_82______ = {"clsid": "{BRU3242_2*MK82 LS}", "name": "2 Mk-82", "weight": 667.38}
_2_Mk_82_______ = {"clsid": "{PHXBRU3242_2*MK82 LS}", "name": "2 Mk-82", "weight": 667.38}
_2_Mk_83 = {"clsid": "{BRU-42_2*Mk-83_LEFT}", "name": "2 Mk-83", "weight": 991}
_2_Mk_83_ = {"clsid": "{BRU-42_2*Mk-83_RIGHT}", "name": "2 Mk-83", "weight": 991}
_2_SUU_25___8_LUU_2 = {"clsid": "{BRU42_2*SUU25 L}", "name": "2 SUU-25 * 8 LUU-2", "weight": 388}
_2_SUU_25___8_LUU_2_ = {"clsid": "{BRU3242_2*SUU25 L}", "name": "2 SUU-25 * 8 LUU-2", "weight": 445.38}
_2_SUU_25___8_LUU_2__ = {"clsid": "{BRU42_2*SUU25 R}", "name": "2 SUU-25 * 8 LUU-2", "weight": 388}
_2_SUU_25___8_LUU_2___ = {"clsid": "{BRU3242_2*SUU25 R}", "name": "2 SUU-25 * 8 LUU-2", "weight": 445.38}
_2_x_9M120F_Ataka__AT_9_Spiral_2____AGM__SACLOS__HE = {"clsid": "{2x9M120F_Ataka_V}", "name": "2 x 9M120F Ataka (AT-9 Spiral-2) - AGM, SACLOS, HE", "weight": 112}
_2_x_9M120_Ataka__AT_9_Spiral_2____ATGM__SACLOS__Tandem_HEAT = {"clsid": "{2x9M120_Ataka_V}", "name": "2 x 9M120 Ataka (AT-9 Spiral-2) - ATGM, SACLOS, Tandem HEAT", "weight": 112}
_2_x_9M220O_Ataka__AT_9_Spiral_2____AAM__SACLOS__Frag = {"clsid": "{2x9M220_Ataka_V}", "name": "2 x 9M220O Ataka (AT-9 Spiral-2) - AAM, SACLOS, Frag", "weight": 112}
_2_x_ALARM = {"clsid": "{07BE2D19-0E48-4B0B-91DA-5F6C8F9E3C75}", "name": "2 x ALARM", "weight": 530}
_2_x_BL_755_CBUs___450kg__147_Frag_Pen_bomblets = {"clsid": "{C535596E-F7D2-4301-8BB4-B1658BB87ED7}", "name": "2 x BL-755 CBUs - 450kg, 147 Frag/Pen bomblets", "weight": 554}
_2_x_B_13L_pods___10_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag = {"clsid": "{TWIN_B13L_5OF}", "name": "2 x B-13L pods - 10 x S-13-OF, 122mm UnGd Rkts, Blast/Frag", "weight": 1042}
_2_x_B_13L___5_S_13_OF = {"clsid": "{B13_5_S13OF_DUAL_L}", "name": "2 x B-13L - 5 S-13 OF", "weight": 1042}
_2_x_B_13L___5_S_13_OF_ = {"clsid": "{B13_5_S13OF_DUAL_R}", "name": "2 x B-13L - 5 S-13 OF", "weight": 1042}
_2_x_B_8M1_pods___40_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP = {"clsid": "{TWIN_B_8M1_S_8KOM}", "name": "2 x B-8M1 pods - 40 x S-8KOM, 80mm UnGd Rkts, HEAT/AP", "weight": 759}
_2_x_B_8M1___20_S_8KOM = {"clsid": "{B8M1_20_S8KOM_DUAL_L}", "name": "2 x B-8M1 - 20 S-8KOM", "weight": 975}
_2_x_B_8M1___20_S_8KOM_ = {"clsid": "{B8M1_20_S8KOM_DUAL_R}", "name": "2 x B-8M1 - 20 S-8KOM", "weight": 975}
_2_x_B_8M1___20_S_8OFP2 = {"clsid": "{B8M1_20_S8OFP2_DUAL_L}", "name": "2 x B-8M1 - 20 S-8OFP2", "weight": 975}
_2_x_B_8M1___20_S_8OFP2_ = {"clsid": "{B8M1_20_S8OFP2_DUAL_R}", "name": "2 x B-8M1 - 20 S-8OFP2", "weight": 975}
_2_x_B_8M1___20_S_8TsM = {"clsid": "{B8M1_20_S8TsM_DUAL_L}", "name": "2 x B-8M1 - 20 S-8TsM", "weight": 751}
_2_x_B_8M1___20_S_8TsM_ = {"clsid": "{B8M1_20_S8TsM_DUAL_R}", "name": "2 x B-8M1 - 20 S-8TsM", "weight": 751}
_2_x_B_8V20A_pods___40_x_S_8OFP2__80mm_UnGd_Rkts__HE_Frag_AP = {"clsid": "{TWIN_B_8M1_S_8_OFP2}", "name": "2 x B-8V20A pods - 40 x S-8OFP2, 80mm UnGd Rkts, HE/Frag/AP", "weight": 975}
_2_x_B_8V20A_pods___40_x_S_8TsM__80mm_UnGd_Rkts__Smk = {"clsid": "{TWIN_B_8M1_S_8TsM}", "name": "2 x B-8V20A pods - 40 x S-8TsM, 80mm UnGd Rkts, Smk", "weight": 751}
_2_x_FAB_250 = {"clsid": "{FAB_250_DUAL_L}", "name": "2 x FAB-250", "weight": 532}
_2_x_FAB_250_ = {"clsid": "{FAB_250_DUAL_R}", "name": "2 x FAB-250", "weight": 532}
_2_x_FAB_500 = {"clsid": "{FAB_500_DUAL_L}", "name": "2 x FAB-500", "weight": 1044}
_2_x_FAB_500_ = {"clsid": "{FAB_500_DUAL_R}", "name": "2 x FAB-500", "weight": 1044}
_2_x_HVAR__UnGd_Rkts = {"clsid": "{HVARx2}", "name": "2 x HVAR, UnGd Rkts", "weight": 128}
_2_x_OFAB_100_Jupiter___100kg_GP_Bombs_LD = {"clsid": "{FAB-100x2}", "name": "2 x OFAB-100 Jupiter - 100kg GP Bombs LD", "weight": 342}
_2_x_RBK_250_PTAB_2_5M = {"clsid": "{RBK_250_PTAB25M_DUAL_L}", "name": "2 x RBK-250 PTAB-2.5M", "weight": 578}
_2_x_RBK_250_PTAB_2_5M_ = {"clsid": "{RBK_250_PTAB25M_DUAL_R}", "name": "2 x RBK-250 PTAB-2.5M", "weight": 578}
_2_x_RBK_500_255_PTAB_10_5 = {"clsid": "{RBK_500_PTAB105_DUAL_L}", "name": "2 x RBK-500-255 PTAB-10-5", "weight": 538}
_2_x_RBK_500_255_PTAB_10_5_ = {"clsid": "{RBK_500_PTAB105_DUAL_R}", "name": "2 x RBK-500-255 PTAB-10-5", "weight": 538}
_2_x_RP_3_25lb_AP_Mk_I = {"clsid": "{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "2 x RP-3 25lb AP Mk.I", "weight": 174}
_2_x_RP_3_25lb_AP_Mk_I_ = {"clsid": "{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "2 x RP-3 25lb AP Mk.I", "weight": 174}
_2_x_RP_3_60lb_F_No1_Mk_I = {"clsid": "{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "2 x RP-3 60lb F No1 Mk.I", "weight": 193.2}
_2_x_RP_3_60lb_F_No1_Mk_I_ = {"clsid": "{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "2 x RP-3 60lb F No1 Mk.I", "weight": 193.2}
_2_x_RP_3_60lb_SAP_No2_Mk_I = {"clsid": "{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "2 x RP-3 60lb SAP No2 Mk.I", "weight": 206.2}
_2_x_RP_3_60lb_SAP_No2_Mk_I_ = {"clsid": "{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "2 x RP-3 60lb SAP No2 Mk.I", "weight": 206.2}
_2_x_S_25 = {"clsid": "{S25_DUAL_L}", "name": "2 x S-25", "weight": 902}
_2_x_S_25_ = {"clsid": "{S25_DUAL_R}", "name": "2 x S-25", "weight": 902}
_2_x_S_25_OFM___340mm_UnGdrocket__480kg_Penetrator = {"clsid": "{TWIN_S25}", "name": "2 x S-25-OFM - 340mm UnGdrocket, 480kg Penetrator", "weight": 902}
_2_x_S_25_O___420mm_UnGd_Rkt__380kg_Frag = {"clsid": "{TWIN_S25_O}", "name": "2 x S-25-O - 420mm UnGd Rkt, 380kg Frag", "weight": 922}
_33_x_FAB_250___250kg_GP_Bombs_LD = {"clsid": "{BDAD04AA-4D4A-4E51-B958-180A89F963CF}", "name": "33 x FAB-250 - 250kg GP Bombs LD", "weight": 8250}
_33_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{AD5E5863-08FC-4283-B92C-162E2B2BD3FF}", "name": "33 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 16500}
_3M45 = {"clsid": "3M45", "name": "SS-N-19 SHIPWRECK", "weight": None}
_3_BDU_33 = {"clsid": "{BRU42_3*BDU33}", "name": "3 BDU-33", "weight": 161}
_3_BDU_33_ = {"clsid": "{BRU3242_3*BDU33}", "name": "3 BDU-33", "weight": 218.38}
_3_BDU_33__ = {"clsid": "{BRU42_3*BDU33_N}", "name": "3 BDU-33", "weight": 161}
_3_BDU_33___ = {"clsid": "{BRU3242_3*BDU33_N}", "name": "3 BDU-33", "weight": 218.38}
_3_BDU_33____ = {"clsid": "{PHXBRU3242_BDU33}", "name": "3 BDU-33", "weight": 218.38}
_3_GBU_12 = {"clsid": "{BRU-42A_3*GBU-12}", "name": "3 GBU-12", "weight": 772}
_3_GBU_16 = {"clsid": "{BRU-42A_3*GBU-16}", "name": "3 GBU-16", "weight": 1459}
_3_GBU_38 = {"clsid": "{BRU-42_3*GBU-38}", "name": "3 GBU-38", "weight": 820}
_3_GBU_54_V_1_B = {"clsid": "{BRU-70A_3*GBU-54}", "name": "3 GBU-54(V)1/B", "weight": 856}
_3_Mk_81 = {"clsid": "{BRU-42_3*Mk-81LD}", "name": "3 Mk-81", "weight": 451}
_3_Mk_82 = {"clsid": "{BRU-42_3*Mk-82LD}", "name": "3 Mk-82", "weight": 820}
_3_Mk_82_AIR = {"clsid": "{BRU-42_3_MK82AIR}", "name": "3 Mk-82 AIR", "weight": 820}
_3_Mk_82_Snakeye = {"clsid": "{BRU-42_3*Mk-82SNAKEYE}", "name": "3 Mk-82 Snakeye", "weight": 820}
_3_Mk_83 = {"clsid": "{BRU-42_3*Mk-83}", "name": "3 Mk-83", "weight": 1438}
_3_x_4_5_inch_M8_UnGd_Rocket = {"clsid": "{3xM8_ROCKETS_IN_TUBES}", "name": "3 x 4.5 inch M8 UnGd Rocket", "weight": 71.72}
_3_x_FAB_1500_M_54___1500kg_GP_Bombs_LD = {"clsid": "{639DB5DD-CB7E-4E42-AC75-2112BC397B97}", "name": "3 x FAB-1500 M-54 - 1500kg GP Bombs LD", "weight": 4500}
_3_x_LAU_61_pods___57_x_2_75_Hydra__UnGd_Rkts_M151__HE = {"clsid": "{A76344EB-32D2-4532-8FA2-0C1BDC00747E}", "name": "3 x LAU-61 pods - 57 x 2.75\" Hydra, UnGd Rkts M151, HE", "weight": 876.45}
_48N6E2 = {"clsid": "48N6E2", "name": "48N6E2 S-300F (SA-N-6 Grumble)", "weight": None}
_4M80 = {"clsid": "_4M80", "name": "SS-N-12 SANDBOX", "weight": None}
_4x_SB_M_71_120kg_GP_Bomb_High_drag = {"clsid": "{M71BOMBD}", "name": "4x SB M/71 120kg GP Bomb High-drag", "weight": 609}
_4x_SB_M_71_120kg_GP_Bomb_Low_drag = {"clsid": "{M71BOMB}", "name": "4x SB M/71 120kg GP Bomb Low-drag", "weight": 609}
_4_x_AGM_154C___JSOW_Unitary_BROACH = {"clsid": "{AABA1A14-78A1-4E85-94DD-463CF75BD9E4}", "name": "4 x AGM-154C - JSOW Unitary BROACH", "weight": 2560}
_4_x_AN_M64___500lb_GP_Bomb_LD = {"clsid": "{4xAN-M64_on_InvCountedAttachmentPoints}", "name": "4 x AN-M64 - 500lb GP Bomb LD", "weight": 908}
_4_x_BGM_71D_TOW_ATGM = {"clsid": "{3EA17AB0-A805-4D9E-8732-4CE00CB00F17}", "name": "4 x BGM-71D TOW ATGM", "weight": 250}
_4_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs = {"clsid": "{B8C99F40-E486-4040-B547-6639172A5D57}", "name": "4 x GBU-27 - 2000lb Laser Guided Penetrator Bombs", "weight": 3936}
_4_x_RP_3_25lb_AP_Mk_I = {"clsid": "{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "4 x RP-3 25lb AP Mk.I", "weight": 218}
_4_x_RP_3_25lb_AP_Mk_I_ = {"clsid": "{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "4 x RP-3 25lb AP Mk.I", "weight": 218}
_4_x_RP_3_60lb_F_No1_Mk_I = {"clsid": "{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "4 x RP-3 60lb F No1 Mk.I", "weight": 256.4}
_4_x_RP_3_60lb_F_No1_Mk_I_ = {"clsid": "{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "4 x RP-3 60lb F No1 Mk.I", "weight": 256.4}
_4_x_RP_3_60lb_SAP_No2_Mk_I = {"clsid": "{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}", "name": "4 x RP-3 60lb SAP No2 Mk.I", "weight": 282.4}
_4_x_RP_3_60lb_SAP_No2_Mk_I_ = {"clsid": "{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}", "name": "4 x RP-3 60lb SAP No2 Mk.I", "weight": 282.4}
_500_lb_GP_Mk_I = {"clsid": "{British_GP_500LB_Bomb_Mk1}", "name": "500 lb GP Mk.I", "weight": 213.188}
_500_lb_GP_Mk_IV = {"clsid": "{British_GP_500LB_Bomb_Mk4}", "name": "500 lb GP Mk.IV", "weight": 213.188}
_500_lb_GP_Mk_V = {"clsid": "{British_GP_500LB_Bomb_Mk5}", "name": "500 lb GP Mk.V", "weight": 213.188}
_500_lb_GP_Short_tail = {"clsid": "{British_GP_500LB_Bomb_Mk4_Short}", "name": "500 lb GP Short tail", "weight": 207.7}
_500_lb_GP_Short_tail_ = {"clsid": "{British_GP_500LB_Bomb_Mk4_Short_on_Handley_Page_Type_B_Cut_Bar}", "name": "500 lb GP Short tail", "weight": 213}
_500_lb_MC_Mk_II = {"clsid": "{British_MC_500LB_Bomb_Mk2}", "name": "500 lb MC Mk.II", "weight": 231.8}
_500_lb_MC_Short_tail = {"clsid": "{British_MC_500LB_Bomb_Mk1_Short}", "name": "500 lb MC Short tail", "weight": 226.3}
_500_lb_MC_Short_tail_ = {"clsid": "{British_MC_500LB_Bomb_Mk1_Short_on_Handley_Page_Type_B_Cut_Bar}", "name": "500 lb MC Short tail", "weight": 231.6}
_500_lb_S_A_P_ = {"clsid": "{British_SAP_500LB_Bomb_Mk5}", "name": "500 lb S.A.P.", "weight": 222.26}
_50_gal__Drop_Tank = {"clsid": "{MOSQUITO_50GAL_SLIPPER_TANK}", "name": "50 gal. Drop Tank", "weight": 187.7}
_51_x_M117___750lb_GP_Bombs_LD = {"clsid": "{72CAC282-AE18-490B-BD4D-35E7EE969E73}", "name": "51 x M117 - 750lb GP Bombs LD", "weight": 17340}
_51_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{B84DFE16-6AC7-4854-8F6D-34137892E166}", "name": "51 x Mk-82 - 500lb GP Bombs LD", "weight": 12291}
_5V55 = {"clsid": "5V55", "name": "5V55 S-300PS (SA-10B Grumble)", "weight": None}
_5_x_HVAR__UnGd_Rkt = {"clsid": "{P47_5_HVARS_ON_LEFT_WING_RAILS}", "name": "5 x HVAR, UnGd Rkt", "weight": 330}
_5_x_HVAR__UnGd_Rkt_ = {"clsid": "{P47_5_HVARS_ON_RIGHT_WING_RAILS}", "name": "5 x HVAR, UnGd Rkt", "weight": 330}
_5_x_Mk_82_Snakeye___500lb_GP_Bomb_HD = {"clsid": "{MER-5E_Mk82SNAKEYEx5}", "name": "5 x Mk-82 Snakeye - 500lb GP Bomb HD", "weight": 1250.7}
_5_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{MER-5E_MK82x5}", "name": "5 x Mk-82 - 500lb GP Bombs LD", "weight": 1295.7}
_6_x_AGM_86C_ALCM_on_MER = {"clsid": "{45447F82-01B5-4029-A572-9AAD28AF0275}", "name": "6 x AGM-86C ALCM on MER", "weight": 11760}
_6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD = {"clsid": "{2B7BDB38-4F45-43F9-BE02-E7B3141F3D24}", "name": "6 x BetAB-500 - 500kg Concrete Piercing Bombs LD", "weight": 2868}
_6_x_FAB_1500_M_54___1500kg_GP_Bombs_LD = {"clsid": "{D9179118-E42F-47DE-A483-A6C2EA7B4F38}", "name": "6 x FAB-1500 M-54 - 1500kg GP Bombs LD", "weight": 9000}
_6_x_FAB_500_M_62___500kg_GP_Bombs_LD = {"clsid": "{26D2AF37-B0DF-4AB6-9D61-A150FF58A37B}", "name": "6 x FAB-500 M-62 - 500kg GP Bombs LD", "weight": 3000}
_6_x_Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr = {"clsid": "{C42EE4C3-355C-4B83-8B22-B39430B8F4AE}", "name": "6 x Kh-35 (AS-20 Kayak) - 520kg, AShM, IN & Act Rdr", "weight": 2880}
_6_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC = {"clsid": "{0290F5DE-014A-4BB1-9843-D717749B1DED}", "name": "6 x Kh-65 (AS-15B Kent) - 1250kg, ASM, IN & MCC", "weight": 7500}
_6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets = {"clsid": "{E79759F7-C622-4AA4-B1EF-37639A34D924}", "name": "6 x Mk-20 Rockeye - 490lbs CBUs, 247 x HEAT Bomblets", "weight": 1332}
_6_x_Mk_82___500lb_GP_Bombs_LD = {"clsid": "{027563C9-D87E-4A85-B317-597B510E3F03}", "name": "6 x Mk-82 - 500lb GP Bombs LD", "weight": 1446}
_75_US_gal__Fuel_Tank = {"clsid": "{DT75GAL}", "name": "75 US gal. Fuel Tank", "weight": 227.048087675}
_8_x_AGM_84A_Harpoon_ASM = {"clsid": "{46ACDCF8-5451-4E26-BDDB-E78D5830E93C}", "name": "8 x AGM-84A Harpoon ASM", "weight": 5292}
_8_x_AGM_86C_ALCM = {"clsid": "{8DCAF3A3-7FCF-41B8-BB88-58DEDA878EDE}", "name": "8 x AGM-86C ALCM", "weight": 15600}
_8_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC = {"clsid": "{CD9417DF-455F-4176-A5A2-8C58D61AA00B}", "name": "8 x Kh-65 (AS-15B Kent) - 1250kg, ASM, IN & MCC", "weight": 10000}
_9M111 = {"clsid": "_9M111", "name": "AT-4 SPIGOT", "weight": None}
_9M114_Shturm_V_2_Rack = {"clsid": "{9M114 Shturm-V-2 Rack}", "name": "9M114 Shturm-V-2 Rack", "weight": 13}
_9M114_Shturm_V_2__AT_6_Spiral____ATGM__SACLOS = {"clsid": "{B919B0F4-7C25-455E-9A02-CEA51DB895E3}", "name": "9M114 Shturm-V-2 (AT-6 Spiral) - ATGM, SACLOS", "weight": 105}
_9M114_Shturm_V_8__AT_6_Spiral____ATGM__SACLOS = {"clsid": "{57232979-8B0F-4db7-8D9A-55197E06B0F5}", "name": "9M114 Shturm-V-8 (AT-6 Spiral) - ATGM, SACLOS", "weight": 422}
_9M117 = {"clsid": "_9M117", "name": "AT-10 SABBER", "weight": None}
_9M133 = {"clsid": "9M133", "name": "AT-14 KORNET", "weight": None}
_9M14 = {"clsid": "9M14", "name": "AT-3 SAGGER", "weight": None}
_9M31 = {"clsid": "9M31", "name": "SA-9 GASKIN", "weight": None}
_9M311 = {"clsid": "9M311", "name": "SA-19 GRISON", "weight": None}
_9M33 = {"clsid": "9M33", "name": "SA-8 GECKO", "weight": None}
_9M331 = {"clsid": "_9M331", "name": "SA-15 GAUNTLET", "weight": None}
_9M37 = {"clsid": "_9M37", "name": "SA-13 GOPHER", "weight": None}
_9M38 = {"clsid": "_9M38", "name": "SA-11 GADFLY", "weight": None}
_9M39 = {"clsid": "_9M39", "name": "SA-18 GROUSE", "weight": None}
_9S846_Strelets___2_x_Igla = {"clsid": "{9S846_2xIGLA}", "name": "9S846 Strelets - 2 x Igla", "weight": 71}
_NiteHawk_FLIR = {"clsid": "_NiteHawk_FLIR", "name": "AN/AAS-38 \"Nite hawk\" FLIR, Laser designator & Laser spot tracker pod", "weight": 200}
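
# --- Illustrative usage sketch (editor addition, not part of the generated data) ---
# Each weapon attribute above is a plain dict with the keys "clsid", "name" and
# "weight" (mass in kilograms, or None where the source data defines no mass).
# The weapon_ids mapping below resolves a DCS clsid string back to the matching
# Weapons attribute, e.g. weapon_ids["{AIM-9L}"] is Weapons.AIM_9L_Sidewinder_IR_AAM.
# The helper name weapon_by_clsid is hypothetical and shown only as a sketch.
def weapon_by_clsid(clsid):
    """Return the weapon dict (clsid/name/weight) registered for a DCS clsid string."""
    return weapon_ids[clsid]

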
weapon_ids = {
"{AB_250_2_SD_2}": Weapons.AB_250_2___144_x_SD_2__250kg_CBU_with_HE_submunitions,
"{AB_250_2_SD_10A}": Weapons.AB_250_2___17_x_SD_10A__250kg_CBU_with_10kg_Frag_HE_submunitions,
"{AB_500_1_SD_10A}": Weapons.AB_500_1___34_x_SD_10A__500kg_CBU_with_10kg_Frag_HE_submunitions,
"{ADEN_GUNPOD}": Weapons.ADEN_GUNPOD,
"{BRU42_ADM141}": Weapons.ADM_141A,
"{BRU3242_ADM141}": Weapons.ADM_141A_,
"{ADM_141A}": Weapons.ADM_141A_TALD,
"{ADM_141B}": Weapons.ADM_141B_TALD,
"{AV8BNA_AERO1D}": Weapons.AERO_1D_300_Gallons_Fuel_Tank_,
"{AV8BNA_AERO1D_EMPTY}": Weapons.AERO_1D_300_Gallons_Fuel_Tank__Empty_,
"AGM114x2_OH_58": Weapons.AGM114x2_OH_58,
"{ee368869-c35a-486a-afe7-284beb7c5d52}": Weapons.AGM_114K,
"{88D18A5E-99C8-4B04-B40B-1C02F2018B6E}": Weapons.AGM_114K___4,
"{7B8DCEB4-820B-4015-9B48-1028A4195692}": Weapons.AGM_119B_Penguin_ASM,
"{AGM_122_SIDEARM}": Weapons.AGM_122_Sidearm,
"{LAU_7_AGM_122_SIDEARM}": Weapons.AGM_122_Sidearm_,
"{AGM_122}": Weapons.AGM_122_Sidearm___light_ARM,
"{AGM-154A}": Weapons.AGM_154A___JSOW_CEB__CBU_type_,
"{AGM-154B}": Weapons.AGM_154B___JSOW_Anti_Armour,
"{9BCC2A2B-5708-4860-B1F1-053A18442067}": Weapons.AGM_154C___JSOW_Unitary_BROACH,
"{AGM_45A}": Weapons.AGM_45A_Shrike_ARM,
"{3E6B632D-65EB-44D2-9501-1C2D04515404}": Weapons.AGM_45B_Shrike_ARM__Imp_,
"{C40A1E3A-DD05-40D9-85A4-217729E37FAE}": Weapons.AGM_62_Walleye_II___Guided_Weapon_Mk_5__TV_Guided_,
"{444BA8AE-82A7-4345-842E-76154EFCCA47}": Weapons.AGM_65D___Maverick_D__IIR_ASM_,
"{F16A4DE0-116C-4A71-97F0-2CF85B0313EF}": Weapons.AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_,
"{69DC8AE7-8F77-427B-B8AA-B19D3F478B65}": Weapons.AGM_65K___Maverick_K__CCD_Imp_ASM_,
"AGM_84": Weapons.AGM_84,
"{8B7CADF9-4954-46B3-8CFB-93F2F5B90B03}": Weapons.AGM_84A_Harpoon_ASM,
"{AGM_84D}": Weapons.AGM_84D_Harpoon_AShM,
"{AF42E6DF-9A60-46D8-A9A0-1708B241AADB}": Weapons.AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile_,
"{AGM_84E}": Weapons.AGM_84E_Harpoon_SLAM__Stand_Off_Land_Attack_Missile__,
"{AGM_84H}": Weapons.AGM_84H_SLAM_ER__Expanded_Response_,
"{769A15DF-6AFB-439F-9B24-5B7A45C59D16}": Weapons.AGM_86C_ALCM,
"{B06DD79A-F21E-4EB9-BD9D-AB3844618C9C}": Weapons.AGM_88C_HARM___High_Speed_Anti_Radiation_Missile,
"{B06DD79A-F21E-4EB9-BD9D-AB3844618C93}": Weapons.AGM_88C_HARM___High_Speed_Anti_Radiation_Missile_,
"{C8E06185-7CD6-4C90-959F-044679E90751}": Weapons.AIM_120B_AMRAAM___Active_Rdr_AAM,
"{40EF17B7-F508-45de-8566-6FFECC0C1AB8}": Weapons.AIM_120C_5_AMRAAM___Active_Rdr_AAM,
"{AIM_54A_Mk47}": Weapons.AIM_54A_Mk47,
"{SHOULDER AIM_54A_Mk47 L}": Weapons.AIM_54A_Mk47_,
"{SHOULDER AIM_54A_Mk47 R}": Weapons.AIM_54A_Mk47__,
"{AIM_54A_Mk60}": Weapons.AIM_54A_Mk60,
"{SHOULDER AIM_54A_Mk60 L}": Weapons.AIM_54A_Mk60_,
"{SHOULDER AIM_54A_Mk60 R}": Weapons.AIM_54A_Mk60__,
"{AIM_54C_Mk47}": Weapons.AIM_54C_Mk47,
"{SHOULDER AIM_54C_Mk47 L}": Weapons.AIM_54C_Mk47_,
"{7575BA0B-7294-4844-857B-031A144B2595}": Weapons.AIM_54C_Mk47_Phoenix_IN__Semi_Active_Radar,
"{SHOULDER AIM_54C_Mk47 R}": Weapons.AIM_54C_Mk47__,
"{AIM-7E}": Weapons.AIM_7E_Sparrow_Semi_Active_Radar,
"{SHOULDER AIM-7F}": Weapons.AIM_7F,
"{BELLY AIM-7F}": Weapons.AIM_7F_,
"{AIM-7F}": Weapons.AIM_7F_Sparrow_Semi_Active_Radar,
"{SHOULDER AIM-7M}": Weapons.AIM_7M,
"{SHOULDER AIM-7MH}": Weapons.AIM_7MH,
"{BELLY AIM-7MH}": Weapons.AIM_7MH_,
"{AIM-7H}": Weapons.AIM_7MH_Sparrow_Semi_Active_Radar,
"{BELLY AIM-7M}": Weapons.AIM_7M_,
"{8D399DDA-FF81-4F14-904D-099B34FE7918}": Weapons.AIM_7M_Sparrow_Semi_Active_Radar,
"{AIM-9B}": Weapons.AIM_9B_Sidewinder_IR_AAM,
"{AIM-9L}": Weapons.AIM_9L_Sidewinder_IR_AAM,
"{6CEB49FC-DED8-4DED-B053-E1F033FF72D3}": Weapons.AIM_9M_Sidewinder_IR_AAM,
"{AIM-9P5}": Weapons.AIM_9P5_Sidewinder_IR_AAM,
"{9BFD8C90-F7AE-4e90-833B-BFD0CED0E536}": Weapons.AIM_9P_Sidewinder_IR_AAM,
"{5CE2FF2A-645A-4197-B48D-8720AC69394F}": Weapons.AIM_9X_Sidewinder_IR_AAM,
"{VIGGEN_X-TANK}": Weapons.AJS_External_tank_1013kg_fuel,
"{AKAN}": Weapons.AKAN_M_55_Gunpod__150_rnds_MINGR55_HE,
"{E6747967-B1F0-4C77-977B-AB2E6EB0C102}": Weapons.ALARM,
"{6D21ECEA-F85B-4E8D-9D51-31DC9B8AA4EF}": Weapons.ALQ_131___ECM_Pod,
"ALQ_184": Weapons.ALQ_184,
"ALQ_184_Long": Weapons.ALQ_184_Long,
"{A111396E-D3E8-4b9c-8AC9-2432489304D5}": Weapons.AN_AAQ_28_LITENING___Targeting_Pod,
"{AAQ-28_LEFT}": Weapons.AN_AAQ_28_LITENING___Targeting_Pod_,
"{ALQ_164_RF_Jammer}": Weapons.AN_ALQ_164_DECM_Pod,
"{1C2B16EB-8EB0-43de-8788-8EBB2D70B8BC}": Weapons.AN_ASQ_173_Laser_Spot_Tracker_Strike_CAMera__LST_SCAM_,
"{AN_ASQ_213}": Weapons.AN_ASQ_213_HTS___HARM_Targeting_System,
"{AN_ASQ_228}": Weapons.AN_ASQ_228_ATFLIR___Targeting_Pod,
"{AIS_ASQ_T50}": Weapons.AN_ASQ_T50_TCTS_Pod___ACMI_Pod,
"{AN_M30A1}": Weapons.AN_M30A1___100lb_GP_Bomb_LD,
"{AN-M3}": Weapons.AN_M3___2_Browning_Machine_Guns_12_7mm,
"{AN_M57}": Weapons.AN_M57___250lb_GP_Bomb_LD,
"{AN-M64}": Weapons.AN_M64___500lb_GP_Bomb_LD,
"{F86ANM64}": Weapons.AN_M64___500lb_GP_Bomb_LD_,
"{AN_M65}": Weapons.AN_M65___1000lb_GP_Bomb_LD,
"{AN_M66}": Weapons.AN_M66___2000lb_GP_Bomb_LD,
"{APU-60-1_R_60M}": Weapons.APU_60_1M_with_R_60M__AA_8_Aphid____Infra_Red,
"{B0DBC591-0F52-4F7D-AD7B-51E67725FB81}": Weapons.APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red,
"{275A2855-4A79-4B2D-B082-91EA2ADF4691}": Weapons.APU_60_2M_with_2_x_R_60M__AA_8_Aphid____Infra_Red_,
"{APU_68_S-24}": Weapons.APU_68___S_24B,
"{A6FD14D3-6D30-4C85-88A7-8D17BEE120E2}": Weapons.APU_6___6_9A4172_Vikhr,
"{F789E86A-EE2E-4E6B-B81E-D5E5F903B6ED}": Weapons.APU_8___8_9A4172_Vikhr,
"{ARAKM70BAP}": Weapons.ARAK_M_70B_AP_6x_135mm_UnGd_Rkts__Pshu70_HEAT,
"{ARAKM70BHE}": Weapons.ARAK_M_70B_HE_6x_135mm_UnGd_Rkts__Shu70_HE_FRAG,
"{ASO-2}": Weapons.ASO_2___countermeasures_pod,
"{M2KC_RAFAUT_BLG66}": Weapons.AUF2_BLG_66_AC_x_2,
"{M2KC_RAFAUT_GBU12}": Weapons.AUF2_GBU_12_x_2,
"{M2KC_RAFAUT_MK82A}": Weapons.AUF2_MK_82_Air_x_2,
"{M2KC_RAFAUT_MK82S}": Weapons.AUF2_MK_82_Snakeyes_x_2,
"{M2KC_RAFAUT_MK82}": Weapons.AUF2_MK_82_x_2,
"{M2KC_RAFAUT_ROCKEYE}": Weapons.AUF2_ROCKEYE_x_2,
"{AWW-13}": Weapons.AWW_13_DATALINK_POD,
"{M2KC_AAF}": Weapons.A_A_Training,
"{M2KC_AGF}": Weapons.A_G_Training,
"{BAP_100}": Weapons.BAP_100_Anti_Runway,
"{M2KC_BAP100_12_RACK}": Weapons.BAP_100_x_12,
"{M2KC_BAP100_18_RACK}": Weapons.BAP_100_x_18,
"{M2KC_BAP100_6_RACK}": Weapons.BAP_100_x_6,
"{BDU-33}": Weapons.BDU_33___25lb_Practice_Bomb_LD,
"{BDU_45}": Weapons.BDU_45,
"{BDU_45B}": Weapons.BDU_45B,
"{BRU-32 BDU-45B}": Weapons.BDU_45B_,
"{BRU-32 BDU-45}": Weapons.BDU_45_,
"{BDU_45LG}": Weapons.BDU_45_LG,
"{BDU-50HD}": Weapons.BDU_50HD___500lb_Inert_Practice_Bomb_HD,
"{BDU-50LD}": Weapons.BDU_50LD___500lb_Inert_Practice_Bomb_LD,
"{BDU-50LGB}": Weapons.BDU_50LGB___500lb_Laser_Guided_Inert_Practice_Bomb_LD,
"{BETAB-500M}": Weapons.BETAB_500M___479_kg__bomb__penetrating,
"{BETAB-500S}": Weapons.BETAB_500S___425_kg__bomb__penetrating,
"{BEER_BOMB}": Weapons.Beer_Bomb,
"Beer_Bomb_(D)_on_LH_Spitfire_Wing_Carrier": Weapons.Beer_Bomb__D__on_LH_Spitfire_Wing_Carrier,
"Beer_Bomb_(D)_on_RH_Spitfire_Wing_Carrier": Weapons.Beer_Bomb__D__on_RH_Spitfire_Wing_Carrier,
"Beer_Bomb_(L)_on_LH_Spitfire_Wing_Carrier": Weapons.Beer_Bomb__L__on_LH_Spitfire_Wing_Carrier,
"Beer_Bomb_(L)_on_RH_Spitfire_Wing_Carrier": Weapons.Beer_Bomb__L__on_RH_Spitfire_Wing_Carrier,
"{BLG66_BELOUGA}": Weapons.Belouga,
"{BD289E34-DF84-4C5E-9220-4B14C346E79D}": Weapons.BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bomb,
"{35B698AC-9FEF-4EC4-AD29-484A0085F62B}": Weapons.BetAB_500___500kg_Concrete_Piercing_Bomb_LD,
"BF109K_4_FUEL_TANK": Weapons.BF109K_4_FUEL_TANK,
"BGM_109": Weapons.BGM_109,
"BGM-109B": Weapons.BGM_109B,
"BIN_200": Weapons.BIN_200,
"{BKF_AO2_5RT}": Weapons.BKF___12_x_AO_2_5RT,
"{BKF_PTAB2_5KO}": Weapons.BKF___12_x_PTAB_2_5KO,
"{BK90}": Weapons.BK_90_MJ12__12x_MJ2_HEAT___36x_MJ1_HE_FRAG_Bomblets_,
"{BK90MJ1}": Weapons.BK_90_MJ1__72_x_MJ1_HE_FRAG_Bomblets_,
"{BK90MJ2}": Weapons.BK_90_MJ2__24_x_MJ2_HEAT_Bomblets_,
"{BLG66_BELOUGA_AC}": Weapons.BLG_66_AC_Belouga,
"{BLG66_AC}": Weapons.BLG_66_Belouga___290kg_CBU__151_Frag_Pen_bomblets,
"{752B9781-F962-11d5-9190-00A0249B6F00}": Weapons.BLU_107___440lb_Anti_Runway_Penetrator_Bomb,
"{08164777-5E9C-4B08-B48E-5AA7AFB246E2}": Weapons.BL_755_CBU___450kg__147_Frag_Pen_bomblets,
"{8C3F26A1-FA0F-11d5-9190-00A0249B6F00}": Weapons.BOZ_107___Countermeasure_Dispenser,
"{BRU33_LAU10}": Weapons.BRU_33_with_1_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG,
"{BRU33_LAU61}": Weapons.BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{BRU33_LAU61_M282}": Weapons.BRU_33_with_1_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP,
"{BRU33_LAU68}": Weapons.BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{BRU33_LAU68_M282}": Weapons.BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP,
"{BRU33_LAU68_MK5}": Weapons.BRU_33_with_1_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT,
"{BRU33_2X_BDU-45B}": Weapons.BRU_33_with_2_x_BDU_45B___500lb_Practice_Bomb,
"{BRU33_2X_BDU_45LG}": Weapons.BRU_33_with_2_x_BDU_45_LG_500lb_Practice_Laser_Guided_Bomb,
"{BRU33_2X_BDU-45}": Weapons.BRU_33_with_2_x_BDU_45___500lb_Practice_Bomb,
"{BRU33_2X_CBU-99}": Weapons.BRU_33_with_2_x_CBU_99___490lbs__247_x_HEAT_Bomblets,
"{BRU33_2X_GBU-12}": Weapons.BRU_33_with_2_x_GBU_12___500lb_Laser_Guided_Bomb,
"{BRU33_2X_GBU-16}": Weapons.BRU_33_with_2_x_GBU_16___1000lb_Laser_Guided_Bomb,
"{BRU33_2*LAU10}": Weapons.BRU_33_with_2_x_LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG,
"{BRU33_2*LAU61}": Weapons.BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{BRU33_2*LAU61_M282}": Weapons.BRU_33_with_2_x_LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP,
"{BRU33_2*LAU68}": Weapons.BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{BRU33_2*LAU68_M282}": Weapons.BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP,
"{BRU33_2*LAU68_MK5}": Weapons.BRU_33_with_2_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT,
"{BRU33_2X_ROCKEYE}": Weapons.BRU_33_with_2_x_Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets,
"{BRU33_2X_MK-82Y}": Weapons.BRU_33_with_2_x_Mk_82Y___500lb_GP_Chute_Retarded_HD,
"{BRU33_2X_MK-82_Snakeye}": Weapons.BRU_33_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD,
"{BRU33_2X_MK-82}": Weapons.BRU_33_with_2_x_Mk_82___500lb_GP_Bomb_LD,
"{BRU33_2X_MK-83}": Weapons.BRU_33_with_2_x_Mk_83___1000lb_GP_Bomb_LD,
"{BRU41_6X_BDU-33}": Weapons.BRU_41A_with_6_x_BDU_33___25lb_Practice_Bomb_LD,
"{BRU41_6X_MK-82}": Weapons.BRU_41A_with_6_x_Mk_82___500lb_GP_Bomb_LD,
"BRU-42_3*BDU-33": Weapons.BRU_42_3_BDU_33,
"BRU-42_3*GBU-12": Weapons.BRU_42_3_GBU_12,
"BRU-42_LS": Weapons.BRU_42_LS,
"{62BE78B1-9258-48AE-B882-279534C0D278}": Weapons.BRU_42_with_2_x_GBU_10___2000lb_Laser_Guided_Bombs,
"{EB969276-1922-4ED1-A5CB-18590F45D7FE}": Weapons.BRU_42_with_2_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs,
"{88D49E04-78DF-4F08-B47E-B81247A9E3C5}": Weapons.BRU_42_with_3_x_GBU_16___1000lb_Laser_Guided_Bombs,
"{LAU-131x3 - 7 AGR-20A}": Weapons.BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS,
"{LAU-131x3 - 7 AGR-20 M282}": Weapons.BRU_42_with_3_x_LAU_131_pods___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS,
"{64329ED9-B14C-4c0b-A923-A3C911DA1527}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{C2593383-3CA8-4b18-B73D-0E750BCA1C85}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos,
"{E6966004-A525-4f47-AF94-BCFEDF8FDBDA}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum,
"{4C044B08-886B-46c8-9B1F-AB05B3ED9C1D}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk,
"{443364AE-D557-488e-9499-45EDB3BA6730}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice,
"{9BC82B3D-FE70-4910-B2B7-3E54EFE73262}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT,
"{C0FA251E-B645-4ce5-926B-F4BC20822F8B}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice,
"{A1853B38-2160-4ffe-B7E9-9BF81E6C3D77}": Weapons.BRU_42_with_3_x_LAU_68_pods___21_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice,
"{BRU_42_3xLAU68_M282}": Weapons.BRU_42_with_3_x_LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP,
"{B83CB620-5BBE-4BEA-910C-EB605A327EF9}": Weapons.BRU_42_with_3_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets,
"{7B34E0BB-E427-4C2A-A61A-8407CE18B54D}": Weapons.BRU_42_with_3_x_Mk_81___250lb_GP_Bombs_LD,
"{BRU-42_3*Mk-82AIR}": Weapons.BRU_42_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bombs_HD,
"{60CC734F-0AFA-4E2E-82B8-93B941AB11CF}": Weapons.BRU_42_with_3_x_Mk_82___500lb_GP_Bombs_LD,
"{BRU-42_LS_3*SUU-25_8*LUU-2}": Weapons.BRU_42_with_3_x_SUU_25_x_8_LUU_2___Target_Marker_Flares,
"{BRU55_2*AGM-154A}": Weapons.BRU_55_with_2_x_AGM_154A___JSOW_CEB__CBU_type_,
"{BRU55_2*AGM-154C}": Weapons.BRU_55_with_2_x_AGM_154C___JSOW_Unitary_BROACH,
"{BRU55_2*GBU-38}": Weapons.BRU_55_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb,
"{BRU57_2*AGM-154A}": Weapons.BRU_57_with_2_x_AGM_154A___JSOW_CEB__CBU_type_,
"{BRU57_2*AGM-154B}": Weapons.BRU_57_with_2_x_AGM_154B___JSOW_Anti_Armour,
"{BRU57_2*CBU-103}": Weapons.BRU_57_with_2_x_CBU_103___202_x_CEM__CBU_with_WCMD,
"{BRU57_2*CBU-105}": Weapons.BRU_57_with_2_x_CBU_105___10_x_SFW__CBU_with_WCMD,
"{BRU57_2*GBU-38}": Weapons.BRU_57_with_2_x_GBU_38___JDAM__500lb_GPS_Guided_Bomb,
"BR_250": Weapons.BR_250,
"BR_500": Weapons.BR_500,
"British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier": Weapons.British_GP_250LBS_Bomb_MK4_on_LH_Spitfire_Wing_Carrier,
"British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier": Weapons.British_GP_250LBS_Bomb_MK4_on_RH_Spitfire_Wing_Carrier,
"British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3": Weapons.British_GP_500LBS_Bomb_MK4_on_British_UniversalBC_MK3,
"{FC56DF80-9B09-44C5-8976-DCFAFF219062}": Weapons.B_13L_pod___5_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag,
"B-1B_Mk-84*8": Weapons.B_1B_Mk_84_8,
"{F72F47E5-C83A-4B85-96ED-D3E46671EE9A}": Weapons.B_8M1_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP,
"{3DFB7320-AB0E-11d7-9897-000476191836}": Weapons.B_8M1_pod___20_x_S_8TsM__80mm_UnGd_Rkts__Smk,
"B-8M1 - 20 S-8OFP2": Weapons.B_8M1___20_S_8OFP2,
"B_8V20A_CM": Weapons.B_8V20A_CM,
"B_8V20A_CM_BU": Weapons.B_8V20A_CM_BU,
"B_8V20A_CM_GN": Weapons.B_8V20A_CM_GN,
"B_8V20A_CM_RD": Weapons.B_8V20A_CM_RD,
"B_8V20A_CM_VT": Weapons.B_8V20A_CM_VT,
"B_8V20A_CM_WH": Weapons.B_8V20A_CM_WH,
"B_8V20A_CM_YE": Weapons.B_8V20A_CM_YE,
"B_8V20A_OFP2": Weapons.B_8V20A_OFP2,
"B_8V20A_OM": Weapons.B_8V20A_OM,
"{6A4B9E69-64FE-439a-9163-3A87FB6A4D81}": Weapons.B_8V20A_pod___20_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP,
"CATM-9M": Weapons.CATM_9M,
"CBLS-200": Weapons.CBLS_200,
"CBU87*10": Weapons.CBU87_10,
"CBU97*10": Weapons.CBU97_10,
"{CBU_103}": Weapons.CBU_103___202_x_CEM__CBU_with_WCMD,
"{CBU_105}": Weapons.CBU_105___10_x_SFW__CBU_with_WCMD,
"{CBU-52B}": Weapons.CBU_52B___220_x_HE_Frag_bomblets,
"{CBU-87}": Weapons.CBU_87___202_x_CEM_Cluster_Bomb,
"{5335D97A-35A5-4643-9D9B-026C75961E52}": Weapons.CBU_97___10_x_SFW_Cluster_Bomb,
"{CBU_99}": Weapons.CBU_99___490lbs__247_x_HEAT_Bomblets,
"{CM_802AKG}": Weapons.CM_802AKG,
"{C_802AK}": Weapons.C_802AK,
"{C-101-DEFA553}": Weapons.DEFA_553___30mm_Revolver_Cannon,
"DIS_AKD-10": Weapons.DIS_AKD_10,
"DIS_AKG_DLPOD": Weapons.DIS_AKG_DLPOD,
"DIS_BOMB_250_2": Weapons.DIS_BOMB_250_2,
"DIS_BOMB_250_3": Weapons.DIS_BOMB_250_3,
"DIS_BRM1_90": Weapons.DIS_BRM1_90,
"DIS_CM-802AKG": Weapons.DIS_CM_802AKG,
"DIS_C-701IR": Weapons.DIS_C_701IR,
"DIS_C-701T": Weapons.DIS_C_701T,
"DIS_C-802AK": Weapons.DIS_C_802AK,
"DIS_DF4A_KD20": Weapons.DIS_DF4A_KD20,
"DIS_DF4B_YJ12": Weapons.DIS_DF4B_YJ12,
"DIS_GB6": Weapons.DIS_GB6,
"DIS_GB6_HE": Weapons.DIS_GB6_HE,
"DIS_GB6_TSP": Weapons.DIS_GB6_TSP,
"DIS_GBU_10": Weapons.DIS_GBU_10,
"DIS_GBU_12": Weapons.DIS_GBU_12,
"DIS_GBU_12_DUAL_GDJ_II19_L": Weapons.DIS_GBU_12_DUAL_GDJ_II19_L,
"DIS_GBU_12_DUAL_GDJ_II19_R": Weapons.DIS_GBU_12_DUAL_GDJ_II19_R,
"DIS_GBU_16": Weapons.DIS_GBU_16,
"DIS_GDJ_KD63": Weapons.DIS_GDJ_KD63,
"DIS_GDJ_KD63B": Weapons.DIS_GDJ_KD63B,
"DIS_GDJ_YJ83K": Weapons.DIS_GDJ_YJ83K,
"DIS_H6_250_2_N12": Weapons.DIS_H6_250_2_N12,
"DIS_H6_250_2_N24": Weapons.DIS_H6_250_2_N24,
"DIS_KD20": Weapons.DIS_KD20,
"DIS_KD63": Weapons.DIS_KD63,
"DIS_KD63B": Weapons.DIS_KD63B,
"DIS_LAU68_MK5_DUAL_GDJ_II19_L": Weapons.DIS_LAU68_MK5_DUAL_GDJ_II19_L,
"DIS_LAU68_MK5_DUAL_GDJ_II19_R": Weapons.DIS_LAU68_MK5_DUAL_GDJ_II19_R,
"DIS_LD-10": Weapons.DIS_LD_10,
"DIS_LD-10_DUAL_L": Weapons.DIS_LD_10_DUAL_L,
"DIS_LD-10_DUAL_R": Weapons.DIS_LD_10_DUAL_R,
"DIS_LS_6_500": Weapons.DIS_LS_6_500,
"DIS_MER6_250_3_N6": Weapons.DIS_MER6_250_3_N6,
"DIS_MK_20": Weapons.DIS_MK_20,
"DIS_MK_20_DUAL_GDJ_II19_L": Weapons.DIS_MK_20_DUAL_GDJ_II19_L,
"DIS_MK_20_DUAL_GDJ_II19_R": Weapons.DIS_MK_20_DUAL_GDJ_II19_R,
"DIS_MK_82S_DUAL_GDJ_II19_L": Weapons.DIS_MK_82S_DUAL_GDJ_II19_L,
"DIS_MK_82S_DUAL_GDJ_II19_R": Weapons.DIS_MK_82S_DUAL_GDJ_II19_R,
"DIS_MK_82_DUAL_GDJ_II19_L": Weapons.DIS_MK_82_DUAL_GDJ_II19_L,
"DIS_MK_82_DUAL_GDJ_II19_R": Weapons.DIS_MK_82_DUAL_GDJ_II19_R,
"DIS_PL-12": Weapons.DIS_PL_12,
"DIS_PL-5EII": Weapons.DIS_PL_5EII,
"DIS_PL-8A": Weapons.DIS_PL_8A,
"DIS_PL-8B": Weapons.DIS_PL_8B,
"DIS_RKT_90_UG": Weapons.DIS_RKT_90_UG,
"DIS_SD-10": Weapons.DIS_SD_10,
"DIS_SD-10_DUAL_L": Weapons.DIS_SD_10_DUAL_L,
"DIS_SD-10_DUAL_R": Weapons.DIS_SD_10_DUAL_R,
"DIS_SMOKE_GENERATOR_B": Weapons.DIS_SMOKE_GENERATOR_B,
"DIS_SMOKE_GENERATOR_G": Weapons.DIS_SMOKE_GENERATOR_G,
"DIS_SMOKE_GENERATOR_O": Weapons.DIS_SMOKE_GENERATOR_O,
"DIS_SMOKE_GENERATOR_R": Weapons.DIS_SMOKE_GENERATOR_R,
"DIS_SMOKE_GENERATOR_W": Weapons.DIS_SMOKE_GENERATOR_W,
"DIS_SMOKE_GENERATOR_Y": Weapons.DIS_SMOKE_GENERATOR_Y,
"DIS_SPJ_POD": Weapons.DIS_SPJ_POD,
"DIS_TANK1100": Weapons.DIS_TANK1100,
"DIS_TANK1100_EMPTY": Weapons.DIS_TANK1100_EMPTY,
"DIS_TANK800": Weapons.DIS_TANK800,
"DIS_TANK800_EMPTY": Weapons.DIS_TANK800_EMPTY,
"DIS_TYPE200": Weapons.DIS_TYPE200,
"DIS_TYPE200_DUAL_L": Weapons.DIS_TYPE200_DUAL_L,
"DIS_TYPE200_DUAL_R": Weapons.DIS_TYPE200_DUAL_R,
"DIS_WMD7": Weapons.DIS_WMD7,
"DIS_YJ12": Weapons.DIS_YJ12,
"DIS_YJ83K": Weapons.DIS_YJ83K,
"{DWS39_MJ1}": Weapons.DWS39_MJ1,
"{DWS39_MJ1_MJ2}": Weapons.DWS39_MJ1_MJ2,
"{DWS39_MJ2}": Weapons.DWS39_MJ2,
"{Eclair}": Weapons.Eclair,
"ER_4_SC50": Weapons.ER_4_SC50,
"{0519A261-0AB6-11d6-9193-00A0249B6F00}": Weapons.ETHER,
"FAB_100M": Weapons.FAB_100M,
"FAB_100M": Weapons.FAB_100M_,
"{FAB-100-4}": Weapons.FAB_100_x_4,
"{FB3CE165-BF07-4979-887C-92B87F13276B}": Weapons.FAB_100___100kg_GP_Bomb_LD,
"{40AA4ABE-D6EB-4CD6-AEFE-A1A0477B24AB}": Weapons.FAB_1500_M_54___1500kg_GP_Bomb_LD,
"{FAB-250-M54-TU}": Weapons.FAB_250_M54_TU___235_kg__bomb__parachute,
"{FAB-250-M54}": Weapons.FAB_250_M54___235_kg__bomb__parachute,
"{FAB_250_M62}": Weapons.FAB_250_M62___250kg_GP_Bomb_LD,
"{3C612111-C7AD-476E-8A8E-2485812F4E5C}": Weapons.FAB_250___250kg_GP_Bomb_LD,
"FAB_50": Weapons.FAB_50,
"{FAB-500-M54-TU}": Weapons.FAB_500_M54_TU___480_kg__bomb__parachute,
"{FAB-500-M54}": Weapons.FAB_500_M54___474_kg__bomb__free_fall,
"{37DCC01E-9E02-432F-B61D-10C166CA2798}": Weapons.FAB_500_M_62___500kg_GP_Bomb_LD,
"{FAB-500-SL}": Weapons.FAB_500_SL___515_kg__bomb__parachute,
"{FAB-500-TA}": Weapons.FAB_500_TA___477_kg__bomb__free_fall,
"FAB_50": Weapons.FAB_50_,
"FIM_92": Weapons.FIM_92,
"{FPU_8A_FUEL_TANK}": Weapons.FPU_8A_Fuel_Tank_330_gallons,
"{PTB_120_F86F35}": Weapons.Fuel_Tank_120_gallons,
"{PTB_150L_L39}": Weapons.Fuel_Tank_150_liters,
"{PTB_200_F86F35}": Weapons.Fuel_Tank_200_gallons,
"{PTB_350L_L39}": Weapons.Fuel_Tank_350_liters,
"{PTB_490C_MIG21}": Weapons.Fuel_Tank_490_L_Central__21_,
"{PTB_490_MIG21}": Weapons.Fuel_Tank_490_L__21_,
"{PTB_800_MIG21}": Weapons.Fuel_Tank_800_L__21_,
"Fuel_Tank_FT600": Weapons.Fuel_Tank_FT600,
"{414E383A-59EB-41BC-8566-2B5E0788ED1F}": Weapons.Fuel_tank_1150L,
"{C0FF4842-FBAC-11d5-9190-00A0249B6F00}": Weapons.Fuel_tank_1150L_MiG_29,
"{2BEC576B-CDF5-4B7F-961F-B0FA4312B841}": Weapons.Fuel_tank_1400L,
"{16602053-4A12-40A2-B214-AB60D481B20E}": Weapons.Fuel_tank_2000L,
"{7D7EC917-05F6-49D4-8045-61FC587DD019}": Weapons.Fuel_tank_3000L,
"{8A0BE8AE-58D4-4572-9263-3144C0D06364}": Weapons.Fuel_tank_300_gal,
"{F14-300gal}": Weapons.Fuel_tank_300_gal_,
"{F14-300gal-empty}": Weapons.Fuel_tank_300_gal__empty_,
"{EFEC8200-B922-11d7-9897-000476191836}": Weapons.Fuel_tank_330_gal,
"{EFEC8201-B922-11d7-9897-000476191836}": Weapons.Fuel_tank_330_gal_,
"{82364E69-5564-4043-A866-E13032926C3E}": Weapons.Fuel_tank_367_gal,
"{F376DBEE-4CAE-41BA-ADD9-B2910AC95DEC}": Weapons.Fuel_tank_370_gal,
"{0855A3A1-FA50-4C89-BDBB-5D5360ABA071}": Weapons.Fuel_tank_5000L,
"{E1F29B21-F291-4589-9FD8-3272EEC69506}": Weapons.Fuel_tank_610_gal,
"{A5BAEAB7-6FAF-4236-AF72-0FD900F493F9}": Weapons.Fuel_tank_800L,
"{E8D4652F-FD48-45B7-BA5B-2AE05BB5A9CF}": Weapons.Fuel_tank_800L_Wing,
"{B99EE8A8-99BC-4a8d-89AC-A26831920DCE}": Weapons.Fuel_tank_PTB_450,
"{PTB_450}": Weapons.Fuel_tank_PTB_450_,
"{A504D93B-4E80-4B4F-A533-0D9B65F2C55F}": Weapons.Fuel_tank_S_3,
"FW109_FUEL_TANK": Weapons.FW109_FUEL_TANK,
"{8B9E3FD0-F034-4A07-B6CE-C269884CC71B}": Weapons.F_4_Fuel_tank_C,
"{7B4B122D-C12C-4DB4-834E-4D8BB4D863A8}": Weapons.F_4_Fuel_tank_W,
"{PTB-150GAL}": Weapons.F_5_150Gal_Fuel_tank,
"{0395076D-2F77-4420-9D33-087A4398130B}": Weapons.F_5_275Gal_Fuel_tank,
"{GAU_12_Equalizer_AP}": Weapons.GAU_12_Gunpod_w_AP_M79,
"{GAU_12_Equalizer_HE}": Weapons.GAU_12_Gunpod_w_HE_M792,
"{GAU_12_Equalizer}": Weapons.GAU_12_Gunpod_w_SAPHEI_T,
"{BRU-32 GBU-10}": Weapons.GBU_10,
"{51F9AAE5-964F-4D21-83FB-502E3BFE5F8A}": Weapons.GBU_10___2000lb_Laser_Guided_Bomb,
"{BRU-32 GBU-12}": Weapons.GBU_12,
"{DB769D48-67D7-42ED-A2BE-108D566C8B1E}": Weapons.GBU_12___500lb_Laser_Guided_Bomb,
"{BRU-32 GBU-16}": Weapons.GBU_16,
"{0D33DDAE-524F-4A4E-B5B8-621754FE3ADE}": Weapons.GBU_16___1000lb_Laser_Guided_Bomb,
"{BRU-32 GBU-24}": Weapons.GBU_24,
"{34759BBC-AF1E-4AEE-A581-498FF7A6EBCE}": Weapons.GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb,
"{GBU-24}": Weapons.GBU_24_Paveway_III___2000lb_Laser_Guided_Bomb_,
"{EF0A9419-01D6-473B-99A3-BEBDB923B14D}": Weapons.GBU_27___2000lb_Laser_Guided_Penetrator_Bomb,
"{F06B775B-FC70-44B5-8A9F-5B5E2EB839C7}": Weapons.GBU_28___5000lb_Laser_Guided_Penetrator_Bomb,
"GBU-31V3B*8": Weapons.GBU_31V3B_8,
"GBU-31*8": Weapons.GBU_31_8,
"{GBU-31}": Weapons.GBU_31_V_1_B___JDAM__2000lb_GPS_Guided_Bomb,
"{GBU_31_V_2B}": Weapons.GBU_31_V_2_B___JDAM__2000lb_GPS_Guided_Bomb,
"{GBU-31V3B}": Weapons.GBU_31_V_3_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb,
"{GBU_31_V_4B}": Weapons.GBU_31_V_4_B___JDAM__2000lb_GPS_Guided_Penetrator_Bomb,
"{GBU_32_V_2B}": Weapons.GBU_32_V_2_B___JDAM__1000lb_GPS_Guided_Bomb,
"GBU-38*16": Weapons.GBU_38_16,
"{GBU-38}": Weapons.GBU_38___JDAM__500lb_GPS_Guided_Bomb,
"{GBU_54_V_1B}": Weapons.GBU_54B___LJDAM__500lb_Laser__GPS_Guided_Bomb_LD,
"GUV_VOG": Weapons.GUV_VOG,
"GUV_YakB_GSHP": Weapons.GUV_YakB_GSHP,
"{HOT3G}": Weapons.HOT3,
"{HOT3D}": Weapons.HOT3_,
"{4CD2BB0F-5493-44EF-A927-9760350F7BA1}": Weapons.HSAB_with_9_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets,
"{696CFFC4-0BDE-42A8-BE4B-0BE3D9DD723C}": Weapons.HSAB_with_9_x_Mk_83___1000lb_GP_Bombs_LD,
"{HVAR_SMOKE_2}": Weapons.HVAR_SMOKE__UnGd_Rkt,
"{HVAR_SMOKE_GENERATOR}": Weapons.HVAR_Smoke_Generator,
"{HVAR}": Weapons.HVAR__UnGd_Rkt,
"I16_DROP_FUEL_TANK": Weapons.I16_DROP_FUEL_TANK,
"I16_FAB_100SV": Weapons.I16_FAB_100SV,
"I16_RS_82": Weapons.I16_RS_82,
"{IAB-500}": Weapons.IAB_500___470_kg__bomb__free_fall,
"{IR_Deflector}": Weapons.IR_Deflector,
"{KAB_1500Kr_LOADOUT}": Weapons.KAB_1500Kr___1500kg_TV_Guided_Bomb,
"{KAB_1500LG_LOADOUT}": Weapons.KAB_1500LG_Pr___1500kg_Laser_Guided_Penetrator_Bomb,
"{39821727-F6E2-45B3-B1F0-490CC8921D1E}": Weapons.KAB_1500L___1500kg_Laser_Guided_Bomb,
"{E2C426E3-8B10-4E09-B733-9CDC26520F48}": Weapons.KAB_500Kr___500kg_TV_Guided_Bomb,
"{BA565F89-2373-4A84-9502-A0E017D3A44A}": Weapons.KAB_500LG___500kg_Laser_Guided_Bomb,
"{KAB_500S_LOADOUT}": Weapons.KAB_500S___500kg_GPS_Guided_Bomb,
"{KB}": Weapons.KB_Flare_Chaff_dispenser_pod,
"{12429ECF-03F0-4DF6-BCBD-5D38B6343DE1}": Weapons.Kh_22__AS_4_Kitchen____1000kg__AShM__IN__Act_Pas_Rdr,
"{9F390892-E6F9-42C9-B84E-1136A881DCB2}": Weapons.Kh_23L_Grom__AS_7_Kerry____286kg__ASM__Laser_Guided,
"{6DADF342-D4BA-4D8A-B081-BA928C4AF86D}": Weapons.Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser,
"{79D73885-0801-45a9-917F-C90FE1CE3DFC}": Weapons.Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser_,
"{X-25ML}": Weapons.Kh_25ML__AS_10_Karen____300kg__ASM__Semi_Act_Laser__,
"{E86C5AA5-6D49-4F00-AD2E-79A62D6DDE26}": Weapons.Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr,
"{752AF1D2-EBCC-4bd7-A1E7-2357F5601C70}": Weapons.Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr_,
"{X-25MPU}": Weapons.Kh_25MPU__Updated_AS_12_Kegler____320kg__ARM__IN__Pas_Rdr__,
"{Kh-25MP}": Weapons.Kh_25MP__AS_12_Kegler____320kg__ARM__Pas_Rdr,
"{292960BB-6518-41AC-BADA-210D65D5073C}": Weapons.Kh_25MR__AS_10_Karen____300kg__ASM__10km__RC_Guided,
"{X-25MR}": Weapons.Kh_25MR__AS_10_Karen____300kg__ASM__RC_Guided,
"{Kh-28}": Weapons.Kh_28__AS_9_Kyle____720kg__ARM__Pas_Rdr,
"{3468C652-E830-4E73-AFA9-B5F260AB7C3D}": Weapons.Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser,
"{D4A8D9B9-5C45-42e7-BBD2-0E54F8308432}": Weapons.Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser_,
"{X-29L}": Weapons.Kh_29L__AS_14_Kedge____657kg__ASM__Semi_Act_Laser__,
"{B4FC81C9-B861-4E87-BBDC-A1158E648EBF}": Weapons.Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided,
"{601C99F7-9AF3-4ed7-A565-F8B8EC0D7AAC}": Weapons.Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided_,
"{X-29T}": Weapons.Kh_29T__AS_14_Kedge____670kg__ASM__TV_Guided__,
"{4D13E282-DF46-4B23-864A-A9423DFDE504}": Weapons.Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr,
"{4D13E282-DF46-4B23-864A-A9423DFDE50A}": Weapons.Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr_,
"{X-31A}": Weapons.Kh_31A__AS_17_Krypton____610kg__AShM__IN__Act_Rdr__,
"{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF03}": Weapons.Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr,
"{D8F2C90B-887B-4B9E-9FE2-996BC9E9AF0A}": Weapons.Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr_,
"{X-31P}": Weapons.Kh_31P__AS_17_Krypton____600kg__ARM__IN__Pas_Rdr__,
"{2234F529-1D57-4496-8BB0-0150F9BDBBD2}": Weapons.Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr,
"{2234F529-1D57-4496-8BB0-0150F9BDBBD3}": Weapons.Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr_,
"{3F26D9C5-5CC3-4E42-BC79-82FAA54E9F26}": Weapons.Kh_41__SS_N_22_Sunburn____4500kg__AShM__IN__Act_Rdr,
"{FE382A68-8620-4AC0-BDF5-709BFE3977D7}": Weapons.Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr,
"{B5CA9846-776E-4230-B4FD-8BCC9BFB1676}": Weapons.Kh_58U__AS_11_Kilter____640kg__ARM__IN__Pas_Rdr_,
"{40AB87E8-BEFB-4D85-90D9-B2753ACF9514}": Weapons.Kh_59M__AS_18_Kazoo____930kg__ASM__IN,
"{BADAF2DE-68B5-472A-8AAC-35BAEFF6B4A1}": Weapons.Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC,
"{Kh-66_Grom}": Weapons.Kh_66_Grom__21____AGM__radar_guided_APU_68,
"{96A7F676-F956-404A-AD04-F33FB2C74884}": Weapons.KMGU_2___96_x_AO_2_5RT_Dispenser__CBU__HE_Frag,
"{96A7F676-F956-404A-AD04-F33FB2C74881}": Weapons.KMGU_2___96_x_PTAB_2_5KO_Dispenser__CBU__HEAT_AP,
"KORD_12_7": Weapons.KORD_12_7,
"{F4920E62-A99A-11d8-9897-000476191836}": Weapons.Kopyo_radar_pod,
"{7210496B-7B81-4B52-80D6-8529ECF847CD}": Weapons.Kormoran___ASM,
"{K-13A}": Weapons.K_13A,
"{44EE8698-89F9-48EE-AF36-5FD31896A82F}": Weapons.L005_Sorbtsiya_ECM_pod__left_,
"{44EE8698-89F9-48EE-AF36-5FD31896A82A}": Weapons.L005_Sorbtsiya_ECM_pod__right_,
"{ECM_POD_L_175V}": Weapons.L175V_Khibiny_ECM_pod,
"{F14-LANTIRN-TP}": Weapons.LANTIRN_Targeting_Pod,
"LAU3_HE151": Weapons.LAU3_HE151,
"LAU3_HE5": Weapons.LAU3_HE5,
"LAU3_WP156": Weapons.LAU3_WP156,
"LAU3_WP1B": Weapons.LAU3_WP1B,
"LAU3_WP61": Weapons.LAU3_WP61,
"LAU-105": Weapons.LAU_105,
"LAU-105_1*AIM-9L_L": Weapons.LAU_105_1_AIM_9L_L,
"LAU-105_1*AIM-9L_R": Weapons.LAU_105_1_AIM_9L_R,
"LAU-105_1*AIM-9M_L": Weapons.LAU_105_1_AIM_9M_L,
"LAU-105_1*AIM-9M_R": Weapons.LAU_105_1_AIM_9M_R,
"LAU-105_1*CATM-9M_L": Weapons.LAU_105_1_CATM_9M_L,
"LAU-105_1*CATM-9M_R": Weapons.LAU_105_1_CATM_9M_R,
"LAU-105_2*AIM-9L": Weapons.LAU_105_2_AIM_9L,
"LAU-105_2*AIM-9P5": Weapons.LAU_105_2_AIM_9P5,
"LAU-105_2*CATM-9M": Weapons.LAU_105_2_CATM_9M,
"LAU-105_AIS_ASQ_T50_L": Weapons.LAU_105_AIS_ASQ_T50_L,
"LAU-105_AIS_ASQ_T50_R": Weapons.LAU_105_AIS_ASQ_T50_R,
"{DB434044-F5D0-4F1F-9BA9-B73027E18DD3}": Weapons.LAU_105_with_2_x_AIM_9M_Sidewinder_IR_AAM,
"{3C0745ED-8B0B-42eb-B907-5BD5C1717447}": Weapons.LAU_105_with_2_x_AIM_9P_Sidewinder_IR_AAM,
"{LAU_10R}": Weapons.LAU_10R_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG,
"{F3EFE0AB-E91A-42D8-9CA2-B63C91ED570A}": Weapons.LAU_10_pod___4_x_127mm_ZUNI__UnGd_Rkts_Mk71__HE_FRAG,
"{BRU42_LAU10}": Weapons.LAU_10___4_ZUNI_MK_71,
"{BRU3242_LAU10}": Weapons.LAU_10___4_ZUNI_MK_71_,
"{LAU-115 - AIM-7E}": Weapons.LAU_115C_with_AIM_7E_Sparrow_Semi_Active_Radar,
"{LAU-115 - AIM-7F}": Weapons.LAU_115C_with_AIM_7F_Sparrow_Semi_Active_Radar,
"{LAU-115 - AIM-7H}": Weapons.LAU_115C_with_AIM_7MH_Sparrow_Semi_Active_Radar,
"LAU-115_2*LAU-127_AIM-120B": Weapons.LAU_115_2_LAU_127_AIM_120B,
"LAU-115_2*LAU-127_AIM-120C": Weapons.LAU_115_2_LAU_127_AIM_120C,
"LAU-115_2*LAU-127_AIM-9L": Weapons.LAU_115_2_LAU_127_AIM_9L,
"LAU-115_2*LAU-127_AIM-9M": Weapons.LAU_115_2_LAU_127_AIM_9M,
"LAU-115_2*LAU-127_AIM-9X": Weapons.LAU_115_2_LAU_127_AIM_9X,
"LAU-115_2*LAU-127_CATM-9M": Weapons.LAU_115_2_LAU_127_CATM_9M,
"LAU-115_LAU-127_AIM-9L": Weapons.LAU_115_LAU_127_AIM_9L,
"LAU-115_LAU-127_AIM-9L_R": Weapons.LAU_115_LAU_127_AIM_9L_R,
"LAU-115_LAU-127_AIM-9M": Weapons.LAU_115_LAU_127_AIM_9M,
"LAU-115_LAU-127_AIM-9M_R": Weapons.LAU_115_LAU_127_AIM_9M_R,
"LAU-115_LAU-127_AIM-9X": Weapons.LAU_115_LAU_127_AIM_9X,
"LAU-115_LAU-127_AIM-9X_R": Weapons.LAU_115_LAU_127_AIM_9X_R,
"LAU-115_LAU-127_CATM-9M": Weapons.LAU_115_LAU_127_CATM_9M,
"LAU-115_LAU-127_CATM-9M_R": Weapons.LAU_115_LAU_127_CATM_9M_R,
"{LAU-115 - AIM-120B}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM,
"{LAU-115 - AIM-120B_R}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120B_AMRAAM___Active_Rdr_AAM_,
"{LAU-115 - AIM-120C}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM,
"{LAU-115 - AIM-120C_R}": Weapons.LAU_115_with_1_x_LAU_127_AIM_120C_5_AMRAAM___Active_Rdr_AAM_,
"{LAU-115 - AIM-7M}": Weapons.LAU_115_with_AIM_7M_Sparrow_Semi_Active_Radar,
"LAU_117_AGM_65A": Weapons.LAU_117_AGM_65A,
"LAU_117_AGM_65B": Weapons.LAU_117_AGM_65B,
"LAU_117_AGM_65F": Weapons.LAU_117_AGM_65F,
"LAU_117_AGM_65G": Weapons.LAU_117_AGM_65G,
"LAU_117_AGM_65H": Weapons.LAU_117_AGM_65H,
"LAU_117_AGM_65L": Weapons.LAU_117_AGM_65L,
"LAU_117_CATM_65K": Weapons.LAU_117_CATM_65K,
"LAU_117_TGM_65D": Weapons.LAU_117_TGM_65D,
"LAU_117_TGM_65G": Weapons.LAU_117_TGM_65G,
"LAU_117_TGM_65H": Weapons.LAU_117_TGM_65H,
"{444BA8AE-82A7-4345-842E-76154EFCCA46}": Weapons.LAU_117_with_AGM_65D___Maverick_D__IIR_ASM_,
"{F16A4DE0-116C-4A71-97F0-2CF85B0313EC}": Weapons.LAU_117_with_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_,
"{69DC8AE7-8F77-427B-B8AA-B19D3F478B66}": Weapons.LAU_117_with_AGM_65K___Maverick_K__CCD_Imp_ASM_,
"{3E6B632D-65EB-44D2-9501-1C2D04515405}": Weapons.LAU_118a_with_AGM_45B_Shrike_ARM__Imp_,
"LAU-127_AIM-9L": Weapons.LAU_127_AIM_9L,
"LAU-127_AIM-9M": Weapons.LAU_127_AIM_9M,
"LAU-127_AIM-9X": Weapons.LAU_127_AIM_9X,
"LAU-127_CATM-9M": Weapons.LAU_127_CATM_9M,
"LAU_131x3_HYDRA_70_M151": Weapons.LAU_131x3_HYDRA_70_M151,
"LAU_131x3_HYDRA_70_M156": Weapons.LAU_131x3_HYDRA_70_M156,
"LAU_131x3_HYDRA_70_M257": Weapons.LAU_131x3_HYDRA_70_M257,
"LAU_131x3_HYDRA_70_M274": Weapons.LAU_131x3_HYDRA_70_M274,
"LAU_131x3_HYDRA_70_MK1": Weapons.LAU_131x3_HYDRA_70_MK1,
"LAU_131x3_HYDRA_70_MK5": Weapons.LAU_131x3_HYDRA_70_MK5,
"LAU_131x3_HYDRA_70_MK61": Weapons.LAU_131x3_HYDRA_70_MK61,
"LAU_131x3_HYDRA_70_WTU1B": Weapons.LAU_131x3_HYDRA_70_WTU1B,
"{LAU-131 - 7 AGR-20A}": Weapons.LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M151__HE_APKWS,
"{LAU-131 - 7 AGR-20 M282}": Weapons.LAU_131_pod___7_x_2_75_Hydra__Laser_Guided_Rkts_M282__MPP_APKWS,
"{69926055-0DA8-4530-9F2F-C86B157EA9F6}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{2AF2EC3F-9065-4de5-93E1-1739C9A71EF7}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos,
"{DAD45FE5-CFF0-4a2b-99D4-5D044D3BC22F}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum,
"{6D6D5C07-2A90-4a68-9A74-C5D0CFFB05D9}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk,
"{D22C2D63-E5C9-4247-94FB-5E8F3DE22B71}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice,
"{319293F2-392C-4617-8315-7C88C22AF7C4}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT,
"{1CA5E00B-D545-4ff9-9B53-5970E292F14D}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice,
"{DDCE7D70-5313-4181-8977-F11018681662}": Weapons.LAU_131_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice,
"{LAU-138 wtip - AIM-9L}": Weapons.LAU_138_AIM_9L,
"{LAU-138 wtip - AIM-9M}": Weapons.LAU_138_AIM_9M,
"{LAU3_FFAR_WP156}": Weapons.LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos,
"{LAU3_FFAR_MK1HE}": Weapons.LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk1__HE,
"{LAU3_FFAR_MK5HEAT}": Weapons.LAU_3_pod___19_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT,
"{LAU_61R}": Weapons.LAU_61R_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{FD90A1DC-9147-49FA-BF56-CB83EF0BD32B}": Weapons.LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{3DFB7321-AB0E-11d7-9897-000476191836}": Weapons.LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos,
"{LAU_61_M282}": Weapons.LAU_61_pod___19_x_2_75_Hydra__UnGd_Rkts_M282__HEDP,
"{LAU68_FFAR_WP156}": Weapons.LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_M156__Wht_Phos,
"{LAU68_FFAR_MK1HE}": Weapons.LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk1__HE,
"{LAU68_FFAR_MK5HEAT}": Weapons.LAU_68_pod___7_x_2_75_FFAR__UnGd_Rkts_Mk5__HEAT,
"{A021F29D-18AB-4d3e-985C-FC9C60E35E9E}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"{4F977A2A-CD25-44df-90EF-164BFA2AE72F}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M156__Wht_Phos,
"{647C5F26-BDD1-41e6-A371-8DE1E4CC0E94}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M257__Para_Illum,
"{0877B74B-5A00-4e61-BA8A-A56450BA9E27}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M274__Practice_Smk,
"{LAU_68_M282}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_M282__HEDP,
"{FC85D2ED-501A-48ce-9863-49D468DDD5FC}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk1__Practice,
"{174C6E6D-0C3D-42ff-BCB3-0853CB371F5C}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk5__HEAT,
"{65396399-9F5C-4ec3-A7D2-5A8F4C1D90C4}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_Mk61__Practice,
"{1F7136CB-8120-4e77-B97B-945FF01FB67C}": Weapons.LAU_68_pod___7_x_2_75_Hydra__UnGd_Rkts_WTU_1_B__Practice,
"{LAU-7 - AIM-9L}": Weapons.LAU_7_AIM_9L,
"{LAU-7 - AIM-9M}": Weapons.LAU_7_AIM_9M,
"{F4-2-AIM9B}": Weapons.LAU_7_with_2_x_AIM_9B_Sidewinder_IR_AAM,
"{F4-2-AIM9L}": Weapons.LAU_7_with_2_x_AIM_9L_Sidewinder_IR_AAM,
"{9DDF5297-94B9-42FC-A45E-6E316121CD85}": Weapons.LAU_7_with_2_x_AIM_9M_Sidewinder_IR_AAM,
"{F4-2-AIM9P5}": Weapons.LAU_7_with_2_x_AIM_9P5_Sidewinder_IR_AAM,
"{773675AB-7C29-422f-AFD8-32844A7B7F17}": Weapons.LAU_7_with_2_x_AIM_9P_Sidewinder_IR_AAM,
"{GAR-8}": Weapons.LAU_7_with_AIM_9B_Sidewinder_IR_AAM,
"{AIM-9M-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9M_Sidewinder_IR_AAM,
"{AIM-9P5-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9P5_Sidewinder_IR_AAM,
"{AIM-9P-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9P_Sidewinder_IR_AAM,
"{AIM-9X-ON-ADAPTER}": Weapons.LAU_7_with_AIM_9X_Sidewinder_IR_AAM,
"{LAU-7_AIS_ASQ_T50}": Weapons.LAU_7_with_AN_ASQ_T50_TCTS_Pod___ACMI_Pod,
"LAU_88_AGM_65D_ONE": Weapons.LAU_88_AGM_65D_ONE,
"LAU_88_AGM_65H": Weapons.LAU_88_AGM_65H,
"LAU_88_AGM_65H_2_L": Weapons.LAU_88_AGM_65H_2_L,
"LAU_88_AGM_65H_2_R": Weapons.LAU_88_AGM_65H_2_R,
"LAU_88_AGM_65H_3": Weapons.LAU_88_AGM_65H_3,
"{E6A6262A-CA08-4B3D-B030-E1A993B98452}": Weapons.LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM_,
"{E6A6262A-CA08-4B3D-B030-E1A993B98453}": Weapons.LAU_88_with_2_x_AGM_65D___Maverick_D__IIR_ASM__,
"{2CC29C7A-E863-411C-8A6E-BD6F0E730548}": Weapons.LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_,
"{2CC29C7A-E863-411C-8A6E-BD6F0E730547}": Weapons.LAU_88_with_2_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd__,
"{D7670BC7-881B-4094-906C-73879CF7EB28}": Weapons.LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM_,
"{D7670BC7-881B-4094-906C-73879CF7EB27}": Weapons.LAU_88_with_2_x_AGM_65K___Maverick_K__CCD_Imp_ASM__,
"{DAC53A2F-79CA-42FF-A77A-F5649B601308}": Weapons.LAU_88_with_3_x_AGM_65D___Maverick_D__IIR_ASM_,
"{71AAB9B8-81C1-4925-BE50-1EF8E9899271}": Weapons.LAU_88_with_3_x_AGM_65E___Maverick_E__Laser_ASM___Lg_Whd_,
"{907D835F-E650-4154-BAFD-C656882555C0}": Weapons.LAU_88_with_3_x_AGM_65K___Maverick_K__CCD_Imp_ASM_,
"{LAU_SNEB68G}": Weapons.LAU_SNEB68G___8xSNEB68_EAP,
"{LAU_SNEB68_WP}": Weapons.LAU_SNEB68G___8xSNEB68_WP,
"{CAAC1CFD-6745-416B-AFA4-CB57414856D0}": Weapons.Lantirn_F_16,
"{D1744B93-2A8A-4C4D-B004-7A09CD8C8F3F}": Weapons.Lantirn_Target_Pod,
"{LR25_ARF8M3_API}": Weapons.LR_25___25_x_ARF_8_M3_API,
"{LR25_ARF8M3_HEI}": Weapons.LR_25___25_x_ARF_8_M3_HEI,
"{LR25_ARF8M3_TPSM}": Weapons.LR_25___25_x_ARF_8_M3_TP_SM,
"{0519A264-0AB6-11d6-9193-00A0249B6F00}": Weapons.L_081_Fantasmagoria_ELINT_pod,
"{US_M10_SMOKE_TANK_BLUE}": Weapons.M10_Smoke_Tank___blue,
"{US_M10_SMOKE_TANK_GREEN}": Weapons.M10_Smoke_Tank___green,
"{US_M10_SMOKE_TANK_ORANGE}": Weapons.M10_Smoke_Tank___orange,
"{US_M10_SMOKE_TANK_RED}": Weapons.M10_Smoke_Tank___red,
"{US_M10_SMOKE_TANK_WHITE}": Weapons.M10_Smoke_Tank___white,
"{US_M10_SMOKE_TANK_YELLOW}": Weapons.M10_Smoke_Tank___yellow,
"{00F5DAC4-0466-4122-998F-B1A298E34113}": Weapons.M117___750lb_GP_Bomb_LD,
"M134_L": Weapons.M134_L,
"M134_R": Weapons.M134_R,
"M134_SIDE_L": Weapons.M134_SIDE_L,
"M134_SIDE_R": Weapons.M134_SIDE_R,
"{414DA830-B61A-4F9E-B71B-C2F6832E1D7A}": Weapons.M2000_Fuel_tank,
"M260_HYDRA": Weapons.M260_HYDRA,
"M260_HYDRA_WP": Weapons.M260_HYDRA_WP,
"M261_MK151": Weapons.M261_MK151,
"M261_MK156": Weapons.M261_MK156,
"M60_SIDE_L": Weapons.M60_SIDE_L,
"M60_SIDE_R": Weapons.M60_SIDE_R,
"{MAK79_MK20 2L}": Weapons.MAK79_2_MK_20,
"{MAK79_MK20 2R}": Weapons.MAK79_2_MK_20_,
"{MAK79_BDU33 3L}": Weapons.MAK79_3_BDU_33,
"{MAK79_BDU33 3R}": Weapons.MAK79_3_BDU_33_,
"{MAK79_BDU45 3L}": Weapons.MAK79_3_BDU_45,
"{MAK79_BDU45B 3L}": Weapons.MAK79_3_BDU_45B,
"{MAK79_BDU45B 3R}": Weapons.MAK79_3_BDU_45B_,
"{MAK79_BDU45 3R}": Weapons.MAK79_3_BDU_45_,
"{MAK79_MK81 3L}": Weapons.MAK79_3_Mk_81,
"{MAK79_MK81 3R}": Weapons.MAK79_3_Mk_81_,
"{MAK79_MK82 3L}": Weapons.MAK79_3_Mk_82,
"{MAK79_MK82AIR 3L}": Weapons.MAK79_3_Mk_82AIR,
"{MAK79_MK82AIR 3R}": Weapons.MAK79_3_Mk_82AIR_,
"{MAK79_MK82 3R}": Weapons.MAK79_3_Mk_82_,
"{MAK79_MK82SE 3L}": Weapons.MAK79_3_Mk_82_SnakeEye,
"{MAK79_MK82SE 3R}": Weapons.MAK79_3_Mk_82_SnakeEye_,
"{MAK79_MK83 3L}": Weapons.MAK79_3_Mk_83,
"{MAK79_MK83 3R}": Weapons.MAK79_3_Mk_83_,
"{MAK79_BDU33 4}": Weapons.MAK79_4_BDU_33,
"{MAK79_BDU45 4}": Weapons.MAK79_4_BDU_45,
"{MAK79_BDU45B 4}": Weapons.MAK79_4_BDU_45B,
"{MAK79_MK81 4}": Weapons.MAK79_4_Mk_81,
"{MAK79_MK82 4}": Weapons.MAK79_4_Mk_82,
"{MAK79_MK82AIR 4}": Weapons.MAK79_4_Mk_82AIR,
"{MAK79_MK82SE 4}": Weapons.MAK79_4_Mk_82_SnakeEye,
"{MAK79_MK20 1R}": Weapons.MAK79_MK_20,
"{MAK79_MK20 1L}": Weapons.MAK79_MK_20_,
"{MAK79_MK83 1R}": Weapons.MAK79_Mk_83,
"{MAK79_MK83 1L}": Weapons.MAK79_Mk_83_,
"{MMagicII}": Weapons.Matra_Magic_II,
"{Matra_S530D}": Weapons.Matra_Super_530D,
"{Matra155RocketPod}": Weapons.Matra_Type_155_Rocket_Pod,
"{5A1AC2B4-CA4B-4D09-A1AF-AC52FBC4B60B}": Weapons.MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD,
"{29A828E2-C6BB-11d8-9897-000476191836}": Weapons.MBD2_67U_with_4_x_FAB_100___100kg_GP_Bombs_LD_,
"{7C5F0F5F-0A0B-46E8-937C-8922303E39A8}": Weapons.MBD3_U2T_with_2_x_FAB_1500_M_54___1500kg_GP_Bombs_LD,
"{6A367BB4-327F-4A04-8D9E-6D86BDC98E7E}": Weapons.MBD3_U4T_with_4_x_FAB_250___250kg_GP_Bombs_LD,
"{02B81892-7E24-4795-84F9-B8110C641AF0}": Weapons.MBD3_U4T_with_4_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP,
"{E659C4BE-2CD8-4472-8C08-3F28ACB61A8A}": Weapons.MBD3_U6_68_with_2_x_FAB_250___250kg_GP_Bombs_LD,
"{MBD3_U6_3*FAB-250_fwd}": Weapons.MBD3_U6_68_with_3_x_FAB_250___250kg_GP_Bombs_LD,
"{3E35F8C1-052D-11d6-9191-00A0249B6F00}": Weapons.MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD,
"{MBD3_U6_4*FAB-250_fwd}": Weapons.MBD3_U6_68_with_4_x_FAB_250___250kg_GP_Bombs_LD_,
"{MBD3_U6_5*FAB-250}": Weapons.MBD3_U6_68_with_5_x_FAB_250___250kg_GP_Bombs_LD,
"{E96E1EDD-FF3F-47CF-A959-576C3B682955}": Weapons.MBD3_U6_68_with_6_x_BetAB_500ShP___500kg_Concrete_Piercing_HD_w_booster_Bombs,
"{436C6FB9-8BF2-46B6-9DC4-F55ABF3CD1EC}": Weapons.MBD3_U6_68_with_6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD,
"{F99BEC1A-869D-4AC7-9730-FBA0E3B1F5FC}": Weapons.MBD3_U6_68_with_6_x_FAB_100___100kg_GP_Bombs_LD,
"{53BE25A4-C86C-4571-9BC0-47D668349595}": Weapons.MBD3_U6_68_with_6_x_FAB_250___250kg_GP_Bombs_LD,
"{FA673F4C-D9E4-4993-AA7A-019A92F3C005}": Weapons.MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD,
"{0D945D78-542C-4E9B-9A17-9B5008CC8D39}": Weapons.MBD3_U6_68_with_6_x_FAB_500_M_62___500kg_GP_Bombs_LD_,
"{F503C276-FE15-4C54-B310-17B50B735A84}": Weapons.MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP,
"{4D459A95-59C0-462F-8A57-34E80697F38B}": Weapons.MBD3_U6_68_with_6_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP_,
"{5F1C54C0-0ABD-4868-A883-B52FF9FCB422}": Weapons.MBD3_U9M_with_9_x_FAB_100___100kg_GP_Bombs_LD,
"{E1AAE713-5FC3-4CAA-9FF5-3FDCFB899E33}": Weapons.MBD3_U9M_with_9_x_FAB_250___250kg_GP_Bombs_LD,
"{BF83E8FD-E7A2-40D2-9608-42E13AFE2193}": Weapons.MBD3_U9M_with_9_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP,
"{005E70F5-C3EA-4E95-A148-C1044C42D845}": Weapons.MBD3_with_3_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD,
"{CEE04106-B9AA-46B4-9CD1-CD3FDCF0CE78}": Weapons.MBD3_with_3_x_FAB_100___100kg_GP_Bombs_LD,
"{D109EE9C-A1B7-4F1C-8D87-631C293A1D26}": Weapons.MBD3_with_3_x_FAB_250___250kg_GP_Bombs_LD,
"{A1E85991-B58E-4E92-AE91-DED6DC85B2E7}": Weapons.MBD3_with_3_x_FAB_500_M_62___500kg_GP_Bombs_LD,
"{EAD9B2C1-F3BA-4A7B-A2A5-84E2AF8A1975}": Weapons.MBD3_with_3_x_RBK_250___42_x_PTAB_2_5M__250kg_CBUs_Medium_HEAT_AP,
"{919CE839-9390-4629-BAF7-229DE19B8523}": Weapons.MBD3_with_3_x_RBK_500_255___30_x_PTAB_10_5__500kg_CBUs_Heavy_HEAT_AP,
"{574EDEDF-20DE-4942-B2A2-B2EDFD621562}": Weapons.MER12_with_12_x_M117___750lb_GP_Bombs_LD,
"{585D626E-7F42-4073-AB70-41E728C333E2}": Weapons.MER12_with_12_x_Mk_82___500lb_GP_Bombs_LD,
"{0B9ABA77-93B8-45FC-9C63-82AFB2CB50A4}": Weapons.MER2_with_2_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets,
"{D5D51E24-348C-4702-96AF-97A714E72697}": Weapons.MER2_with_2_x_Mk_82___500lb_GP_Bombs_LD,
"{18617C93-78E7-4359-A8CE-D754103EDF63}": Weapons.MER2_with_2_x_Mk_83___1000lb_GP_Bombs_LD,
"{82F90BEC-0E2E-4CE5-A66E-1E4ADA2B5D1E}": Weapons.MER3_with_3_x_M117___750lb_GP_Bombs_LD,
"{752B9782-F962-11d5-9190-00A0249B6F00}": Weapons.MER6_with_6_x_BLU_107___440lb_Anti_Runway_Penetrator_Bombs,
"{6CDB6B36-7165-47D0-889F-6625FB333561}": Weapons.MER6_with_6_x_M117___750lb_GP_Bombs_LD,
"{3C7CD675-7D39-41C5-8735-0F4F537818A8}": Weapons.MER6_with_6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets,
"{1C97B4A0-AA3B-43A8-8EE7-D11071457185}": Weapons.MER6_with_6_x_Mk_82___500lb_GP_Bombs_LD,
"{B1EF6B0E-3D91-4047-A7A5-A99E7D8B4A8B}": Weapons.Mercury_LLTV_Pod,
"{0DA03783-61E4-40B2-8FAE-6AEE0A5C5AAE}": Weapons.MICA_IR,
"{6D778860-7BB8-4ACB-9E95-BA772C6BBC2C}": Weapons.MICA_RF,
"MIM_104": Weapons.MIM_104,
"MIM_72": Weapons.MIM_72,
"{MBDA_MistralG}": Weapons.Mistral,
"{MBDA_MistralD}": Weapons.Mistral_,
"MK_82*28": Weapons.MK_82_28,
"{BRU-32 MK-20}": Weapons.Mk_20,
"{ACADB374-6D6C-45A0-BA7C-B22B2E108AE4}": Weapons.Mk_20_18,
"{ADD3FAE1-EBF6-4EF9-8EFC-B36B5DDF1E6B}": Weapons.Mk_20_Rockeye___490lbs_CBU__247_x_HEAT_Bomblets,
"{90321C8E-7ED1-47D4-A160-E074D5ABD902}": Weapons.Mk_81___250lb_GP_Bomb_LD,
"{BRU-32 MK-82}": Weapons.Mk_82,
"{BRU-32 MK-82AIR}": Weapons.Mk_82AIR,
"{Mk_82Y}": Weapons.Mk_82Y___500lb_GP_Chute_Retarded_HD,
"{Mk82AIR}": Weapons.Mk_82_AIR_Ballute___500lb_GP_Bomb_HD,
"{BRU-32 MK-82SE}": Weapons.Mk_82_SnakeEye,
"{Mk82SNAKEYE}": Weapons.Mk_82_Snakeye___500lb_GP_Bomb_HD,
"{BCE4E030-38E9-423E-98ED-24BE3DA87C32}": Weapons.Mk_82___500lb_GP_Bomb_LD,
"{BRU-32 MK-83}": Weapons.Mk_83,
"{Mk_83CT}": Weapons.Mk_83CT,
"{BRU42_MK83 RS}": Weapons.Mk_83_,
"{BRU3242_MK83 RS}": Weapons.Mk_83__,
"{PHXBRU3242_MK83 RS}": Weapons.Mk_83___,
"{7A44FF09-527C-4B7E-B42B-3F111CFE50FB}": Weapons.Mk_83___1000lb_GP_Bomb_LD,
"{BRU42_MK83 LS}": Weapons.Mk_83____,
"{BRU3242_MK83 LS}": Weapons.Mk_83_____,
"{PHXBRU3242_MK83 LS}": Weapons.Mk_83______,
"{BRU-32 MK-84}": Weapons.Mk_84,
"{F092B80C-BB54-477E-9408-66DEEF740008}": Weapons.Mk_84_18,
"{D3ABF208-FA56-4D56-BB31-E0D931D57AE3}": Weapons.Mk_84_28,
"{AB8B8299-F1CC-4359-89B5-2172E0CF4A5A}": Weapons.Mk_84___2000lb_GP_Bomb_LD,
"{44EE8698-89F9-48EE-AF36-5FD31896A82D}": Weapons.MPS_410,
"{44EE8698-89F9-48EE-AF36-5FD31896A82C}": Weapons.MPS_410_,
"MXU-648-TP": Weapons.MXU_648_TP,
"{ODAB-500PM}": Weapons.ODAB_500PM___525_kg__bomb__parachute__simulated_aerosol,
"{OFAB-100-120-TU}": Weapons.OFAB_100_120_TU_x_4,
"{OFAB_100_Jupiter}": Weapons.OFAB_100_Jupiter___100kg_GP_Bomb_LD,
"{ORO57K_S5M1_HEFRAG}": Weapons.ORO_57K___S_5M1_HE_FRAG_FFAR_x_8,
"{ORO57K_S5MO_HEFRAG}": Weapons.ORO_57K___S_5MO_HE_FRAG_FFAR_x_8,
"{ORO57K_S5M_HEFRAG}": Weapons.ORO_57K___S_5M_x_8,
"oh-58-brauning": Weapons.oh_58_brauning,
"{199D6D51-1764-497E-9AE5-7D07C8D4D87E}": Weapons.Pavetack_F_111,
"PKT_7_62": Weapons.PKT_7_62,
"{PK-3}": Weapons.PK_3___7_62mm_GPMG,
"PTB300_MIG15": Weapons.PTB300_MIG15,
"PTB400_MIG15": Weapons.PTB400_MIG15,
"PTB400_MIG19": Weapons.PTB400_MIG19,
"PTB600_MIG15": Weapons.PTB600_MIG15,
"PTB760_MIG19": Weapons.PTB760_MIG19,
"{P-50T}": Weapons.P_50T___50kg_Practice_Bomb_LD,
"{RBK_250_275_AO_1SCH}": Weapons.RBK_250_275___150_x_AO_1SCh__250kg_CBU_HE_Frag,
"{4203753F-8198-4E85-9924-6F8FF679F9FF}": Weapons.RBK_250___42_x_PTAB_2_5M__250kg_CBU_Medium_HEAT_AP,
"{RBK_500U_OAB_2_5RT}": Weapons.RBK_500U___126_x_OAB_2_5RT__500kg_CBU_HE_Frag,
"{D5435F26-F120-4FA3-9867-34ACE562EF1B}": Weapons.RBK_500_255___30_x_PTAB_10_5__500kg_CBU_Heavy_HEAT_AP,
"{7AEC222D-C523-425e-B714-719C0D1EB14D}": Weapons.RBK_500___268_x_PTAB_1M__500kg_CBU_Light_HEAT_AP,
"{Rb04AI}": Weapons.RB_04E__for_A_I___with_launcher,
"{Rb15AI}": Weapons.RB_15F__for_A_I___with_launcher,
"{Rb04}": Weapons.Rb_04E_Anti_ship_Missile,
"{Robot05}": Weapons.Rb_05A_MCLOS_ASM_AShM_AAM,
"{Rb15}": Weapons.Rb_15F_Programmable_Anti_ship_Missile,
"{Robot24J}": Weapons.Rb_24J__AIM_9P__Sidewinder_IR_AAM,
"{Robot24}": Weapons.Rb_24__AIM_9B__Sidewinder_IR_AAM,
"{Robot74}": Weapons.Rb_74__AIM_9L__Sidewinder_IR_AAM,
"{RB75}": Weapons.Rb_75A__AGM_65A_Maverick___TV_ASM_,
"{RB75B}": Weapons.Rb_75B__AGM_65B_Maverick___TV_ASM_,
"{RB75T}": Weapons.Rb_75T__AGM_65A_Maverick___TV_ASM_Lg_HE_Whd_,
"REFLEX_9M119": Weapons.REFLEX_9M119,
"{RKL609_L}": Weapons.RKL609_ECM_Pod__Left_,
"{RKL609_R}": Weapons.RKL609_ECM_Pod__Right_,
"{RN-24}": Weapons.RN_24___470kg__nuclear_bomb__free_fall,
"{RN-28}": Weapons.RN_28___260_kg__nuclear_bomb__free_fall,
"ROLAND": Weapons.ROLAND,
"{M2KC_RPL_522}": Weapons.RPL_522_1300_liters_Fuel_Tank,
"{M2KC_RPL_522_EMPTY}": Weapons.RPL_522_1300_liters_Fuel_Tank__Empty_,
"{M2KC_02_RPL541}": Weapons.RPL_541_2000_liters_Fuel_Tank_,
"{M2KC_08_RPL541}": Weapons.RPL_541_2000_liters_Fuel_Tank__,
"{M2KC_02_RPL541_EMPTY}": Weapons.RPL_541_2000_liters_Fuel_Tank__Empty_,
"{M2KC_08_RPL541_EMPTY}": Weapons.RPL_541_2000_liters_Fuel_Tank__Empty__,
"{British_AP_25LBNo1_3INCHNo1}": Weapons.RP_3_25lb_AP_Mk_I,
"{British_HE_60LBFNo1_3INCHNo1}": Weapons.RP_3_60lb_F_No1_Mk_I,
"{British_HE_60LBSAPNo2_3INCHNo1}": Weapons.RP_3_60lb_SAP_No2_Mk_I,
"{RS-2US}": Weapons.RS2US___AAM__beam_rider,
"{R-13M1}": Weapons.R_13M1___AAM__IR_guided,
"{R-13M}": Weapons.R_13M___AAM__IR_guided,
"{CCF898C9-5BC7-49A4-9D1E-C3ED3D5166A1}": Weapons.R_24R__AA_7_Apex_SA____Semi_Act_Rdr,
"{6980735A-44CC-4BB9-A1B5-591532F1DC69}": Weapons.R_24T__AA_7_Apex_IR____Infra_Red,
"{E8069896-8435-4B90-95C0-01A03AE6E400}": Weapons.R_27ER__AA_10_Alamo_C____Semi_Act_Extended_Range,
"{B79C379A-9E87-4E50-A1EE-7F7E29C2E87A}": Weapons.R_27ET__AA_10_Alamo_D____IR_Extended_Range,
"{9B25D316-0434-4954-868F-D51DB1A38DF0}": Weapons.R_27R__AA_10_Alamo_A____Semi_Act_Rdr,
"{88DAC840-9F75-4531-8689-B46E64E42E53}": Weapons.R_27T__AA_10_Alamo_B____Infra_Red,
"{F1243568-8EF0-49D4-9CB5-4DA90D92BC1D}": Weapons.R_33__AA_9_Amos____Semi_Act_Rdr,
"{R-3R}": Weapons.R_3R___AAM__radar_guided,
"{R-3S}": Weapons.R_3S___AAM__IR_guided,
"{4EDBA993-2E34-444C-95FB-549300BF7CAF}": Weapons.R_40R__AA_6_Acrid____Semi_Act_Rdr,
"{5F26DBC2-FB43-4153-92DE-6BBCE26CB0FF}": Weapons.R_40T__AA_6_Acrid____Infra_Red,
"{FC23864E-3B80-48E3-9C03-4DA8B1D7497B}": Weapons.R_550_Magic_2,
"{R-55}": Weapons.R_55___AAM__IR_guided,
"{R-60}": Weapons.R_60,
"{R-60M}": Weapons.R_60M,
"{R-60M 2L}": Weapons.R_60M_x_2,
"{R-60M 2R}": Weapons.R_60M_x_2_,
"{682A481F-0CB5-4693-A382-D00DD4A156D7}": Weapons.R_60M__AA_8_Aphid____Infra_Red,
"{R-60 2L}": Weapons.R_60_x_2,
"{R-60 2R}": Weapons.R_60_x_2_,
"{FBC29BFE-3D24-4C64-B81D-941239D12249}": Weapons.R_73__AA_11_Archer____Infra_Red,
"{CBC29BFE-3D24-4C64-B81D-941239D12249}": Weapons.R_73__AA_11_Archer____Infra_Red_,
"{B4C01D60-A8A3-4237-BD72-CA7655BC0FE9}": Weapons.R_77__AA_12_Adder____Active_Rdr,
"{B4C01D60-A8A3-4237-BD72-CA7655BC0FEC}": Weapons.R_77__AA_12_Adder____Active_Rdr_,
"{0511E528-EA28-4caf-A212-00D1408DF10A}": Weapons.SAB_100___100kg_flare_illumination_Bomb,
"{FAS}": Weapons.Sand_Filter,
"{SC_250_T1_L2}": Weapons.SC_250_Type_1_L2___250kg_GP_Bomb_LD,
"{Schloss500XIIC1_SC_250_T3_J}": Weapons.SC_250_Type_3_J___250kg_GP_Bomb_LD,
"{SC_500_L2}": Weapons.SC_500_L2___500kg_GP_Bomb_LD,
"SC_501_SC250": Weapons.SC_501_SC250,
"SC_501_SC500": Weapons.SC_501_SC500,
"{SC_50}": Weapons.SC_50___50kg_GP_Bomb_LD,
"{SD_250_Stg}": Weapons.SD_250_Stg___250kg_GP_Bomb_LD,
"{SD_500_A}": Weapons.SD_500_A___500kg_GP_Bomb_LD,
"SEASPARROW": Weapons.SEASPARROW,
"{1461CD18-429A-42A9-A21F-4C621ECD4573}": Weapons.Sea_Eagle___ASM,
"{0519A263-0AB6-11d6-9193-00A0249B6F00}": Weapons.Shpil_2_Laser_Recon__Intel_Pod,
"{8C3F26A2-FA0F-11d5-9190-00A0249B6F00}": Weapons.Sky_Shadow_ECM_Pod,
"SM2": Weapons.SM2,
"{A4BCC903-06C8-47bb-9937-A30FEDB4E743}": Weapons.Smokewinder___blue,
"{A4BCC903-06C8-47bb-9937-A30FEDB4E742}": Weapons.Smokewinder___green,
"{A4BCC903-06C8-47bb-9937-A30FEDB4E746}": Weapons.Smokewinder___orange,
"{A4BCC903-06C8-47bb-9937-A30FEDB4E741}": Weapons.Smokewinder___red,
"{A4BCC903-06C8-47bb-9937-A30FEDB4E744}": Weapons.Smokewinder___white,
"{A4BCC903-06C8-47bb-9937-A30FEDB4E745}": Weapons.Smokewinder___yellow,
"{CE2_SMOKE_WHITE}": Weapons.Smoke_for_Christen_Eagle_II__white,
"{D3F65166-1AB8-490f-AF2F-2FB6E22568B3}": Weapons.Smoke_Generator___blue,
"{INV-SMOKE-BLUE}": Weapons.Smoke_Generator___blue_,
"{D3F65166-1AB8-490f-AF2F-2FB6E22568B2}": Weapons.Smoke_Generator___green,
"{INV-SMOKE-GREEN}": Weapons.Smoke_Generator___green_,
"{D3F65166-1AB8-490f-AF2F-2FB6E22568B6}": Weapons.Smoke_Generator___orange,
"{INV-SMOKE-ORANGE}": Weapons.Smoke_Generator___orange_,
"{D3F65166-1AB8-490f-AF2F-2FB6E22568B1}": Weapons.Smoke_Generator___red,
"{INV-SMOKE-RED}": Weapons.Smoke_Generator___red_,
"{D3F65166-1AB8-490f-AF2F-2FB6E22568B4}": Weapons.Smoke_Generator___white,
"{INV-SMOKE-WHITE}": Weapons.Smoke_Generator___white_,
"{D3F65166-1AB8-490f-AF2F-2FB6E22568B5}": Weapons.Smoke_Generator___yellow,
"{INV-SMOKE-YELLOW}": Weapons.Smoke_Generator___yellow_,
"{SMOKE-RED-AVIOJET}": Weapons.Smoke_System_red_colorant,
"{SMOKE-YELLOW-AVIOJET}": Weapons.Smoke_System_yellow_colorant,
"{SMOKE-SYSTEM-AVIOJET}": Weapons.Smoke_System__White_Smoke_,
"{MIG21_SMOKE_RED}": Weapons.Smoke___red___21__t,
"{SMOKE_WHITE}": Weapons.Smoke___white___21,
"{MIG21_SMOKE_WHITE}": Weapons.Smoke___white___21_,
"SPITFIRE_45GAL_SLIPPER_TANK": Weapons.SPITFIRE_45GAL_SLIPPER_TANK,
"SPITFIRE_45GAL_TORPEDO_TANK": Weapons.SPITFIRE_45GAL_TORPEDO_TANK,
"{E92CBFE5-C153-11d8-9897-000476191836}": Weapons.SPPU_22_1___2_x_23mm__GSh_23L_Autocannon_Pod,
"{SPRD}": Weapons.SPRD_99_takeoff_rocket,
"{SPS-141-100}": Weapons.SPS_141_100__21____jamming_and_countermeasures_pod,
"{F75187EF-1D9E-4DA9-84B4-1A1A14A3973A}": Weapons.SPS_141___ECM_Jamming_Pod,
"{CAE48299-A294-4bad-8EE6-89EFC5DCDF00}": Weapons.SUU_25_x_8_LUU_2___Target_Marker_Flares,
"{BRU42_SUU25}": Weapons.SUU_25___8_LUU_2,
"{BRU3242_SUU25}": Weapons.SUU_25___8_LUU_2_,
"{FD21B13E-57F3-4C2A-9F78-C522D0B5BCE1}": Weapons.Super_530D,
"SVIR_9M119": Weapons.SVIR_9M119,
"{S-24A}": Weapons.S_24A__21____180_kg__cumulative_unguided_rocket,
"{S-24B}": Weapons.S_24B__21____180_kg__fragmented_unguided_rocket,
"{1FA14DEA-8CDB-45AD-88A8-EC068DF1E65A}": Weapons.S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk_,
"{3858707D-F5D5-4bbb-BDD8-ABB0530EBC7C}": Weapons.S_24B___240mm_UnGd_Rkt__235kg__HE_Frag___Low_Smk__,
"{0180F983-C14A-11d8-9897-000476191836}": Weapons.S_25L___320Kg__340mm_Laser_Guided_Rkt,
"{A0648264-4BC0-4EE8-A543-D119F6BA4257}": Weapons.S_25_OFM___340mm_UnGd_Rkt__480kg_Penetrator,
"{S_25_O}": Weapons.S_25_O___420mm_UnGd_Rkt__380kg_Frag,
"{0519A262-0AB6-11d6-9193-00A0249B6F00}": Weapons.Tangazh_ELINT_pod,
"{TER_9A_2L*CBU-87}": Weapons.TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb,
"{TER_9A_2R*CBU-87}": Weapons.TER_9A_with_2_x_CBU_87___202_x_CEM_Cluster_Bomb_,
"{TER_9A_2L*CBU-97}": Weapons.TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb,
"{TER_9A_2R*CBU-97}": Weapons.TER_9A_with_2_x_CBU_97___10_x_SFW_Cluster_Bomb_,
"{TER_9A_2L*GBU-12}": Weapons.TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb,
"{TER_9A_2R*GBU-12}": Weapons.TER_9A_with_2_x_GBU_12___500lb_Laser_Guided_Bomb_,
"{TER_9A_2L*MK-82AIR}": Weapons.TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD,
"{TER_9A_2R*MK-82AIR}": Weapons.TER_9A_with_2_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD_,
"{TER_9A_2L*MK-82_Snakeye}": Weapons.TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD,
"{TER_9A_2R*MK-82_Snakeye}": Weapons.TER_9A_with_2_x_Mk_82_Snakeye___500lb_GP_Bomb_HD_,
"{TER_9A_2L*MK-82}": Weapons.TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD,
"{TER_9A_2R*MK-82}": Weapons.TER_9A_with_2_x_Mk_82___500lb_GP_Bomb_LD_,
"{TER_9A_3*BDU-33}": Weapons.TER_9A_with_3_x_BDU_33___25lb_Practice_Bomb_LD,
"{TER_9A_3*CBU-87}": Weapons.TER_9A_with_3_x_CBU_87___202_x_CEM_Cluster_Bomb,
"{TER_9A_3*CBU-97}": Weapons.TER_9A_with_3_x_CBU_97___10_x_SFW_Cluster_Bomb,
"{TER_9A_3*MK-82AIR}": Weapons.TER_9A_with_3_x_Mk_82_AIR_Ballute___500lb_GP_Bomb_HD,
"{TER_9A_3*MK-82_Snakeye}": Weapons.TER_9A_with_3_x_Mk_82_Snakeye___500lb_GP_Bomb_HD,
"{TER_9A_3*MK-82}": Weapons.TER_9A_with_3_x_Mk_82___500lb_GP_Bomb_LD,
"TEST_ROTARY_LAUNCHER_MK82": Weapons.TEST_ROTARY_LAUNCHER_MK82,
"TGM_65H": Weapons.TGM_65H,
"{EF124821-F9BB-4314-A153-E0E2FE1162C4}": Weapons.TORNADO_Fuel_tank,
"TOW": Weapons.TOW,
"{U22A}": Weapons.U22_A_Jammer,
"{UB-16-57UMP}": Weapons.UB_16UM_pod___16_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag,
"{UB-16_S5M}": Weapons.UB_16UM___16_S_5M,
"{UB-32A-24}": Weapons.UB_32A_24_pod___32_x_S_5KO,
"{637334E4-AB5A-47C0-83A6-51B7F1DF3CD5}": Weapons.UB_32A_pod___32_x_S_5KO__57mm_UnGd_Rkts__HEAT_Frag,
"{UB-32_S5M}": Weapons.UB_32M___32_S_5M,
"{05544F1A-C39C-466b-BC37-5BD1D52E57BB}": Weapons.UPK_23_250___2_x_23mm__GSh_23L_Autocannon_Pod,
"{UPK-23-250 MiG-21}": Weapons.UPK_23_250___gun_pod,
"{U22}": Weapons.U_22_Jammer_pod,
"{WGr21}": Weapons.Werfer_Granate_21___21_cm_UnGd_air_to_air_rocket,
"XM158_M151": Weapons.XM158_M151,
"XM158_M156": Weapons.XM158_M156,
"XM158_M257": Weapons.XM158_M257,
"XM158_M274": Weapons.XM158_M274,
"XM158_MK1": Weapons.XM158_MK1,
"XM158_MK5": Weapons.XM158_MK5,
"{MOSQUITO_100GAL_SLIPPER_TANK}": Weapons._100_gal__Drop_Tank,
"{US_108GAL_PAPER_FUEL_TANK}": Weapons._108_US_gal__Paper_Fuel_Tank,
"{US_110GAL_FUEL_TANK}": Weapons._110_US_gal__Fuel_Tank,
"{12xM64}": Weapons._12_AN_M64___500lb_GP_Bomb_LD,
"{D6A0441E-6794-4FEB-87F7-E68E2290DFAB}": Weapons._12_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD,
"{E70446B7-C7E6-4B95-B685-DEA10CAD1A0E}": Weapons._12_x_FAB_500_M_62___500kg_GP_Bombs_LD,
"{FW_190_R4M_LEFT_WING}": Weapons._13_R4M_3_2kg_UnGd_air_to_air_rocket,
"{FW_190_R4M_RGHT_WING}": Weapons._13_R4M_3_2kg_UnGd_air_to_air_rocket_,
"{US_150GAL_FUEL_TANK}": Weapons._150_US_gal__Fuel_Tank,
"{22906569-A97F-404B-BA4F-D96DBF94D05E}": Weapons._20_x_AGM_86C_ALCM,
"{B0241BD2-5628-47E0-954C-A8675B7E698E}": Weapons._24_x_FAB_250___250kg_GP_Bombs_LD,
"{British_GP_250LB_Bomb_Mk1}": Weapons._250_lb_GP_Mk_I,
"{British_GP_250LB_Bomb_Mk4}": Weapons._250_lb_GP_Mk_IV,
"{British_GP_250LB_Bomb_Mk4_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_GP_Mk_IV_,
"{British_GP_250LB_Bomb_Mk5}": Weapons._250_lb_GP_Mk_V,
"{British_GP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_GP_Mk_V_,
"{British_MC_250LB_Bomb_Mk1}": Weapons._250_lb_MC_Mk_I,
"{British_MC_250LB_Bomb_Mk2}": Weapons._250_lb_MC_Mk_II,
"{British_MC_250LB_Bomb_Mk2_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_MC_Mk_II_,
"{British_MC_250LB_Bomb_Mk1_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_MC_Mk_I_,
"{British_SAP_250LB_Bomb_Mk5}": Weapons._250_lb_S_A_P_,
"{British_SAP_250LB_Bomb_Mk5_on_Handley_Page_Type_B_Cut_Bar}": Weapons._250_lb_S_A_P__,
"{B58F99BA-5480-4572-8602-28B0449F5260}": Weapons._27_x_M117___750lb_GP_Bombs_LD,
"{6C47D097-83FF-4FB2-9496-EAB36DDF0B05}": Weapons._27_x_Mk_82___500lb_GP_Bombs_LD,
"{89D000B0-0360-461A-AD83-FB727E2ABA98}": Weapons._2xGBU_12___500lb_Laser_Guided_Bomb,
"{BRU-42_2xGBU-12_right}": Weapons._2xGBU_12___500lb_Laser_Guided_Bomb_,
"{LYSBOMB}": Weapons._2x_80kg_LYSB_71_Illumination_Bomb,
"{BRU42_2*BDU45 RS}": Weapons._2_BDU_45,
"{BRU42_2*BDU45B RS}": Weapons._2_BDU_45B,
"{BRU3242_2*BDU45B RS}": Weapons._2_BDU_45B_,
"{PHXBRU3242_2*BDU45B RS}": Weapons._2_BDU_45B__,
"{BRU42_2*BDU45B LS}": Weapons._2_BDU_45B___,
"{BRU3242_2*BDU45B LS}": Weapons._2_BDU_45B____,
"{PHXBRU3242_2*BDU45B LS}": Weapons._2_BDU_45B_____,
"{BRU3242_2*BDU45 RS}": Weapons._2_BDU_45_,
"{PHXBRU3242_2*BDU45 RS}": Weapons._2_BDU_45__,
"{BRU42_2*BDU45 LS}": Weapons._2_BDU_45___,
"{BRU3242_2*BDU45 LS}": Weapons._2_BDU_45____,
"{PHXBRU3242_2*BDU45 LS}": Weapons._2_BDU_45_____,
"{BRU-70_2*CBU-99_LEFT}": Weapons._2_CBU_99,
"{BRU-70_2*CBU-99_RIGHT}": Weapons._2_CBU_99_,
"{BRU-42_2*GBU-12_LEFT}": Weapons._2_GBU_12,
"{BRU-42_2*GBU-12_RIGHT}": Weapons._2_GBU_12_,
"{BRU-42_2*GBU-16_LEFT}": Weapons._2_GBU_16,
"{BRU-42_2*GBU-16_RIGHT}": Weapons._2_GBU_16_,
"{BRU-42_2*GBU-38_LEFT}": Weapons._2_GBU_38,
"{BRU-42_2*GBU-38_RIGHT}": Weapons._2_GBU_38_,
"{BRU-70A_2*GBU-54_LEFT}": Weapons._2_GBU_54_V_1_B,
"{BRU-70A_2*GBU-54_RIGHT}": Weapons._2_GBU_54_V_1_B_,
"{BRU42_2*LAU10 L}": Weapons._2_LAU_10___4_ZUNI_MK_71,
"{BRU3242_2*LAU10 L}": Weapons._2_LAU_10___4_ZUNI_MK_71_,
"{BRU42_2*LAU10 R}": Weapons._2_LAU_10___4_ZUNI_MK_71__,
"{BRU3242_2*LAU10 R}": Weapons._2_LAU_10___4_ZUNI_MK_71___,
"{BRU42_2*LAU10 RS}": Weapons._2_LAU_10___4_ZUNI_MK_71____,
"{BRU3242_2*LAU10 RS}": Weapons._2_LAU_10___4_ZUNI_MK_71_____,
"{PHXBRU3242_2*LAU10 RS}": Weapons._2_LAU_10___4_ZUNI_MK_71______,
"{BRU42_2*LAU10 LS}": Weapons._2_LAU_10___4_ZUNI_MK_71_______,
"{BRU3242_2*LAU10 LS}": Weapons._2_LAU_10___4_ZUNI_MK_71________,
"{PHXBRU3242_2*LAU10 LS}": Weapons._2_LAU_10___4_ZUNI_MK_71_________,
"{BRU42_2*LUU2 R}": Weapons._2_LUU_2,
"{BRU3242_2*LUU2 R}": Weapons._2_LUU_2_,
"{BRU42_2*LUU2 L}": Weapons._2_LUU_2__,
"{BRU3242_2*LUU2 L}": Weapons._2_LUU_2___,
"{BRU42_2*MK20 RS}": Weapons._2_MK_20,
"{BRU3242_2*MK20 RS}": Weapons._2_MK_20_,
"{PHXBRU3242_2*MK20 RS}": Weapons._2_MK_20__,
"{BRU42_2*MK20 LS}": Weapons._2_MK_20___,
"{BRU3242_2*MK20 LS}": Weapons._2_MK_20____,
"{PHXBRU3242_2*MK20 LS}": Weapons._2_MK_20_____,
"{BRU-42_2*MK-20_LEFT}": Weapons._2_Mk_20_Rockeye,
"{BRU-42_2*MK-20_RIGHT}": Weapons._2_Mk_20_Rockeye_,
"{BRU42_2*MK81 RS}": Weapons._2_Mk_81,
"{BRU3242_2*MK81 RS}": Weapons._2_Mk_81_,
"{PHXBRU3242_2*MK81 RS}": Weapons._2_Mk_81__,
"{BRU42_2*MK81 LS}": Weapons._2_Mk_81___,
"{BRU3242_2*MK81 LS}": Weapons._2_Mk_81____,
"{PHXBRU3242_2*MK81 LS}": Weapons._2_Mk_81_____,
"{BRU-42_2*Mk-82_LEFT}": Weapons._2_Mk_82,
"{BRU42_2*MK82AIR RS}": Weapons._2_Mk_82AIR,
"{BRU3242_2*MK82AIR RS}": Weapons._2_Mk_82AIR_,
"{PHXBRU3242_2*MK82AIR RS}": Weapons._2_Mk_82AIR__,
"{BRU42_2*MK82AIR LS}": Weapons._2_Mk_82AIR___,
"{BRU3242_2*MK82AIR LS}": Weapons._2_Mk_82AIR____,
"{PHXBRU3242_2*MK82AIR LS}": Weapons._2_Mk_82AIR_____,
"{BRU-42_2*Mk-82_RIGHT}": Weapons._2_Mk_82_,
"{BRU-42_2*Mk-82AIR_LEFT}": Weapons._2_Mk_82_AIR,
"{BRU-42_2*Mk-82AIR_RIGHT}": Weapons._2_Mk_82_AIR_,
"{BRU42_2*MK82SE RS}": Weapons._2_Mk_82_SnakeEye,
"{BRU3242_2*MK82SE RS}": Weapons._2_Mk_82_SnakeEye_,
"{PHXBRU3242_2*MK82SE RS}": Weapons._2_Mk_82_SnakeEye__,
"{BRU42_2*MK82SE LS}": Weapons._2_Mk_82_SnakeEye___,
"{BRU3242_2*MK82SE LS}": Weapons._2_Mk_82_SnakeEye____,
"{PHXBRU3242_2*MK82SE LS}": Weapons._2_Mk_82_SnakeEye_____,
"{BRU-42_2*Mk-82SNAKEYE_LEFT}": Weapons._2_Mk_82_Snakeye,
"{BRU-42_2*Mk-82SNAKEYE_RIGHT}": Weapons._2_Mk_82_Snakeye_,
"{BRU42_2*MK82 RS}": Weapons._2_Mk_82__,
"{BRU3242_2*MK82 RS}": Weapons._2_Mk_82___,
"{PHXBRU3242_2*MK82 RS}": Weapons._2_Mk_82____,
"{BRU42_2*MK82 LS}": Weapons._2_Mk_82_____,
"{BRU3242_2*MK82 LS}": Weapons._2_Mk_82______,
"{PHXBRU3242_2*MK82 LS}": Weapons._2_Mk_82_______,
"{BRU-42_2*Mk-83_LEFT}": Weapons._2_Mk_83,
"{BRU-42_2*Mk-83_RIGHT}": Weapons._2_Mk_83_,
"{BRU42_2*SUU25 L}": Weapons._2_SUU_25___8_LUU_2,
"{BRU3242_2*SUU25 L}": Weapons._2_SUU_25___8_LUU_2_,
"{BRU42_2*SUU25 R}": Weapons._2_SUU_25___8_LUU_2__,
"{BRU3242_2*SUU25 R}": Weapons._2_SUU_25___8_LUU_2___,
"{2x9M120F_Ataka_V}": Weapons._2_x_9M120F_Ataka__AT_9_Spiral_2____AGM__SACLOS__HE,
"{2x9M120_Ataka_V}": Weapons._2_x_9M120_Ataka__AT_9_Spiral_2____ATGM__SACLOS__Tandem_HEAT,
"{2x9M220_Ataka_V}": Weapons._2_x_9M220O_Ataka__AT_9_Spiral_2____AAM__SACLOS__Frag,
"{07BE2D19-0E48-4B0B-91DA-5F6C8F9E3C75}": Weapons._2_x_ALARM,
"{C535596E-F7D2-4301-8BB4-B1658BB87ED7}": Weapons._2_x_BL_755_CBUs___450kg__147_Frag_Pen_bomblets,
"{TWIN_B13L_5OF}": Weapons._2_x_B_13L_pods___10_x_S_13_OF__122mm_UnGd_Rkts__Blast_Frag,
"{B13_5_S13OF_DUAL_L}": Weapons._2_x_B_13L___5_S_13_OF,
"{B13_5_S13OF_DUAL_R}": Weapons._2_x_B_13L___5_S_13_OF_,
"{TWIN_B_8M1_S_8KOM}": Weapons._2_x_B_8M1_pods___40_x_S_8KOM__80mm_UnGd_Rkts__HEAT_AP,
"{B8M1_20_S8KOM_DUAL_L}": Weapons._2_x_B_8M1___20_S_8KOM,
"{B8M1_20_S8KOM_DUAL_R}": Weapons._2_x_B_8M1___20_S_8KOM_,
"{B8M1_20_S8OFP2_DUAL_L}": Weapons._2_x_B_8M1___20_S_8OFP2,
"{B8M1_20_S8OFP2_DUAL_R}": Weapons._2_x_B_8M1___20_S_8OFP2_,
"{B8M1_20_S8TsM_DUAL_L}": Weapons._2_x_B_8M1___20_S_8TsM,
"{B8M1_20_S8TsM_DUAL_R}": Weapons._2_x_B_8M1___20_S_8TsM_,
"{TWIN_B_8M1_S_8_OFP2}": Weapons._2_x_B_8V20A_pods___40_x_S_8OFP2__80mm_UnGd_Rkts__HE_Frag_AP,
"{TWIN_B_8M1_S_8TsM}": Weapons._2_x_B_8V20A_pods___40_x_S_8TsM__80mm_UnGd_Rkts__Smk,
"{FAB_250_DUAL_L}": Weapons._2_x_FAB_250,
"{FAB_250_DUAL_R}": Weapons._2_x_FAB_250_,
"{FAB_500_DUAL_L}": Weapons._2_x_FAB_500,
"{FAB_500_DUAL_R}": Weapons._2_x_FAB_500_,
"{HVARx2}": Weapons._2_x_HVAR__UnGd_Rkts,
"{FAB-100x2}": Weapons._2_x_OFAB_100_Jupiter___100kg_GP_Bombs_LD,
"{RBK_250_PTAB25M_DUAL_L}": Weapons._2_x_RBK_250_PTAB_2_5M,
"{RBK_250_PTAB25M_DUAL_R}": Weapons._2_x_RBK_250_PTAB_2_5M_,
"{RBK_500_PTAB105_DUAL_L}": Weapons._2_x_RBK_500_255_PTAB_10_5,
"{RBK_500_PTAB105_DUAL_R}": Weapons._2_x_RBK_500_255_PTAB_10_5_,
"{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._2_x_RP_3_25lb_AP_Mk_I,
"{MOSSIE_2_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._2_x_RP_3_25lb_AP_Mk_I_,
"{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._2_x_RP_3_60lb_F_No1_Mk_I,
"{MOSSIE_2_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._2_x_RP_3_60lb_F_No1_Mk_I_,
"{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._2_x_RP_3_60lb_SAP_No2_Mk_I,
"{MOSSIE_2_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._2_x_RP_3_60lb_SAP_No2_Mk_I_,
"{S25_DUAL_L}": Weapons._2_x_S_25,
"{S25_DUAL_R}": Weapons._2_x_S_25_,
"{TWIN_S25}": Weapons._2_x_S_25_OFM___340mm_UnGdrocket__480kg_Penetrator,
"{TWIN_S25_O}": Weapons._2_x_S_25_O___420mm_UnGd_Rkt__380kg_Frag,
"{BDAD04AA-4D4A-4E51-B958-180A89F963CF}": Weapons._33_x_FAB_250___250kg_GP_Bombs_LD,
"{AD5E5863-08FC-4283-B92C-162E2B2BD3FF}": Weapons._33_x_FAB_500_M_62___500kg_GP_Bombs_LD,
"3M45": Weapons._3M45,
"{BRU42_3*BDU33}": Weapons._3_BDU_33,
"{BRU3242_3*BDU33}": Weapons._3_BDU_33_,
"{BRU42_3*BDU33_N}": Weapons._3_BDU_33__,
"{BRU3242_3*BDU33_N}": Weapons._3_BDU_33___,
"{PHXBRU3242_BDU33}": Weapons._3_BDU_33____,
"{BRU-42A_3*GBU-12}": Weapons._3_GBU_12,
"{BRU-42A_3*GBU-16}": Weapons._3_GBU_16,
"{BRU-42_3*GBU-38}": Weapons._3_GBU_38,
"{BRU-70A_3*GBU-54}": Weapons._3_GBU_54_V_1_B,
"{BRU-42_3*Mk-81LD}": Weapons._3_Mk_81,
"{BRU-42_3*Mk-82LD}": Weapons._3_Mk_82,
"{BRU-42_3_MK82AIR}": Weapons._3_Mk_82_AIR,
"{BRU-42_3*Mk-82SNAKEYE}": Weapons._3_Mk_82_Snakeye,
"{BRU-42_3*Mk-83}": Weapons._3_Mk_83,
"{3xM8_ROCKETS_IN_TUBES}": Weapons._3_x_4_5_inch_M8_UnGd_Rocket,
"{639DB5DD-CB7E-4E42-AC75-2112BC397B97}": Weapons._3_x_FAB_1500_M_54___1500kg_GP_Bombs_LD,
"{A76344EB-32D2-4532-8FA2-0C1BDC00747E}": Weapons._3_x_LAU_61_pods___57_x_2_75_Hydra__UnGd_Rkts_M151__HE,
"48N6E2": Weapons._48N6E2,
"_4M80": Weapons._4M80,
"{M71BOMBD}": Weapons._4x_SB_M_71_120kg_GP_Bomb_High_drag,
"{M71BOMB}": Weapons._4x_SB_M_71_120kg_GP_Bomb_Low_drag,
"{AABA1A14-78A1-4E85-94DD-463CF75BD9E4}": Weapons._4_x_AGM_154C___JSOW_Unitary_BROACH,
"{4xAN-M64_on_InvCountedAttachmentPoints}": Weapons._4_x_AN_M64___500lb_GP_Bomb_LD,
"{3EA17AB0-A805-4D9E-8732-4CE00CB00F17}": Weapons._4_x_BGM_71D_TOW_ATGM,
"{B8C99F40-E486-4040-B547-6639172A5D57}": Weapons._4_x_GBU_27___2000lb_Laser_Guided_Penetrator_Bombs,
"{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._4_x_RP_3_25lb_AP_Mk_I,
"{MOSSIE_4_British_AP_25LBNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._4_x_RP_3_25lb_AP_Mk_I_,
"{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._4_x_RP_3_60lb_F_No1_Mk_I,
"{MOSSIE_4_British_HE_60LBFNo1_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._4_x_RP_3_60lb_F_No1_Mk_I_,
"{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_LEFT_WING_RAILS}": Weapons._4_x_RP_3_60lb_SAP_No2_Mk_I,
"{MOSSIE_4_British_HE_60LBSAPNo2_3INCHNo1_ON_RIGHT_WING_RAILS}": Weapons._4_x_RP_3_60lb_SAP_No2_Mk_I_,
"{British_GP_500LB_Bomb_Mk1}": Weapons._500_lb_GP_Mk_I,
"{British_GP_500LB_Bomb_Mk4}": Weapons._500_lb_GP_Mk_IV,
"{British_GP_500LB_Bomb_Mk5}": Weapons._500_lb_GP_Mk_V,
"{British_GP_500LB_Bomb_Mk4_Short}": Weapons._500_lb_GP_Short_tail,
"{British_GP_500LB_Bomb_Mk4_Short_on_Handley_Page_Type_B_Cut_Bar}": Weapons._500_lb_GP_Short_tail_,
"{British_MC_500LB_Bomb_Mk2}": Weapons._500_lb_MC_Mk_II,
"{British_MC_500LB_Bomb_Mk1_Short}": Weapons._500_lb_MC_Short_tail,
"{British_MC_500LB_Bomb_Mk1_Short_on_Handley_Page_Type_B_Cut_Bar}": Weapons._500_lb_MC_Short_tail_,
"{British_SAP_500LB_Bomb_Mk5}": Weapons._500_lb_S_A_P_,
"{MOSQUITO_50GAL_SLIPPER_TANK}": Weapons._50_gal__Drop_Tank,
"{72CAC282-AE18-490B-BD4D-35E7EE969E73}": Weapons._51_x_M117___750lb_GP_Bombs_LD,
"{B84DFE16-6AC7-4854-8F6D-34137892E166}": Weapons._51_x_Mk_82___500lb_GP_Bombs_LD,
"5V55": Weapons._5V55,
"{P47_5_HVARS_ON_LEFT_WING_RAILS}": Weapons._5_x_HVAR__UnGd_Rkt,
"{P47_5_HVARS_ON_RIGHT_WING_RAILS}": Weapons._5_x_HVAR__UnGd_Rkt_,
"{MER-5E_Mk82SNAKEYEx5}": Weapons._5_x_Mk_82_Snakeye___500lb_GP_Bomb_HD,
"{MER-5E_MK82x5}": Weapons._5_x_Mk_82___500lb_GP_Bombs_LD,
"{45447F82-01B5-4029-A572-9AAD28AF0275}": Weapons._6_x_AGM_86C_ALCM_on_MER,
"{2B7BDB38-4F45-43F9-BE02-E7B3141F3D24}": Weapons._6_x_BetAB_500___500kg_Concrete_Piercing_Bombs_LD,
"{D9179118-E42F-47DE-A483-A6C2EA7B4F38}": Weapons._6_x_FAB_1500_M_54___1500kg_GP_Bombs_LD,
"{26D2AF37-B0DF-4AB6-9D61-A150FF58A37B}": Weapons._6_x_FAB_500_M_62___500kg_GP_Bombs_LD,
"{C42EE4C3-355C-4B83-8B22-B39430B8F4AE}": Weapons._6_x_Kh_35__AS_20_Kayak____520kg__AShM__IN__Act_Rdr,
"{0290F5DE-014A-4BB1-9843-D717749B1DED}": Weapons._6_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC,
"{E79759F7-C622-4AA4-B1EF-37639A34D924}": Weapons._6_x_Mk_20_Rockeye___490lbs_CBUs__247_x_HEAT_Bomblets,
"{027563C9-D87E-4A85-B317-597B510E3F03}": Weapons._6_x_Mk_82___500lb_GP_Bombs_LD,
"{DT75GAL}": Weapons._75_US_gal__Fuel_Tank,
"{46ACDCF8-5451-4E26-BDDB-E78D5830E93C}": Weapons._8_x_AGM_84A_Harpoon_ASM,
"{8DCAF3A3-7FCF-41B8-BB88-58DEDA878EDE}": Weapons._8_x_AGM_86C_ALCM,
"{CD9417DF-455F-4176-A5A2-8C58D61AA00B}": Weapons._8_x_Kh_65__AS_15B_Kent____1250kg__ASM__IN__MCC,
"_9M111": Weapons._9M111,
"{9M114 Shturm-V-2 Rack}": Weapons._9M114_Shturm_V_2_Rack,
"{B919B0F4-7C25-455E-9A02-CEA51DB895E3}": Weapons._9M114_Shturm_V_2__AT_6_Spiral____ATGM__SACLOS,
"{57232979-8B0F-4db7-8D9A-55197E06B0F5}": Weapons._9M114_Shturm_V_8__AT_6_Spiral____ATGM__SACLOS,
"_9M117": Weapons._9M117,
"9M133": Weapons._9M133,
"9M14": Weapons._9M14,
"9M31": Weapons._9M31,
"9M311": Weapons._9M311,
"9M33": Weapons._9M33,
"_9M331": Weapons._9M331,
"_9M37": Weapons._9M37,
"_9M38": Weapons._9M38,
"_9M39": Weapons._9M39,
"{9S846_2xIGLA}": Weapons._9S846_Strelets___2_x_Igla,
"_NiteHawk_FLIR": Weapons._NiteHawk_FLIR
}
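# Illustrative lookup (editor's sketch, not part of the original generated data).
# It assumes the mapping above is bound to the module-level name `weapon_ids`, as
# it is in pydcs' dcs/weapons_data.py; adjust the name if it differs.
if __name__ == "__main__":
    # CLSID of the Mk-82 500lb GP bomb, taken from the table above.
    _clsid = "{BCE4E030-38E9-423E-98ED-24BE3DA87C32}"
    # dict.get returns None for CLSIDs missing from the table, so unknown pylon
    # entries found in a mission file can be skipped gracefully.
    print(weapon_ids.get(_clsid))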
|
pydcs/dcs
|
dcs/weapons_data.py
|
Python
|
lgpl-3.0
| 212,933
|
# -*- coding: utf-8 -*-
from django import forms
class StagiaireForms(forms.Form):
    _choix = ((1, 'Stagiaire'), (2, 'Classe'),)
    choisir = forms.ChoiceField(choices=_choix, widget=forms.RadioSelect, required=True, initial='1', label='')
    chercher = forms.CharField(max_length=100, required=False, label='')
def __init__(self, *args, **kwargs):
super(StagiaireForms, self).__init__(*args, **kwargs)
self.fields['chercher'].widget.attrs.update({'autofocus': 'autofocus', 'placeholder': 'Nom, prénom ou classe'})
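# Quick standalone check of the form (editor's sketch, not part of the original
# module). Assumes Django is installed; settings.configure() is only needed when
# this file is run directly, outside an already configured project.
if __name__ == "__main__":
    from django.conf import settings
    if not settings.configured:
        settings.configure(USE_I18N=False)
    _form = StagiaireForms({'choisir': '1', 'chercher': 'Dupont'})
    if _form.is_valid():
        # cleaned_data holds the validated radio choice and the search text.
        print(_form.cleaned_data)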
|
hetica/webeni
|
main/forms.py
|
Python
|
lgpl-3.0
| 531
|
# -*- coding: utf-8 -*-
#
# Django Diário documentation build configuration file, created by
# sphinx-quickstart v0.5.2.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.append(os.path.abspath('../examples'))
from basic_blog import settings
from django.core.management import setup_environ
setup_environ(settings)
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Django Diário'
copyright = u'2009, Guilherme Gondim and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoDiariodoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'DjangoDirio.tex', ur'Django Diário Documentation',
ur'Guilherme Gondim and contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/dev': None}
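# On newer Sphinx releases the named form of this mapping is preferred, e.g.:
# intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}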
|
marinho/django-diario
|
docs/conf.py
|
Python
|
lgpl-3.0
| 6,174
|
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
# This file is part of the E-Cell System
#
# Copyright (C) 1996-2016 Keio University
# Copyright (C) 2008-2016 RIKEN
# Copyright (C) 2005-2009 The Molecular Sciences Institute
#
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
#
# E-Cell System is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# E-Cell System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with E-Cell System -- see the file COPYING.
# If not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#END_HEADER
#
#'Design: Gabor Bereczki <gabor@e-cell.org>',
#'Design and application Framework: Koichi Takahashi <shafi@e-cell.org>',
#'Programming: Gabor Bereczki' at
# E-CELL Project, Lab. for Bioinformatics, Keio University.
#
import os
import os.path
import sys
import gtk
import gobject
from ecell.ui.model_editor.Utils import *
from ecell.ui.model_editor.Constants import *
from ecell.ui.model_editor.ModelEditor import *
from ecell.ui.model_editor.ViewComponent import *
class BadNestedList( Exception ):
def __init__( self, badString ):
        self.args = ("%s\ncannot be parsed as a nested list!" % badString,)
class NestedListEditor(ViewComponent):
#######################
# GENERAL CASES #
#######################
def __init__( self, aParentWindow, pointOfAttach ):
self.theParentWindow = aParentWindow
# call superclass
ViewComponent.__init__( self, pointOfAttach, 'attachment_box' )
self.theNestedList = copyValue( self.theParentWindow.thePropertyValue )
self.theTextView = self['textview']
self.textBuffer = gtk.TextBuffer()
self.theTextView.set_buffer( self.textBuffer )
self.textBuffer.set_text( self.__nestedListToString( self.theNestedList,0 ) )
def getValue( self ):
aText = self.textBuffer.get_text( self.textBuffer.get_start_iter(), self.textBuffer.get_end_iter())
try:
aValue= self.__stringToNestedList( aText)
except BadNestedList:
self.theParentWindow.theModelEditor.printMessage( ''.join(sys.exc_value), ME_ERROR )
aValue = None
return aValue
def __nestedListToString( self, aNestedList, level = 1 ):
if type(aNestedList ) == type(''):
return aNestedList
stringList = []
for aSubList in aNestedList:
stringList.append( self.__nestedListToString( aSubList ) )
if level == 0:
separator = '\n,'
else:
separator = ', '
return '( ' + separator.join( stringList ) + ' ) '
def __stringToNestedList( self, aString ):
        # returns a nested list if the string format is OK
        # raises BadNestedList if the format is not OK; the caller (getValue) reports
        # the error and returns None in that case
aString=aString.strip()
# decide whether list or string
        if ',' in aString or '(' in aString or ')' in aString:
#must be list
if not (aString.startswith('(') and aString.endswith(')') ):
raise BadNestedList( aString )
stringList = self.__split(aString[1:len(aString)-1].strip())
parsedList = map( self.__stringToNestedList, stringList )
if len(parsedList) == 1 and type( parsedList[0]) != type(parsedList ):
return stringList[0]
return parsedList
else:
return aString
def __split( self, aString ):
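        # splits on top-level commas only, e.g. "a, (b, c), d" yields
        # ['a', ' (b, c)', ' d'] (parenthesised sub-lists are kept intact)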
openPara = 0
returnList = []
actualWord = ''
for aChar in aString:
if aChar == ',' and openPara == 0:
returnList.append( actualWord )
actualWord = ''
elif aChar == '(':
openPara +=1
actualWord += aChar
elif aChar == ')':
openPara -=1
actualWord += aChar
else:
actualWord += aChar
if openPara!=0:
raise BadNestedList( aString )
returnList.append( actualWord )
return returnList
|
ecell/ecell3
|
ecell/frontend/model-editor/ecell/ui/model_editor/NestedListEditor.py
|
Python
|
lgpl-3.0
| 4,695
|
# Owner & Copyrights: Vance King Saxbe. A.
from GoldSaxEngineChinaMarkets import goldsaxenginechinamarkets
goldsaxenginechinamarkets.start()
# email to provide support at vancekingsaxbe@powerdominionenterprise.com, businessaffairs@powerdominionenterprise.com, For donations please write to fundraising@powerdominionenterprise.com
|
VanceKingSaxbeA/GoldSaxEngineChinaMarkets
|
goldsaxenginestart.py
|
Python
|
lgpl-3.0
| 334
|
# coding=utf-8
# Copyright © 2016 Computational Molecular Biology Group,
# Freie Universität Berlin (GER)
#
# This file is part of ReaDDy.
#
# ReaDDy is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
"""
Created on 21.06.17
@author: clonker
"""
from __future__ import print_function
from contextlib import closing
import numpy as np
import readdy._internal.readdybinding.api.top as top
import readdy._internal.readdybinding.common.io as io
import readdy._internal.readdybinding.common as common
from readdy._internal.readdybinding.api import KernelProvider
from readdy._internal.readdybinding.api import ParticleTypeFlavor
from readdy._internal.readdybinding.api import Simulation
from readdy.util import platform_utils
class ChainDecay(object):
def __init__(self, kernel, time_step):
self.kernel_provider = KernelProvider.get()
self.kernel_provider.load_from_dir(platform_utils.get_readdy_plugin_dir())
self.kernel = kernel
self.time_step = time_step
def _get_split_reaction(self):
def reaction_function(topology):
recipe = top.Recipe(topology)
if topology.get_n_particles() > 1:
edge = np.random.randint(0, topology.get_n_particles() - 1)
recipe.remove_edge(edge, edge + 1)
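                # removing a random interior edge splits the linear chain; the
                # resulting components become child topologies after the reaction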
return recipe
def rate_function(topology):
if topology.get_n_particles() > 1:
return float(topology.get_n_particles()) / 5.
else:
return .0
fun1 = top.ReactionFunction(reaction_function)
fun2 = top.RateFunction(rate_function)
reaction = top.TopologyReaction(fun1, fun2)
reaction.roll_back_if_invalid()
reaction.create_child_topologies_after_reaction()
return reaction
def _get_decay_reaction(self, typeidb):
def reaction_function(topology):
recipe = top.Recipe(topology)
if topology.get_n_particles() == 1:
recipe.change_particle_type(0, typeidb)
return recipe
def rate_function(topology):
return 1./self.time_step if topology.get_n_particles() == 1 else 0
fun1, fun2 = top.ReactionFunction(reaction_function), top.RateFunction(rate_function)
reaction = top.TopologyReaction(fun1, fun2)
reaction.raise_if_invalid()
reaction.create_child_topologies_after_reaction()
return reaction
def run(self, time_steps, out_file):
sim = Simulation()
sim.set_kernel(self.kernel)
sim.box_size = common.Vec(60, 20, 20)
sim.periodic_boundary = [True, True, True]
typeid_b = sim.register_particle_type("B", 1.0, 1.0, ParticleTypeFlavor.NORMAL)
sim.register_particle_type("Topology A", .5, .5, ParticleTypeFlavor.TOPOLOGY)
sim.register_potential_harmonic_repulsion("Topology A", "Topology A", 10)
sim.register_potential_harmonic_repulsion("Topology A", "B", 10)
sim.register_potential_harmonic_repulsion("B", "B", 10)
sim.configure_topology_bond_potential("Topology A", "Topology A", 10, 1.)
sim.configure_topology_angle_potential("Topology A", "Topology A", "Topology A", 10, np.pi)
# sim.configure_topology_dihedral_potential("Topology A", "Topology A", "Topology A", "Topology A", 1, 1, -np.pi)
n_elements = 50.
particles = [sim.create_topology_particle("Topology A", common.Vec(-25. + i, 0, 0))
for i in range(int(n_elements))]
topology = sim.add_topology(particles)
for i in range(int(n_elements - 1)):
topology.get_graph().add_edge(i, i + 1)
topology.add_reaction(self._get_decay_reaction(typeid_b))
topology.add_reaction(self._get_split_reaction())
traj_handle = sim.register_observable_flat_trajectory(1)
with closing(io.File(out_file, io.FileAction.CREATE, io.FileFlag.OVERWRITE)) as f:
traj_handle.enable_write_to_file(f, u"", 50)
sim.run_scheme_readdy(True)\
.evaluate_topology_reactions()\
.write_config_to_file(f)\
.configure_and_run(time_steps, self.time_step)
print("currently %s topologies" % len(sim.current_topologies()))
if __name__ == '__main__':
sim = ChainDecay("SingleCPU", .001)
sim.run(10000, "out.h5")
|
chrisfroe/readdy
|
wrappers/python/src/python/readdy/examples/chain_decay.py
|
Python
|
lgpl-3.0
| 4,956
|
# Copyright (C) 2015 by Clearcode <http://clearcode.cc>
# and associates (see AUTHORS).
# This file is part of matchbox.
# matchbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# matchbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with matchbox. If not, see <http://www.gnu.org/licenses/>.
"""Main matchbox module."""
from matchbox.box import MatchBox
from matchbox.index import MatchIndex
__version__ = "1.1.1"
__all__ = ("MatchBox", "MatchIndex")
|
ClearcodeHQ/matchbox
|
src/matchbox/__init__.py
|
Python
|
lgpl-3.0
| 925
|
# -*- coding: utf-8 -*-
import pandas as pd
import os
from flutype.data_management.fill_master import Master
import numpy as np
def print_full(x):
pd.set_option('display.max_rows', len(x))
print(x)
pd.reset_option('display.max_rows')
def extract_peptide_batch(ma):
gal_lig_fix = ma.read_gal_ligand("170725_N13", index=False)
unique_peptides = gal_lig_fix[0].drop_duplicates(subset=["ID"])
unique_peptides = unique_peptides[unique_peptides.ID != "Empty"]
unique_peptides.ID = unique_peptides.ID.astype(int)
unique_peptides.sort_values(by = "ID", inplace=True)
unique_peptides.Name = unique_peptides.Name.str.replace('FAIL_', "")
unique_peptides['Concentration'] = unique_peptides.Name.str.rpartition('_')[0]
unique_peptides['Concentration'] = unique_peptides.Concentration.str.partition('_')[0]
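    # illustrative example of the parsing above: a Name like "1.0_Kloe_Amid"
    # yields Concentration "1.0" and (below) ligand "Kloe_Amid"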
peptide_batch = pd.DataFrame(unique_peptides[["Name","Concentration"]].values,columns=["sid","concentration"])
peptide_batch["labeling"] = ""
peptide_batch["buffer"] = ""
peptide_batch["ph"] = ""
peptide_batch["purity"] = ""
peptide_batch["produced_by"] = ""
peptide_batch["comment"] = ""
peptide_batch["ligand"] = ""
peptide_batch["ligand"] = unique_peptides.Name.str.partition('_')[2].values
return peptide_batch
def gal_reformat(ma):
gal_lig_fix = ma.read_gal_ligand("170725_N15", index= False)
gal_lig_fix_new = pd.DataFrame(gal_lig_fix[0][["Block","Row","Column","Name"]])
mapping = {"Empty":"NO",
"Panama":"Pan3",
"California":"Cal2",
"Aichi":"Ach1",
"1.0_Kloe_Amid":"KLOA025",
"0.5_Kloe_Amid":"KLOA050",
"0.25_Kloe_Amid":"KLOA025",
"1.0_pep_Nenad":"NEN100",
"0.5_pep_Nenad":"NEN050",
"0.25_pep_Nenad":"NEN025",
"1.0_Fetuin":"P012-1",
"0.5_Fetuin":"P012-05",
"0.25_Fetuin":"P012-025",
"1.0_Leuchtefix":"DYE100",
"0.5_Leuchtefix":"DYE050",
"0.25_Leuchtefix":"DYE025",
'FAIL_': ""
}
for key in mapping:
gal_lig_fix_new.Name = gal_lig_fix_new.Name.str.replace(key, mapping[key])
mapping = {"1.0_Kloe_S":"KLOS100",
"0.5_Kloe_S":"KLOS050",
"0.25_Kloe_S":"KLOS025"
}
for key in mapping:
gal_lig_fix_new.loc[gal_lig_fix_new["Name"].str.contains(key), "Name"] = mapping[key]
return gal_lig_fix_new
def peptide_batches_not_in_master(ma,gal_lig_fix):
s_gal = set(gal_lig_fix["Name"].values)
data_dic = ma.read_data_tables()
s_pb = set(data_dic["peptide_batch"]["sid"].values)
s_ab = set(data_dic["antibody_batch"]["sid"].values)
s_vb = set(data_dic["virus_batch"]["sid"].values)
s_b = s_pb
s_b.update(s_ab)
s_b.update(s_vb)
return(s_gal - s_b)
def reshape_gal_file(shape, gal_file):
a = []
b = []
for i in range(shape[1]):
for ii in range(shape[0]):
a.append(i )
b.append(ii )
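    # e.g. for shape == (4, 8): a == [0]*4 + [1]*4 + ... + [7]*4 and
    # b == [0, 1, 2, 3] * 8, giving each block its row/column offset factor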
gal_file["row_factor"] = 0
gal_file["column_factor"] = 0
print(a)
print(b)
for block_num,block_factor in enumerate(a):
gal_file.loc[gal_file["Block"] == block_num+1, "row_factor"] = block_factor
for block_num, block_factor in enumerate(b):
gal_file.loc[gal_file["Block"] == block_num+1, "column_factor"] = block_factor
gal_file["Row"]=gal_file["Row"]+(gal_file["Row"].max()*gal_file["row_factor"])
gal_file["Column"]=gal_file["Column"]+(gal_file["Column"].max()*gal_file["column_factor"])
return gal_file
def three_viruses_gal(gal_file):
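    # assigns one virus per band of 12 blocks:
    # blocks 1-12 -> Ach1, 13-24 -> Cal2, 25-32 -> Pan3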
virus_map = {}
for i in range(1,33):
if i <= 12:
virus_map[i] = "Ach1"
elif 12 < i <= 24:
virus_map[i] = "Cal2"
elif 24 < i:
virus_map[i] = "Pan3"
for key in virus_map.keys():
gal_file.loc[gal_file["Block"]== key , "Name"] =virus_map[key]
return gal_file
####################################################################
if __name__ == "__main__":
ma_path = "../master_uncomplete/"
ma = Master(ma_path)
#peptide_batch = extract_peptide_batch(ma)
# print_full(peptide_batch)
#fp = os.path.join(ma.collections_path,"170725_N13","peptides_batch.csv")
# peptide_batch.to_csv(fp)
ma_path_standard = "../master/"
ma_standard = Master(ma_path_standard)
gal_lig_fix = gal_reformat(ma)
#subset = peptide_batches_not_in_master(ma_standard,gal_lig_fix)
gal_lig_fix= reshape_gal_file((4,8), gal_lig_fix)
gal_lig_fix = gal_lig_fix.reset_index(drop=True)
fp = os.path.join(ma.collections_path,"170725_P7","lig_fix_012.txt")
gal_lig_fix.to_csv(fp, sep='\t',index=True , index_label="ID")
#gal_lig_fix = three_viruses_gal(gal_lig_fix)
gal_lig_fix["Name"] = "Ach1"
fp2 = os.path.join(ma.collections_path,"170725_P7","lig_mob_016.txt")
gal_lig_fix.to_csv(fp2, sep='\t', index=True,index_label="ID")
|
janekg89/flutype_webapp
|
flutype/gal-file.py
|
Python
|
lgpl-3.0
| 5,034
|
# -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import, division
from builtins import str
import sys
import unittest
import re
import os
dirname = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(dirname, '..', '..', '..'))
import uuid
from SpiffWorkflow.serializer.dict import DictionarySerializer
from .baseTest import SerializerTest
from SpiffWorkflow import Workflow
class DictionarySerializerTest(SerializerTest):
def setUp(self):
super(DictionarySerializerTest, self).setUp()
self.serializer = DictionarySerializer()
self.return_type = dict
def _compare_results(self, item1, item2,
exclude_dynamic=False,
exclude_items=None):
exclude_items = exclude_items if exclude_items is not None else []
if exclude_dynamic:
if 'last_state_change' not in exclude_items:
exclude_items.append('last_state_change')
if 'last_task' not in exclude_items:
exclude_items.append('last_task')
if uuid.UUID not in exclude_items:
exclude_items.append(uuid.UUID)
if type(item1) in exclude_items:
return
if isinstance(item1, dict):
self.assertIsInstance(item2, dict)
for key, value in list(item1.items()):
self.assertIn(key, item2)
if key in exclude_items:
continue
self._compare_results(value, item2[key],
exclude_dynamic=exclude_dynamic,
exclude_items=exclude_items)
for key in item2:
self.assertIn(key, item1)
elif isinstance(item1, list):
msg = "item is not a list (is a " + str(type(item2)) + ")"
self.assertIsInstance(item2, list, msg)
msg = "list lengths differ: {} vs {}".format(
len(item1), len(item2))
self.assertEqual(len(item1), len(item2), msg)
for i, listitem in enumerate(item1):
self._compare_results(listitem, item2[i],
exclude_dynamic=exclude_dynamic,
exclude_items=exclude_items)
elif isinstance(item1, Workflow):
raise Exception("Item is a Workflow")
else:
msg = "{}: types differ: {} vs {}".format(
str(item2), type(item1), type(item2))
self.assertEqual(type(item1), type(item2), msg)
self.assertEqual(item1, item2)
def suite():
return unittest.defaultTestLoader.loadTestsFromTestCase(DictionarySerializerTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
|
knipknap/SpiffWorkflow
|
tests/SpiffWorkflow/serializer/dictTest.py
|
Python
|
lgpl-3.0
| 2,803
|
# Copyright (c) 2012 CNRS
# Author: Florent Lamiraux
#
# This file is part of hpp-corbaserver.
# hpp-corbaserver is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-corbaserver is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-corbaserver. If not, see
# <http://www.gnu.org/licenses/>.
from .quaternion import Quaternion
from .transform import Transform
def retrieveRosResource(path):
import os
ros_package_paths = os.environ["ROS_PACKAGE_PATH"].split(':')
if path.startswith("package://"):
relpath = path[len("package://"):]
for dir in ros_package_paths:
abspath = os.path.join(dir,relpath)
if os.path.exists(abspath):
return abspath
        raise IOError("Could not find resource " + path)
else:
return path
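# Illustrative use (the package name is hypothetical):
#   retrieveRosResource("package://example_pkg/meshes/base.dae")
# searches every entry of ROS_PACKAGE_PATH for example_pkg/meshes/base.dae
# and returns the first absolute path that exists.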
|
humanoid-path-planner/hpp-corbaserver
|
src/hpp/__init__.py
|
Python
|
lgpl-3.0
| 1,269
|
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2016 Nicolargo <nicolas@nicolargo.com>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Manage the Glances ports list (Ports plugin)."""
from glances.compat import range
from glances.logger import logger
from glances.globals import BSD
# XXX *BSDs: Segmentation fault (core dumped)
# -- https://bitbucket.org/al45tair/netifaces/issues/15
# Also used in the glances_ip plugin
if not BSD:
try:
import netifaces
netifaces_tag = True
except ImportError:
netifaces_tag = False
else:
netifaces_tag = False
class GlancesPortsList(object):
"""Manage the ports list for the ports plugin."""
_section = "ports"
_default_refresh = 60
_default_timeout = 3
def __init__(self, config=None, args=None):
# ports_list is a list of dict (JSON compliant)
# [ {'host': 'www.google.fr', 'port': 443, 'refresh': 30, 'description': Internet, 'status': True} ... ]
# Load the configuration file
self._ports_list = self.load(config)
def load(self, config):
"""Load the ports list from the configuration file."""
ports_list = []
if config is None:
logger.debug("No configuration file available. Cannot load ports list.")
elif not config.has_section(self._section):
logger.debug("No [%s] section in the configuration file. Cannot load ports list." % self._section)
else:
logger.debug("Start reading the [%s] section in the configuration file" % self._section)
refresh = int(config.get_value(self._section, 'refresh', default=self._default_refresh))
timeout = int(config.get_value(self._section, 'timeout', default=self._default_timeout))
            # Add default gateway on top of the ports_list list
default_gateway = config.get_value(self._section, 'port_default_gateway', default='False')
if default_gateway.lower().startswith('true') and netifaces_tag:
new_port = {}
new_port['host'] = netifaces.gateways()['default'][netifaces.AF_INET][0]
# ICMP
new_port['port'] = 0
new_port['description'] = 'DefaultGateway'
new_port['refresh'] = refresh
new_port['timeout'] = timeout
new_port['status'] = None
new_port['rtt_warning'] = None
logger.debug("Add default gateway %s to the static list" % (new_port['host']))
ports_list.append(new_port)
# Read the scan list
for i in range(1, 256):
new_port = {}
postfix = 'port_%s_' % str(i)
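                # per-port keys read below (illustrative): port_1_host, port_1_port,
                # port_1_description, port_1_timeout, port_1_rtt_warning, ...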
                # Read mandatory configuration key: host
new_port['host'] = config.get_value(self._section, '%s%s' % (postfix, 'host'))
if new_port['host'] is None:
continue
# Read optionals configuration keys
# Port is set to 0 by default. 0 mean ICMP check instead of TCP check
new_port['port'] = config.get_value(self._section,
'%s%s' % (postfix, 'port'),
0)
new_port['description'] = config.get_value(self._section,
'%sdescription' % postfix,
default="%s:%s" % (new_port['host'], new_port['port']))
# Default status
new_port['status'] = None
# Refresh rate in second
new_port['refresh'] = refresh
# Timeout in second
new_port['timeout'] = int(config.get_value(self._section,
'%stimeout' % postfix,
default=timeout))
# RTT warning
new_port['rtt_warning'] = config.get_value(self._section,
'%srtt_warning' % postfix,
default=None)
if new_port['rtt_warning'] is not None:
# Convert to second
new_port['rtt_warning'] = int(new_port['rtt_warning']) / 1000.0
# Add the server to the list
logger.debug("Add port %s:%s to the static list" % (new_port['host'], new_port['port']))
ports_list.append(new_port)
# Ports list loaded
logger.debug("Ports list loaded: %s" % ports_list)
return ports_list
def get_ports_list(self):
"""Return the current server list (dict of dict)."""
return self._ports_list
def set_server(self, pos, key, value):
"""Set the key to the value for the pos (position in the list)."""
self._ports_list[pos][key] = value
|
velocyno/glances
|
glances/ports_list.py
|
Python
|
lgpl-3.0
| 5,667
|
#!/usr/bin/python
from __future__ import division
from __future__ import print_function
# program to plot SEVIRI observations
# usage from command line
# $ python plot_msg.py
#
# pass arguments to overwrite time, rgb, area given in the script
# $ python plot_msg.py year month day hour min rgb area
# year month day hour min
# -> integers specifying the date of observation
# rgb -> string, e.g. RGBs='HRoverview' or
# string list, e.g. RGBs=['IR_108','IR_120-IR_108','HRoverview']
# for possible options have a look at __main__
# area -> string or string array, e.g. 'EuropeCanary' or 'ccs4' (default)
# for possible options have a look at the file area.def
# RSS -> logical (True or False) rapid scan service
# True -> 5min service for europe (default)
# False -> 15min service for whole disk
# verbose-> logical (True or False) activates verbose output
# True -> more messages for debugging (default)
# False -> quiet
#
# Author Ulrich Hamann
# History 2014-10-01 U. Hamann, first version
# 2014-10-28 U. Hamann, area can also be used as array
# 2014-02-10 U. Hamann, introduced input file
# 2015-02-25 U. Hamann, added the ability to plot
# NWC-SAF cloud mask and SPhR products
#
from mpop.satellites import GeostationaryFactory
from mpop.imageo.geo_image import GeoImage
#from mpop.imageo.palettes import cms_modified, convert_palette, convert_palette2colormap
from pycoast import ContourWriterAGG
from pydecorate import DecoratorAGG
from mpop.channel import Channel, GenericChannel
import aggdraw
from numpy import where, zeros
import numpy.ma as ma
from os.path import dirname, exists, join
from os import makedirs, chmod, stat
import subprocess
from mpop.projector import get_area_def
from copy import deepcopy
from PIL import Image
from trollimage.image import Image as trollimage
from PIL import ImageFont
from PIL import ImageDraw
from trollimage.colormap import rdbu, greys, rainbow, spectral
from my_composites import mask_clouddepth, get_image
from my_msg_module import get_last_SEVIRI_date, check_input, channel_str2ind
from my_msg_module import choose_msg, choose_area_loaded_msg, convert_NWCSAF_to_radiance_format, get_NWC_pge_name, format_name
from my_msg_module import check_loaded_channels
from postprocessing import postprocessing
import products
from datetime import datetime
from plot_msg import load_products, create_PIL_image, choose_map_resolution, save_reprojected_data, mask_data
from plot_msg import add_colorscale, add_title, indicate_mask, add_borders_and_rivers
from get_input_msg import parse_commandline_and_read_inputfile
import inspect
from mpop.utils import debug_on
debug_on()
try:
str
except NameError:
str = str
#----------------------------------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------------------------------
def plot_msg_minus_cosmo(in_msg):
# do statistics for the last full hour (minutes=0, seconds=0)
in_msg.datetime = datetime(in_msg.datetime.year, in_msg.datetime.month, in_msg.datetime.day, in_msg.datetime.hour, 0, 0)
area_loaded = choose_area_loaded_msg(in_msg.sat, in_msg.sat_nr, in_msg.datetime)
# define contour write for coasts, borders, rivers
cw = ContourWriterAGG(in_msg.mapDir)
# check if input data is complete
if in_msg.verbose:
print("*** check input data for ", in_msg.sat_str())
RGBs = check_input(in_msg, in_msg.sat_str(layout="%(sat)s")+in_msg.sat_nr_str(), in_msg.datetime)
# in_msg.sat_nr might be changed to backup satellite
if in_msg.verbose:
print('*** Create plots for ')
print(' Satellite/Sensor: ' + in_msg.sat_str())
print(' Satellite number: ' + in_msg.sat_nr_str() +' // ' +str(in_msg.sat_nr))
print(' Satellite instrument: ' + in_msg.instrument)
print(' Date/Time: '+ str(in_msg.datetime))
print(' RGBs: ', in_msg.RGBs)
print(' Area: ', in_msg.areas)
print(' reader level: ', in_msg.reader_level)
# define satellite data object
#global_data = GeostationaryFactory.create_scene(in_msg.sat, in_msg.sat_nr_str(), "seviri", in_msg.datetime)
global_data = GeostationaryFactory.create_scene(in_msg.sat_str(), in_msg.sat_nr_str(), in_msg.instrument, in_msg.datetime)
# global_data = GeostationaryFactory.create_scene("msg-ot", "", "Overshooting_Tops", in_msg.datetime)
if len(RGBs) == 0 and len(in_msg.postprocessing_areas) == 0:
return RGBs
if in_msg.verbose:
print("*** load satellite channels for " + in_msg.sat_str()+in_msg.sat_nr_str()+" ", global_data.fullname)
# initialize processed RGBs
RGBs_done=[]
# -------------------------------------------------------------------
# load reflectivities, brightness temperatures, NWC-SAF products ...
# -------------------------------------------------------------------
area_loaded = load_products(global_data, RGBs, in_msg, area_loaded)
cosmo_input_file="input_cosmo_cronjob.py"
print("... read COSMO input file: ", cosmo_input_file)
in_cosmo = parse_commandline_and_read_inputfile(input_file=cosmo_input_file)
# add composite
in_msg.scpOutput = True
in_msg.resize_montage = 70
in_msg.postprocessing_montage = [["MSG_IR-108cpc","COSMO_SYNMSG-BT-CL-IR10.8","MSG_IR-108-COSMO-minus-MSGpc"]]
in_msg.scpProducts = [["MSG_IR-108cpc","COSMO_SYNMSG-BT-CL-IR10.8","MSG_IR-108-COSMO-minus-MSGpc"]]
#in_msg.scpProducts = ["all"]
# define satellite data object
cosmo_data = GeostationaryFactory.create_scene(in_cosmo.sat_str(), in_cosmo.sat_nr_str(), in_cosmo.instrument, in_cosmo.datetime)
area_loaded_cosmo = load_products(cosmo_data, ['SYNMSG_BT_CL_IR10.8'], in_cosmo, area_loaded)
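    # the COSMO synthetic channel SYNMSG_BT_CL_IR10.8 is loaded here; further
    # below the observed MSG IR_108 is subtracted from it (COSMO minus MSG)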
# preprojecting the data to another area
# --------------------------------------
if len(RGBs) > 0:
for area in in_msg.areas:
print("")
obj_area = get_area_def(area)
if area != 'ccs4':
print("*** WARNING, diff MSG-COSMO only implemented for ccs4")
continue
# reproject data to new area
print(area_loaded)
if obj_area == area_loaded:
if in_msg.verbose:
print("*** Use data for the area loaded: ", area)
#obj_area = area_loaded
data = global_data
resolution='l'
else:
if in_msg.verbose:
print("*** Reproject data to area: ", area, "(org projection: ", area_loaded.name, ")")
obj_area = get_area_def(area)
# PROJECT data to new area
data = global_data.project(area, precompute=True)
resolution='i'
if in_msg.parallax_correction:
loaded_products = [chn.name for chn in data.loaded_channels()]
if 'CTH' not in loaded_products:
print("*** Error in plot_msg ("+inspect.getfile(inspect.currentframe())+")")
print(" Cloud Top Height is needed for parallax correction ")
print(" either load CTH or specify the estimation of the CTH in the input file (load 10.8 in this case)")
quit()
if in_msg.verbose:
print(" perform parallax correction for loaded channels: ", loaded_products)
data = data.parallax_corr(fill=in_msg.parallax_gapfilling, estimate_cth=in_msg.estimate_cth, replace=True)
# save reprojected data
if area in in_msg.save_reprojected_data:
save_reprojected_data(data, area, in_msg)
# apply a mask to the data (switched off at the moment)
if False:
mask_data(data, area)
# save average values
if in_msg.save_statistics:
mean_array = zeros(len(RGBs))
#statisticFile = '/data/COALITION2/database/meteosat/ccs4/'+yearS+'/'+monthS+'/'+dayS+'/MSG_'+area+'_'+yearS[2:]+monthS+dayS+'.txt'
statisticFile = './'+yearS+'-'+monthS+'-'+dayS+'/MSG_'+area+'_'+yearS[2:]+monthS+dayS+'.txt'
if in_msg.verbose:
print("*** write statistics (average values) to "+statisticFile)
f1 = open(statisticFile,'a') # mode append
i_rgb=0
for rgb in RGBs:
if rgb in products.MSG_color:
mean_array[i_rgb]=data[rgb.replace("c","")].data.mean()
i_rgb=i_rgb+1
# create string to write
str2write = dateS +' '+hourS+' : '+minS+' UTC '
for mm in mean_array:
str2write = str2write+' '+ "%7.2f" % mm
str2write = str2write+"\n"
f1.write(str2write)
f1.close()
# creating plots/images
if in_msg.make_plots:
# choose map resolution
in_msg.resolution = choose_map_resolution(area, in_msg.mapResolution)
# define area
proj4_string = obj_area.proj4_string
# e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0'
area_extent = obj_area.area_extent
# e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612)
area_tuple = (proj4_string, area_extent)
RGBs=['IR_108-COSMO-minus-MSG']
print(data['IR_108'].data.shape)
print(cosmo_data['SYNMSG_BT_CL_IR10.8'].data.shape)
diff_MSG_COSMO = cosmo_data['SYNMSG_BT_CL_IR10.8'].data - data['IR_108'].data
HRV_enhance_str=''
# add IR difference as "channel object" to satellite regional "data" object
data.channels.append(Channel(name=RGBs[0],
wavelength_range=[0.,0.,0.],
resolution=data['IR_108'].resolution,
data = diff_MSG_COSMO) )
for rgb in RGBs:
if not check_loaded_channels(rgb, data):
continue
PIL_image = create_PIL_image(rgb, data, in_msg, obj_area=obj_area)
# !!! in_msg.colorbar[rgb] is initialized inside (give attention to rgbs) !!!
add_borders_and_rivers(PIL_image, cw, area_tuple,
add_borders=in_msg.add_borders, border_color=in_msg.border_color,
add_rivers=in_msg.add_rivers, river_color=in_msg.river_color,
resolution=in_msg.resolution, verbose=in_msg.verbose)
# indicate mask
if in_msg.indicate_mask:
PIL_image = indicate_mask(rgb, PIL_image, data, in_msg.verbose)
#if area.find("EuropeCanary") != -1 or area.find("ccs4") != -1:
dc = DecoratorAGG(PIL_image)
# add title to image
if in_msg.add_title:
add_title(PIL_image, in_msg.title, HRV_enhance_str+rgb, in_msg.sat_str(), data.sat_nr(), in_msg.datetime, area, dc, in_msg.font_file, in_msg.verbose,
title_color=in_msg.title_color, title_y_line_nr=in_msg.title_y_line_nr ) # !!! needs change
# add MeteoSwiss and Pytroll logo
if in_msg.add_logos:
if in_msg.verbose:
print('... add logos')
dc.align_right()
if in_msg.add_colorscale:
dc.write_vertically()
if PIL_image.mode != 'L':
height = 60 # height=60.0 normal resolution
dc.add_logo(in_msg.logos_dir+"/pytroll3.jpg",height=height) # height=60.0
dc.add_logo(in_msg.logos_dir+"/meteoSwiss3.jpg",height=height)
dc.add_logo(in_msg.logos_dir+"/EUMETSAT_logo2_tiny_white_square.png",height=height) # height=60.0
# add colorscale
if in_msg.add_colorscale and in_msg.colormap[rgb] != None:
if rgb in products.MSG_color:
unit = data[rgb.replace("c","")].info['units']
#elif rgb in products.MSG or rgb in products.NWCSAF or rgb in products.HSAF:
# unit = data[rgb].info['units']
else:
unit = None
loaded_channels = [chn.name for chn in data.loaded_channels()]
if rgb in loaded_channels:
if hasattr(data[rgb], 'info'):
print(" hasattr(data[rgb], 'info')", list(data[rgb].info.keys()))
if 'units' in list(data[rgb].info.keys()):
print("'units' in data[rgb].info.keys()")
unit = data[rgb].info['units']
print("... units = ", unit)
add_colorscale(dc, rgb, in_msg, unit=unit)
if in_msg.parallax_correction:
parallax_correction_str='pc'
else:
parallax_correction_str=''
rgb+=parallax_correction_str
# create output filename
outputDir = format_name(in_msg.outputDir, data.time_slot, area=area, rgb=rgb, sat=data.satname, sat_nr=data.sat_nr()) # !!! needs change
outputFile = outputDir +"/"+ format_name(in_msg.outputFile, data.time_slot, area=area, rgb=rgb, sat=data.satname, sat_nr=data.sat_nr()) # !!! needs change
# check if output directory exists, if not create it
path= dirname(outputFile)
if not exists(path):
if in_msg.verbose:
print('... create output directory: ' + path)
makedirs(path)
# save file
if exists(outputFile) and not in_msg.overwrite:
if stat(outputFile).st_size > 0:
print('... outputFile '+outputFile+' already exists (keep old file)')
else:
print('*** Warning, outputFile'+outputFile+' already exists, but is empty (overwrite file)')
PIL_image.save(outputFile, optimize=True) # optimize -> minimize file size
chmod(outputFile, 0o777) ## FOR PYTHON3: 0o664 # give access read/write access to group members
else:
if in_msg.verbose:
print('... save final file: ' + outputFile)
PIL_image.save(outputFile, optimize=True) # optimize -> minimize file size
chmod(outputFile, 0o777) ## FOR PYTHON3: 0o664 # give access read/write access to group members
if in_msg.compress_to_8bit:
if in_msg.verbose:
print('... compress to 8 bit image: display '+outputFile.replace(".png","-fs8.png")+' &')
subprocess.call("/usr/bin/pngquant -force 256 "+outputFile+" 2>&1 &", shell=True) # 256 == "number of colors"
#if in_msg.verbose:
# print " add coastlines to "+outputFile
## alternative: reopen image and modify it (takes longer due to additional reading and saving)
#cw.add_rivers_to_file(img, area_tuple, level=5, outline='blue', width=0.5, outline_opacity=127)
#cw.add_coastlines_to_file(outputFile, obj_area, resolution=resolution, level=4)
#cw.add_borders_to_file(outputFile, obj_area, outline=outline, resolution=resolution)
# secure copy file to another place
if in_msg.scpOutput:
if (rgb in in_msg.scpProducts) or ('all' in [x.lower() for x in in_msg.scpProducts if type(x)==str]):
scpOutputDir = format_name (in_msg.scpOutputDir, data.time_slot, area=area, rgb=rgb, sat=data.satname, sat_nr=data.sat_nr() )
if in_msg.compress_to_8bit:
if in_msg.verbose:
print("... secure copy "+outputFile.replace(".png","-fs8.png")+ " to "+scpOutputDir)
subprocess.call("scp "+in_msg.scpID+" "+outputFile.replace(".png","-fs8.png")+" "+scpOutputDir+" 2>&1 &", shell=True)
else:
if in_msg.verbose:
print("... secure copy "+outputFile+ " to "+scpOutputDir)
subprocess.call("scp "+in_msg.scpID+" "+outputFile+" "+scpOutputDir+" 2>&1 &", shell=True)
if in_msg.scpOutput and in_msg.scpID2 != None and in_msg.scpOutputDir2 != None:
if (rgb in in_msg.scpProducts2) or ('all' in [x.lower() for x in in_msg.scpProducts2 if type(x)==str]):
scpOutputDir2 = format_name (in_msg.scpOutputDir2, data.time_slot, area=area, rgb=rgb, sat=data.satname, sat_nr=data.sat_nr() )
if in_msg.compress_to_8bit:
if in_msg.verbose:
print("... secure copy "+outputFile.replace(".png","-fs8.png")+ " to "+scpOutputDir2)
subprocess.call("scp "+in_msg.scpID2+" "+outputFile.replace(".png","-fs8.png")+" "+scpOutputDir2+" 2>&1 &", shell=True)
else:
if in_msg.verbose:
print("... secure copy "+outputFile+ " to "+scpOutputDir2)
subprocess.call("scp "+in_msg.scpID2+" "+outputFile+" "+scpOutputDir2+" 2>&1 &", shell=True)
if 'ninjotif' in in_msg.outputFormats:
ninjotif_file = format_name (outputDir+'/'+in_msg.ninjotifFilename, data.time_slot, sat_nr=data.sat_nr(), RSS=in_msg.RSS, area=area, rgb=rgb )
from plot_coalition2 import pilimage2geoimage
GEO_image = pilimage2geoimage(PIL_image, obj_area, data.time_slot)
GEO_image.save(ninjotif_file,
fformat='mpop.imageo.formats.ninjotiff',
ninjo_product_name=rgb, chan_id = products.ninjo_chan_id[rgb.replace("_","-")+"_"+area],
nbits=8)
chmod(ninjotif_file, 0o777)
print(("... save ninjotif image: display ", ninjotif_file, " &"))
if rgb not in RGBs_done:
RGBs_done.append(rgb)
## start postprocessing
for area in in_msg.postprocessing_areas:
postprocessing(in_msg, global_data.time_slot, int(data.sat_nr()), area)
if in_msg.verbose:
print(" ")
return RGBs_done
#----------------------------------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------------------------------
# the main function get the command line arguments and start the function plot_msg
#----------------------------------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------------------------------
def print_usage():
print("*** ")
print("*** Error, not enough command line arguments")
print("*** please specify at least an input file")
print("*** possible calls are:")
print("*** python "+inspect.getfile(inspect.currentframe())+" input_MSG ")
print("*** python "+inspect.getfile(inspect.currentframe())+" input_MSG 2014 07 23 16 10 ")
print(" date and time must be completely given")
print("*** python "+inspect.getfile(inspect.currentframe())+" input_MSG 2014 07 23 16 10 'IR_108'")
print("*** python "+inspect.getfile(inspect.currentframe())+" input_MSG 2014 07 23 16 10 'IR_108' 'ccs4'")
print("*** python "+inspect.getfile(inspect.currentframe())+" input_MSG 2014 07 23 16 10 ['HRoverview','fog'] ['ccs4','euro4']")
print("*** ")
quit() # quit at this point
#----------------------------------------------------------------------------------------------------------------
if __name__ == '__main__':
in_msg = parse_commandline_and_read_inputfile(input_file="input_msg_cosmo_cronjob.py")
RGBs_done = plot_msg_minus_cosmo(in_msg)
print("*** Satellite pictures produced for ", RGBs_done)
print(" ")
|
meteoswiss-mdr/monti-pytroll
|
scripts/plot_msg_minus_cosmo.py
|
Python
|
lgpl-3.0
| 20,832
|
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2013 by Yaco Sistemas <goinnn@gmail.com>
# 2015 by Pablo Martín <goinnn@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import admin
from testing.unusual_fields.models import UnusualModel
class UnusualModelAdmin(admin.ModelAdmin):
pass
class ResourceAdmin(admin.ModelAdmin):
pass
admin.site.register(UnusualModel, UnusualModelAdmin)
|
django-inplaceedit/django-inplaceedit
|
testing/testing/unusual_fields/admin.py
|
Python
|
lgpl-3.0
| 1,048
|
"""
Common definitions/constants used throughout the manager.
"""
from typing import Final
from ai.backend.common.types import SlotName, SlotTypes
INTRINSIC_SLOTS: Final = {
SlotName('cpu'): SlotTypes('count'),
SlotName('mem'): SlotTypes('bytes'),
}
|
lablup/sorna-manager
|
src/ai/backend/manager/defs.py
|
Python
|
lgpl-3.0
| 262
|
#!/usr/bin/env python
import os
import sys
import shutil
import tempfile
if os.name != 'nt':
print 'Windows only!'
sys.exit(1)
if not len(sys.argv) == 3:
print 'USAGE: %s PortablePythonDir output-dir' % sys.argv[0]
print ' Example: D:\yCanta>..\PortablePython\Python-Portable.exe windows-build.py d:\PortablePython d:\output'
sys.exit(1)
ppydir = sys.argv[1]
workdir = os.path.abspath(sys.argv[2])
requirements = os.path.abspath('requirements.txt')
if not os.path.exists(workdir):
os.mkdir(workdir)
exclude = [
'song.db',
'songs',
'songbooks',
'songbook_backup',
'webapp\\static\\songs',
'webapp\\static\\songbooks',
'.git*',
'.hg*']
print 'EXCLUDE:', exclude
print 'Copying to working dir:', workdir
shutil.copytree('.', os.path.join(workdir, 'yCanta'), ignore=shutil.ignore_patterns(*exclude))
shutil.copytree(ppydir, os.path.join(workdir, 'PortablePython'))
print 'Creating launcher script'
launcher = open(os.path.join(workdir, 'yCanta.bat'), 'w')
launcher.write(r'''cd yCanta
..\PortablePython\Python-Portable.exe start-webapp.py --start-browser
'''.rstrip())
launcher.close()
print 'Installing packages into portable python environment'
easy_install = os.path.join(workdir, 'PortablePython', 'App', 'Scripts', 'easy_install.exe')
print 'EASY_INSTALL:', easy_install
for line in open(requirements):
if '#' in line:
continue
os.system(easy_install + ' ' + line.strip())
os.system(easy_install + ' pip')
# run install via pip too cause of weird portable python bug ... if I do it both ways (easy_install and pip) it works, else it doesn't.
os.system(os.path.join(workdir, 'PortablePython', 'Python-Portable.exe') + ' -m pip install -r ' + requirements)
print 'Creating zip archive: yCanta.zip'
shutil.make_archive('yCanta', 'zip', workdir)
print 'DONE'
#print 'Cleaning up working dir ...'
#shutil.rmtree(workdir)
#exclude = [ os.path.abspath(line) for line in open('.gitignore') if '#' not in line ]
#print 'EXCLUDE:', exclude
#
#for root, dirs, files in os.walk('.'):
# for i in reversed(range(len(dirs))): # go through indexes backwords because we're doing deletions
# path = os.path.abspath(os.path.join(root, dirs[i]))
# if path in exclude:
# print 'EXCLUDE:', path
# del dirs[i]
# else:
# print 'INCLUDE:', path
# os.mkdir(os.path.join(workdir, root, dirs[i]))
#
# for i in reversed(range(len(files))): # go through indexes backwords because we're doing deletions
# path = os.path.abspath(os.path.join(root, files[i]))
# if path in exclude:
# print 'EXCLUDE:', path
# else:
# print 'INCLUDE:', path
# os.mkdir(os.path.join(workdir, root, files[i]))
|
yCanta/yCanta
|
windows-build.py
|
Python
|
unlicense
| 2,677
|
#!/usr/bin/env python
"""Ninja build configurator for foundation library"""
import sys
import os
import copy
sys.path.insert(0, os.path.join('build', 'ninja'))
import generator
generator = generator.Generator(project = 'rpmalloc', variables = {'bundleidentifier': 'com.rampantpixels.rpmalloc.$(binname)', 'nowarning': True})
target = generator.target
writer = generator.writer
toolchain = generator.toolchain
variables = {'defines': ['NDEBUG=1'], 'cflags': ['-fno-builtin-malloc']}
def merge_variables(a, b):
merged = copy.deepcopy(a)
for k, v in b.items():
if k in merged:
merged[k] = list(merged[k]) + list(v)
else:
merged[k] = v
return merged
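# illustrative example (hypothetical values):
#   merge_variables({'defines': ['NDEBUG=1']}, {'defines': ['FOO=1'], 'runtime': 'c++'})
#   -> {'defines': ['NDEBUG=1', 'FOO=1'], 'runtime': 'c++'}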
includepaths = ['test', 'benchmark']
test_lib = generator.lib(module = 'test', sources = ['thread.c', 'timer.c'], includepaths = includepaths, variables = variables)
benchmark_lib = generator.lib(module = 'benchmark', sources = ['main.c'], includepaths = includepaths, variables = variables)
#Build one binary per benchmark
generator.bin(module = 'rpmalloc', sources = ['benchmark.c', 'rpmalloc.c'], binname = 'benchmark-rpmalloc', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = ['benchmark', 'test'], includepaths = includepaths, variables = variables)
if target.is_android():
resources = [os.path.join('all', 'android', item) for item in [
'AndroidManifest.xml', os.path.join('layout', 'main.xml'), os.path.join('values', 'strings.xml'),
os.path.join('drawable-ldpi', 'icon.png'), os.path.join('drawable-mdpi', 'icon.png'), os.path.join('drawable-hdpi', 'icon.png'),
os.path.join('drawable-xhdpi', 'icon.png'), os.path.join('drawable-xxhdpi', 'icon.png'), os.path.join('drawable-xxxhdpi', 'icon.png')
]]
appsources = [os.path.join('test', 'all', 'android', 'java', 'com', 'rampantpixels', 'foundation', 'test', item) for item in [
'TestActivity.java'
]]
generator.app(module = '', sources = appsources, binname = 'benchmark-rpmalloc', basepath = '', implicit_deps = [benchmark_lib, test_lib], libs = ['benchmark', 'test'], resources = resources, includepaths = includepaths, variables = variables)
generator.bin(module = 'crt', sources = ['benchmark.c'], binname = 'benchmark-crt', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = ['benchmark', 'test'], includepaths = includepaths, variables = {'defines': ['NDEBUG=1']})
if not target.is_android():
generator.bin(module = 'nedmalloc', sources = ['benchmark.c', 'nedmalloc.c'], binname = 'benchmark-nedmalloc', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = ['benchmark', 'test'], includepaths = includepaths, variables = variables)
platform_includepaths = [os.path.join('benchmark', 'ptmalloc3')]
if target.is_windows():
platform_includepaths += [os.path.join('benchmark', 'ptmalloc3', 'sysdeps', 'windows')]
else:
platform_includepaths += [os.path.join('benchmark', 'ptmalloc3', 'sysdeps', 'pthread')]
if not target.is_android():
generator.bin(module = 'ptmalloc3', sources = ['benchmark.c', 'ptmalloc3.c', 'malloc.c'], binname = 'benchmark-ptmalloc3', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = ['benchmark', 'test'], includepaths = includepaths + platform_includepaths, variables = variables)
hoardincludepaths = [
os.path.join('benchmark', 'hoard', 'include'),
os.path.join('benchmark', 'hoard', 'include', 'hoard'),
os.path.join('benchmark', 'hoard', 'include', 'util'),
os.path.join('benchmark', 'hoard', 'include', 'superblocks'),
os.path.join('benchmark', 'hoard'),
os.path.join('benchmark', 'hoard', 'Heap-Layers')
]
hoardsources = ['source/libhoard.cpp']
if target.is_macos() or target.is_ios():
hoardsources += ['Heap-Layers/wrappers/macwrapper.cpp']
elif target.is_windows():
hoardsources += ['Heap-Layers/wrappers/winwrapper.cpp']
else:
hoardsources += ['Heap-Layers/wrappers/gnuwrapper.cpp']
if target.is_macos() or target.is_ios():
hoardsources += ['source/mactls.cpp']
elif target.is_windows():
hoardsources += ['source/wintls.cpp']
else:
hoardsources += ['source/unixtls.cpp']
if not target.is_android():
hoard_variables = merge_variables({'runtime': 'c++'}, variables)
hoard_lib = generator.lib(module = 'hoard', sources = hoardsources, basepath = 'benchmark', includepaths = includepaths + hoardincludepaths, variables = hoard_variables)
hoard_depend_libs = ['hoard', 'benchmark', 'test']
generator.bin(module = 'hoard', sources = ['benchmark.c'], binname = 'benchmark-hoard', basepath = 'benchmark', implicit_deps = [hoard_lib, benchmark_lib, test_lib], libs = hoard_depend_libs, includepaths = includepaths, variables = hoard_variables)
gperftoolsincludepaths = [
os.path.join('benchmark', 'gperftools', 'src'),
os.path.join('benchmark', 'gperftools', 'src', 'base'),
os.path.join('benchmark', 'gperftools', 'src', target.get())
]
gperftoolsbasesources = [
'dynamic_annotations.c', 'linuxthreads.cc', 'logging.cc', 'low_level_alloc.cc', 'spinlock.cc',
'spinlock_internal.cc', 'sysinfo.cc'
]
if not target.is_windows():
gperftoolsbasesources += ['thread_lister.c']
gperftoolsbasesources = [os.path.join('src', 'base', path) for path in gperftoolsbasesources]
gperftoolssources = [
'central_freelist.cc', 'common.cc', 'internal_logging.cc',
'malloc_extension.cc', 'malloc_hook.cc', 'memfs_malloc.cc',
'page_heap.cc', 'sampler.cc', 'stack_trace_table.cc',
'static_vars.cc', 'span.cc', 'symbolize.cc', 'tcmalloc.cc', 'thread_cache.cc'
]
if not target.is_windows():
gperftoolssources += ['maybe_threads.cc', 'system-alloc.cc']
if target.is_windows():
gperftoolssources += [os.path.join('windows', 'port.cc'), os.path.join('windows', 'system-alloc.cc')]
gperftoolssources = [os.path.join('src', path) for path in gperftoolssources]
if not target.is_android():
gperf_variables = merge_variables({'runtime': 'c++', 'defines': ['NO_TCMALLOC_SAMPLES', 'NO_HEAP_CHECK'], 'nowarning': True}, variables)
gperftools_lib = generator.lib(module = 'gperftools', sources = gperftoolsbasesources + gperftoolssources, basepath = 'benchmark', includepaths = includepaths + gperftoolsincludepaths, variables = gperf_variables)
gperftools_depend_libs = ['gperftools', 'benchmark', 'test']
generator.bin(module = 'gperftools', sources = ['benchmark.c'], binname = 'benchmark-tcmalloc', basepath = 'benchmark', implicit_deps = [gperftools_lib, benchmark_lib, test_lib], libs = gperftools_depend_libs, includepaths = includepaths, variables = gperf_variables)
jemallocincludepaths = [
os.path.join('benchmark', 'jemalloc', 'include'),
os.path.join('benchmark', 'jemalloc', 'include', 'jemalloc'),
os.path.join('benchmark', 'jemalloc', 'include', 'jemalloc', 'internal')
]
jemallocsources = [
'arena.c', 'background_thread.c', 'base.c', 'bin.c', 'bitmap.c', 'ckh.c', 'ctl.c', 'div.c', 'extent.c',
'extent_dss.c', 'extent_mmap.c', 'hash.c', 'hook.c', 'jemalloc.c', 'large.c', 'log.c', 'malloc_io.c',
'mutex.c', 'mutex_pool.c', 'nstime.c', 'pages.c', 'prng.c', 'prof.c', 'rtree.c', 'safety_check.c',
'sc.c', 'stats.c', 'sz.c', 'tcache.c', 'test_hooks.c', 'ticker.c', 'tsd.c', 'witness.c'
]
jemallocsources = [os.path.join('src', path) for path in jemallocsources]
if not target.is_windows() and not target.is_android():
je_variables = merge_variables({'defines': ['JEMALLOC_NO_RENAME']}, variables)
jemalloc_lib = generator.lib(module = 'jemalloc', sources = jemallocsources, basepath = 'benchmark', includepaths = includepaths + jemallocincludepaths, variables = je_variables)
jemalloc_depend_libs = ['jemalloc', 'benchmark', 'test']
generator.bin(module = 'jemalloc', sources = ['benchmark.c'], binname = 'benchmark-jemalloc', basepath = 'benchmark', implicit_deps = [jemalloc_lib, benchmark_lib, test_lib], libs = jemalloc_depend_libs, includepaths = includepaths, variables = je_variables)
snmallocincludepaths = [
os.path.join('benchmark', 'snmalloc', 'src'),
]
snmallocsources = [os.path.join('src', 'override', 'malloc.cc')]
snvariables = merge_variables({'defines': ['SNMALLOC_STATIC_LIBRARY=1', 'SNMALLOC_STATIC_LIBRARY_PREFIX=sn_'], 'cflags': ['-mcx16'], 'runtime': 'c++'}, variables)
snmalloc_lib = generator.lib(module = 'snmalloc', sources = snmallocsources, basepath = 'benchmark', includepaths = includepaths + snmallocincludepaths, variables = snvariables)
snmalloc_depend_libs = ['snmalloc', 'benchmark', 'test', 'WindowsApp']
generator.bin(module = 'snmalloc', sources = ['benchmark.cc'], binname = 'benchmark-snmalloc', basepath = 'benchmark', implicit_deps = [snmalloc_lib, benchmark_lib, test_lib], libs = snmalloc_depend_libs, includepaths = includepaths + snmallocincludepaths, variables = snvariables)
scallocincludepaths = [
os.path.join('benchmark', 'scalloc', 'src'),
os.path.join('benchmark', 'scalloc', 'src', 'platform')
]
scallocsources = [
'glue.cc'
]
scallocsources = [os.path.join('src', path) for path in scallocsources]
if not target.is_windows() and not target.is_android():
scalloc_variables = merge_variables({'runtime': 'c++'}, variables)
scalloc_lib = generator.lib(module = 'scalloc', sources = scallocsources, basepath = 'benchmark', includepaths = includepaths + scallocincludepaths, variables = scalloc_variables)
scalloc_depend_libs = ['scalloc', 'benchmark', 'test']
generator.bin(module = 'scalloc', sources = ['benchmark.c'], binname = 'benchmark-scalloc', basepath = 'benchmark', implicit_deps = [scalloc_lib, benchmark_lib, test_lib], libs = scalloc_depend_libs, includepaths = includepaths, variables = scalloc_variables)
if not target.is_windows():
lockfree_malloc_depend_libs = ['benchmark', 'test']
if not target.is_android():
lockfree_variables = merge_variables({'runtime': 'c++'}, variables)
generator.bin(module = 'lockfree-malloc', sources = ['benchmark.c', 'lite-malloc.cpp'], binname = 'benchmark-lockfree-malloc', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = lockfree_malloc_depend_libs, includepaths = includepaths, variables = lockfree_variables)
if not target.is_windows():
bmallocincludepaths = [
os.path.join('benchmark', 'bmalloc', 'bmalloc')
]
bmallocsources = [
'AllIsoHeaps.cpp', 'Allocator.cpp', 'AvailableMemory.cpp', 'bmalloc.cpp', 'Cache.cpp', 'CryptoRandom.cpp',
'Deallocator.cpp', 'DebugHeap.cpp', 'Environment.cpp', 'FreeList.cpp', 'Gigacage.cpp', 'Heap.cpp',
'HeapKind.cpp', 'IsoHeapImpl.cpp', 'IsoPage.cpp', 'IsoSharedHeap.cpp', 'IsoSharedPage.cpp', 'IsoTLS.cpp',
'IsoTLSEntry.cpp', 'IsoTLSLayout.cpp', 'LargeMap.cpp', 'Logging.cpp', 'mbmalloc.cpp', 'Mutex.cpp',
'ObjectType.cpp', 'PerProcess.cpp', 'PerThread.cpp', 'Scavenger.cpp', 'StaticMutex.cpp', 'VMHeap.cpp'
]
if target.is_macos() or target.is_ios():
bmallocsources += ['Zone.cpp']
bmallocsources = [os.path.join('bmalloc', path) for path in bmallocsources]
if not target.is_android():
bmalloc_variables = merge_variables({'runtime': 'c++'}, variables)
bmalloc_lib = generator.lib(module = 'bmalloc', sources = bmallocsources, basepath = 'benchmark', includepaths = includepaths + bmallocincludepaths, variables = bmalloc_variables)
bmalloc_depend_libs = ['bmalloc', 'benchmark', 'test']
generator.bin(module = 'bmalloc', sources = ['benchmark.cc'], binname = 'benchmark-bmalloc', basepath = 'benchmark', implicit_deps = [bmalloc_lib, benchmark_lib, test_lib], libs = bmalloc_depend_libs, includepaths = includepaths, variables = bmalloc_variables)
#Requires transactional memory for full performance?
if not target.is_windows():
supermallocincludepaths = [
os.path.join('benchmark', 'supermalloc', 'src')
]
supermallocsources = [
'bassert.cc', 'cache.cc', 'env.cc', 'footprint.cc', 'futex_mutex.cc', 'has_tsx.cc', 'huge_malloc.cc',
'large_malloc.cc', 'makechunk.cc', 'malloc.cc', 'rng.cc', 'small_malloc.cc', 'stats.cc',
'generated_constants.cc'
]
supermallocsources = [os.path.join('src', path) for path in supermallocsources]
if not target.is_android():
supermalloc_variables = {'cflags': ['-mrtm'], 'runtime': 'c++', 'defines': ['NDEBUG=1']}
supermalloc_lib = generator.lib(module = 'supermalloc', sources = supermallocsources, basepath = 'benchmark', includepaths = includepaths + supermallocincludepaths, variables = supermalloc_variables)
supermalloc_depend_libs = ['supermalloc', 'benchmark', 'test']
generator.bin(module = 'supermalloc', sources = ['benchmark.c'], binname = 'benchmark-supermalloc', basepath = 'benchmark', implicit_deps = [supermalloc_lib, benchmark_lib, test_lib], libs = supermalloc_depend_libs, includepaths = includepaths, variables = supermalloc_variables)
#Lockless only seems to build with gcc
if toolchain.name() == "gcc":
lockless_depend_libs = ['benchmark', 'test']
if target.is_linux():
lockless_variables = merge_variables({'defines': ['USE_PREFIX']}, variables)
generator.bin(module = 'lockless', sources = ['benchmark.c', 'll_alloc.c'], binname = 'benchmark-lockless', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = lockless_depend_libs, includepaths = includepaths, variables = lockless_variables)
if not target.is_windows():
smmallocsources = [
'smmalloc.cpp', 'smmalloc_generic.cpp', 'smmalloc_tls.cpp'
]
smmalloc_variables = {'defines': ['_M_X64=1'], 'runtime': 'c++'}
smmalloc_depend_libs = ['benchmark', 'test']
generator.bin(module = 'smmalloc', sources = ['benchmark.cpp'] + smmallocsources, binname = 'benchmark-smmalloc', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = smmalloc_depend_libs, includepaths = includepaths, variables = smmalloc_variables)
mimallocsources = [
'stats.c', 'os.c', 'segment.c', 'page.c', 'random.c', 'arena.c', 'bitmap.c', 'alloc.c', 'alloc-aligned.c',
'segment-cache.c', 'heap.c', 'options.c', 'init.c'
]
mimallocsources = [os.path.join('src', path) for path in mimallocsources]
mimalloc_variables = {'defines': ['MI_DEBUG=0']}
mimallocincludepaths = [
os.path.join('benchmark', 'mimalloc', 'include')
]
mimalloc_depend_libs = ['benchmark', 'test']
generator.bin(module = 'mimalloc', sources = ['benchmark.c'] + mimallocsources, binname = 'benchmark-mimalloc', basepath = 'benchmark', implicit_deps = [benchmark_lib, test_lib], libs = mimalloc_depend_libs, includepaths = includepaths + mimallocincludepaths, variables = mimalloc_variables)
|
rampantpixels/rpmalloc-benchmark
|
configure.py
|
Python
|
unlicense
| 14,222
|
import unittest
import json
from bitmovin import Bitmovin, Response, SFTPInput
from bitmovin.errors import BitmovinApiError
from tests.bitmovin import BitmovinTestCase
class SFTPInputTests(BitmovinTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
@classmethod
def tearDownClass(cls):
super().tearDownClass()
def setUp(self):
super().setUp()
self.bitmovin = Bitmovin(self.api_key)
self.assertIsNotNone(self.bitmovin)
self.assertTrue(isinstance(self.bitmovin, Bitmovin))
def tearDown(self):
super().tearDown()
def test_create_sftp_input(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
input_resource_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(input_resource_response)
self.assertIsNotNone(input_resource_response.resource)
self.assertIsNotNone(input_resource_response.resource.id)
self._compare_sftp_inputs(sample_input, input_resource_response.resource)
def test_create_sftp_input_without_name(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
sample_input.name = None
input_resource_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(input_resource_response)
self.assertIsNotNone(input_resource_response.resource)
self.assertIsNotNone(input_resource_response.resource.id)
self._compare_sftp_inputs(sample_input, input_resource_response.resource)
def test_create_sftp_input_custom(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
sample_input.port = 9921
input_resource_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(input_resource_response)
self.assertIsNotNone(input_resource_response.resource)
self.assertIsNotNone(input_resource_response.resource.id)
self._compare_sftp_inputs(sample_input, input_resource_response.resource)
self.assertEqual(sample_input.port, input_resource_response.resource.port)
def test_retrieve_sftp_input(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
retrieved_input_response = self.bitmovin.inputs.SFTP.retrieve(created_input_response.resource.id)
self.assertIsNotNone(retrieved_input_response)
self.assertIsNotNone(retrieved_input_response.resource)
self._compare_sftp_inputs(created_input_response.resource, retrieved_input_response.resource)
def test_delete_sftp_input(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
deleted_minimal_resource = self.bitmovin.inputs.SFTP.delete(created_input_response.resource.id)
self.assertIsNotNone(deleted_minimal_resource)
self.assertIsNotNone(deleted_minimal_resource.resource)
self.assertIsNotNone(deleted_minimal_resource.resource.id)
try:
self.bitmovin.inputs.SFTP.retrieve(created_input_response.resource.id)
self.fail(
'Previous statement should have thrown an exception. ' +
'Retrieving input after deleting it shouldn\'t be possible.'
)
except BitmovinApiError:
pass
def test_list_sftp_inputs(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
inputs = self.bitmovin.inputs.SFTP.list()
self.assertIsNotNone(inputs)
self.assertIsNotNone(inputs.resource)
self.assertIsNotNone(inputs.response)
self.assertIsInstance(inputs.resource, list)
self.assertIsInstance(inputs.response, Response)
        self.assertGreater(len(inputs.resource), 0)
def test_retrieve_sftp_input_custom_data(self):
(sample_input, sample_files) = self._get_sample_sftp_input()
sample_input.customData = '<pre>my custom data</pre>'
created_input_response = self.bitmovin.inputs.SFTP.create(sample_input)
self.assertIsNotNone(created_input_response)
self.assertIsNotNone(created_input_response.resource)
self.assertIsNotNone(created_input_response.resource.id)
self._compare_sftp_inputs(sample_input, created_input_response.resource)
custom_data_response = self.bitmovin.inputs.SFTP.retrieve_custom_data(created_input_response.resource.id)
custom_data = custom_data_response.resource
self.assertEqual(sample_input.customData, json.loads(custom_data.customData))
def _compare_sftp_inputs(self, first: SFTPInput, second: SFTPInput):
"""
:param first: SFTPInput
:param second: SFTPInput
:return: bool
"""
self.assertEqual(first.host, second.host)
self.assertEqual(first.name, second.name)
self.assertEqual(first.description, second.description)
#self.assertEqual(first.username, second.username) # issue 574
def _get_sample_sftp_input(self):
sftp_input_settings = self.settings.get('sampleObjects').get('inputs').get('sftp')\
.get('3945fee9-5e0f-48ce-8f3d-d451c0bf1071')
files = sftp_input_settings.get('files')
sftp_input = SFTPInput(
host=sftp_input_settings.get('host'),
username=sftp_input_settings.get('username'),
password=sftp_input_settings.get('password'),
name='Sample SFTP input'
)
self.assertIsNotNone(sftp_input.host)
self.assertIsNotNone(sftp_input.username)
self.assertIsNotNone(sftp_input.password)
return sftp_input, files
if __name__ == '__main__':
unittest.main()
|
bitmovin/bitmovin-python
|
tests/bitmovin/services/inputs/sftp_input_service_tests.py
|
Python
|
unlicense
| 6,702
|
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: errors.py
import mcl.status
ERR_SUCCESS = mcl.status.MCL_SUCCESS
ERR_INVALID_PARAM = mcl.status.framework.ERR_START
ERR_MARSHAL_FAILED = mcl.status.framework.ERR_START + 1
ERR_GET_FULL_PATH_FAILED = mcl.status.framework.ERR_START + 2
ERR_CALLBACK_FAILED = mcl.status.framework.ERR_START + 3
ERR_ENUM_FAILED = mcl.status.framework.ERR_START + 4
ERR_DONE_MAX_ENTRIES = mcl.status.framework.ERR_START + 5
ERR_NOT_IMPLEMENTED = mcl.status.framework.ERR_START + 6
errorStrings = {ERR_INVALID_PARAM: 'Invalid parameter(s)',
ERR_MARSHAL_FAILED: 'Marshaling data failed',
ERR_GET_FULL_PATH_FAILED: 'Unable to get full path',
ERR_CALLBACK_FAILED: 'Return of data failed',
ERR_ENUM_FAILED: 'Failed to enumerate given directory',
ERR_DONE_MAX_ENTRIES: 'Maximum entries exceeded',
ERR_NOT_IMPLEMENTED: 'Feature not implemented on this platform'
}
|
DarthMaulware/EquationGroupLeaks
|
Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/file/cmd/grep/errors.py
|
Python
|
unlicense
| 1,026
|
from kivy.uix.stacklayout import StackLayout
from kivy.uix.behaviors import DragBehavior
from kivy.uix.modalview import ModalView
from kivy.core.window import Window
from kivy.uix.button import Button
from kivy.uix.label import Label
from kivy.lang import Builder
from kivy.clock import Clock
from kivy.uix.stencilview import StencilView
Builder.load_string('''
<FindBar>:
canvas.before:
Color:
rgba: col_dgrey
Rectangle:
size: self.size
pos: self.pos
TextInput:
size_hint: 1, 0.5
TextInput:
size_hint: 1, 0.5
''')
class FindBar(StackLayout, StencilView):
def __init__(self, **kwargs):
super(FindBar, self).__init__(**kwargs)
|
Bakterija/log_fruit
|
src/app_modules/widgets/find_bar.py
|
Python
|
unlicense
| 724
|
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from distutils.core import setup
from distutils.command.install import INSTALL_SCHEMES
from horizon import version
from openstack_dashboard.openstack.common import setup as os_common_setup
requires = os_common_setup.parse_requirements()
depend_links = os_common_setup.parse_dependency_links()
tests_require = os_common_setup.parse_requirements(['tools/test-requires'])
ROOT = os.path.dirname(__file__)
target_dirs = ['horizon', 'openstack_dashboard', 'bin']
def read(fname):
return open(os.path.join(ROOT, fname)).read()
def split(path, result=None):
"""
Split a path into components in a platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return split(head, [tail] + result)
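# Illustrative example (comment added for clarity, not part of the original
# file): split() walks the path recursively, so on a POSIX system
#   split('openstack_dashboard/api/base.py')
# returns ['openstack_dashboard', 'api', 'base.py'].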
# Tell distutils not to put the data_files in platform-specific installation
# locations. See here for an explanation:
# https://groups.google.com/forum/#!topic/comp.lang.python/Nex7L-026uw
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
for target_dir in target_dirs:
for dirpath, dirnames, filenames in os.walk(target_dir):
        # Ignore dirnames that start with '.'
        dirnames[:] = [d for d in dirnames if not d.startswith('.')]
if '__init__.py' in filenames:
packages.append('.'.join(split(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f)
for f in filenames]])
setup(name="horizon",
version=version.canonical_version_string(),
url='https://github.com/openstack/horizon/',
license='Apache 2.0',
description="The OpenStack Dashboard.",
long_description=read('README.rst'),
author='OpenStack',
author_email='horizon@lists.launchpad.net',
packages=packages,
data_files=data_files,
cmdclass=os_common_setup.get_cmdclass(),
include_package_data=True,
install_requires=requires,
tests_require=tests_require,
dependency_links=depend_links,
zip_safe=False,
classifiers=['Development Status :: 5 - Production/Stable',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Environment :: OpenStack']
)
|
m3z/HT
|
setup.py
|
Python
|
apache-2.0
| 3,693
|
#!/usr/bin/env python3
try:
import numpypy as np
except:
import numpy as np
try:
import cPickle as pickle
except ImportError:
import pickle
#import ipdb
## Tri Diagonal Matrix Algorithm(a.k.a Thomas algorithm) solver
def TDMAsolver(a, b, c, d):
'''
TDMA solver, a b c d can be NumPy array type or Python list type.
refer to http://en.wikipedia.org/wiki/Tridiagonal_matrix_algorithm
'''
nf = len(a) # number of equations
ac, bc, cc, dc = map(np.array, (a, b, c, d)) # copy the array
    for it in range(1, nf):
mc = ac[it]/bc[it-1]
bc[it] = bc[it] - mc*cc[it-1]
dc[it] = dc[it] - mc*dc[it-1]
xc = ac
xc[-1] = dc[-1]/bc[-1]
    for il in range(nf-2, -1, -1):
xc[il] = (dc[il]-cc[il]*xc[il+1])/bc[il]
del bc, cc, dc # delete variables from memory
return xc
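# Minimal usage sketch (illustrative, not from the original file). For the
# symmetric system [[4,1,0],[1,4,1],[0,1,4]] x = [5,6,5] the solver should
# return approximately [1., 1., 1.]; the first entry of `a` and the last
# entry of `c` are ignored, as in the standard Thomas algorithm.
#
#   a = [0.0, 1.0, 1.0]   # sub-diagonal
#   b = [4.0, 4.0, 4.0]   # main diagonal
#   c = [1.0, 1.0, 0.0]   # super-diagonal
#   d = [5.0, 6.0, 5.0]
#   x = TDMAsolver(a, b, c, d)   # -> array([1., 1., 1.])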
class Vector(object):
def __init__(self, parent):
# save the pointer to the parent (dynamical)
self.p = parent
# initial G = 0, G[k,n]
self.G = np.zeros((self.p.Nz, self.p.NFourier), dtype="float64")
# access via G[k][n]
def step(self):
# save the old G
self.G_old = self.G.copy()
# compute the new one
self.compute_G()
# new += dt/2*(3G-G_old)
self.field[1:-1] = (self.field[1:-1]
+ self.p.dt/2*(3*self.G[1:-1] - self.G_old[1:-1])
)
# conditions at top and bottom : null
self.field[0 ,:] = 0
self.field[-1,:] = 0
def compute_G(self):
raise Exception("Vector class is a base class, not supposed to be "+
"used like that")
def initial(self, init_cond):
if init_cond == 'null':
self.field = np.zeros((self.p.Nz, self.p.NFourier))
elif init_cond == "T":
self.field = np.array([[T_0(n,k,self.p) for n in range(self.p.NFourier)]
for k in range(self.p.Nz)])
else:
raise Exception("init_cond must be either `null` or `T`")
class Temp(Vector):
name = "T"
def compute_G(self):
# compute G except for k = 0, Nz-1 and n = 0
for n in range(1, self.p.NFourier):
self.G[1:-1,n] = ((self.field[:-2,n]-2*self.field[1:-1,n]+self.field[2:,n])
* self.p.oodz2
- (n*self.p.pi/self.p.a)**2
* self.field[1:-1,n] )
class Vort(Vector):
name = "ω"
def __init__(self, parent):
super().__init__(parent)
self.compute_wk()
def compute_wk(self):
# init. the arrays:
self.wk1 = np.zeros((self.p.Nz, self.p.NFourier))
self.wk2 = np.zeros((self.p.Nz, self.p.NFourier))
self.sub = np.zeros((self.p.Nz, self.p.NFourier))
for n in range(1,self.p.NFourier):
# save some usefull functions
sub_f = lambda k : -self.p.oodz2 if k<self.p.Nz-1 else 1
dia = lambda k : (n*self.p.pi/self.p.a)**2 + 2*self.p.oodz2 if 0<k<self.p.Nz-1 else 1
sup = lambda k : -self.p.oodz2 if k>0 else 1
# tridiag. solver
self.wk1[0,n] = 1/dia(0)
self.wk2[0,n] = sup(0) * self.wk1[0,n]
for k in range(1, self.p.Nz-1):
self.wk1[k,n] = 1 /(dia(k)-sub_f(k)*self.wk2[k-1,n])
self.wk2[k,n] = sup(k)*self.wk1[k,n]
self.wk1[-1,n] = 1/(dia(self.p.Nz-1)-sub_f(self.p.Nz-1)*self.wk2[-2,n])
self.sub[:,n] = [sub_f(k) for k in range(self.p.Nz)]
def step(self):
rhs = self.p.psi.field.copy()
# boundary conditions k=0, Nz-1 : psi = 0
rhs[0, :] = 0
rhs[-1,:] = 0
for n in range(1,self.p.NFourier):
# tridiag. solver
self.field[0,n] = rhs[0,n]*self.wk1[0,n]
for k in range(1, self.p.Nz):
                self.field[k,n] = (rhs[k,n] - self.sub[k,n]*self.field[k-1,n])*self.wk1[k,n]
for k in range(self.p.Nz-2, 0, -1):
self.field[k,n] = self.field[k,n]-self.wk2[k,n]*self.field[k+1,n]
class Stream(Vector):
name = "ψ"
def compute_G(self):
# compute G except for k=0, Nz-1 and n=0
for n in range(1, self.p.NFourier):
a = self.p.Ra*n*self.p.pi/self.p.a*self.p.T.field[1:-1,n]
b = (self.field[:-2,n] - 2*self.field[1:-1,n] + self.field[2:,n])*self.p.oodz2
c = (n*self.p.pi/self.p.a)**2*self.field[1:-1,n]
self.G[1:-1,n] = self.p.Pr*( a + b - c)
class Simulation(object):
param_list = {'Re': 1, 'Pr': 1, 'Ra': 1, 'a' : 1, 'Nz': 100,
'NFourier': 50, 'dt_security': 0.9,
'maxiter': 100, 'freq_output': 10,
'freq_critical_Ra':50, 'verbose': False}
def __init__(self, *args, **kargs):
# save the default parameters
for param, value in self.param_list.items():
setattr(self, param, value)
# override if necessary
for param, value in kargs.items():
if param not in self.param_list:
raise Exception("`%s' not recognized" % param)
else:
setattr(self, param, value)
# set the initial values
self.t = 0
self.niter = 0
self.dz = 1/(self.Nz-1)
# some usefull quantities
self.oodz2 = 1/self.dz**2
self.pi = np.pi
# create the inner fields
self.T = Temp(self)
self.omega = Vort(self)
self.psi = Stream(self)
# previous fields for critical Ra number
self.T_old = np.zeros((self.NFourier,))
self.omega_old = np.zeros((self.NFourier,))
self.psi_old = np.zeros((self.NFourier,))
def __del__(self):
pass
def growth(self):
''' Calculate the log-growth rate and return a string containing
all the growth rate'''
amp = lambda v: np.log(abs(v)) if v != 0 else 0
gr = lambda new,old,n: str(amp(new.field[self.Nz//3,n])
- amp(abs(old[n])))
out = "".join([ gr(self.T, self.T_old,n) + "\t" +
gr(self.omega, self.omega_old,n) + "\t" +
gr(self.psi, self.psi_old,n) + "\t"
for n in range(self.NFourier) ])
# save the arrays for next output
self.T_old = self.T.field[self.Nz//3,:].copy()
self.omega_old = self.omega.field[self.Nz//3,:].copy()
self.psi_old = self.psi.field[self.Nz//3,:].copy()
return out+"\n"
def step(self):
# eventually output
if self.verbose and self.niter % self.freq_output == 0:
self.dump()
# eventually calculate the d-ln term for the critical Ra
if self.verbose and self.niter % self.freq_critical_Ra == 0 :
output = "# growth : \t"
output+= "".join([
"{T.name}_{n}\t{w.name}_{n}\t{psi.name}_{n}\t".format(T=self.T,
w=self.omega,
psi=self.psi,
n=n)
for n in range(self.NFourier)])
output+= "\n"
output+= "# growth : \t"
output+= self.growth()
print(output)
# get the max timestep
self.CFL()
# increase the time, the iteration
self.t += self.dt
self.niter += 1
# check that the end is not reached
if self.niter > self.maxiter:
return False
else:
return True
def dump(self):
output = "#k\t"
for n in range(self.NFourier):
o = "{T}_{n}\t{w}_{n}\t{psi}_{n}\t".format(T=self.T.name,
w=self.omega.name,
psi=self.psi.name,
n=n)
output += o
output += "\n"
for k in range(self.Nz):
output += str(k) + "\t"
for n in range(self.NFourier):
l = "{T}\t{w}\t{psi}\t".format(T=self.T.field[k,n],
w=self.omega.field[k,n],
psi=self.psi.field[k,n])
output += l
output += "\n"
print(output)
def CFL(self):
# dt < (dz)^2/4 or (dz)^2/(4Pr) if Pr > 1
self.dt = self.dt_security * self.dz**2/(4*max(1,self.Pr))
def T_0 (n,k,s):
if n > 0:
return np.sin(s.pi*k*s.dz)
else:
return 1-k*s.dz
if __name__ == '__main__':
# create a new simulation
s = Simulation(Re=5)
# initial conditions psi(0) = 0, Omega(0) = 0
s.psi.initial("null")
s.omega.initial("null")
# T_n(t=0) = sin(pi*k*dz) & T_0(t=0) = 1-k*dz
    s.T.initial("T")  # uses T_0(n, k) as defined in Vector.initial
# main loop over time
while s.step():
s.T.step()
s.psi.step()
s.omega.step()
del s
|
cphyc/MHD_simulation
|
python/simul.py
|
Python
|
apache-2.0
| 9,256
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
requirements = [
"requests", # no lower bound
"packaging>=14.0, !=15.0, <22.0.0", # too complex for tool
"wheel<0.36.0",
"click==7.0.0",
]
setuptools.setup(
name="invalid-package",
version="0.0.1",
author="Example Author",
author_email="author@example.com",
description="A small example package",
long_description_content_type="text/markdown",
url="https://github.com/pypa/sampleproject",
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
install_requires=requirements,
packages=setuptools.find_packages(),
python_requires=">=3.6",
)
|
googleapis/python-test-utils
|
tests/unit/resources/bad_package/setup.py
|
Python
|
apache-2.0
| 1,256
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controller module for attenuators.
Sample Config:
.. code-block:: python
"Attenuator": [
{
"address": "192.168.1.12",
"port": 23,
"model": "minicircuits",
"paths": ["AP1-2G", "AP1-5G", "AP2-2G", "AP2-5G"]
},
{
"address": "192.168.1.14",
"port": 23,
"model": "minicircuits",
"paths": ["AP-DUT"]
}
]
"""
import importlib
import logging
MOBLY_CONTROLLER_CONFIG_NAME = "Attenuator"
# Keys used inside a config dict for attenuator.
# Keys for making the connection to the attenuator device. Right now we only
# use telnet lib. This can be refactored when the need for a different
# communication protocol arises.
KEY_ADDRESS = "address"
KEY_PORT = "port"
# A string that is the model of the attenuator used. This is essentially the
# module name for the underlying driver for the attenuator hardware.
KEY_MODEL = "model"
# A list of strings, each describing what's the connected to this attenuation
# path
KEY_PATHS = "paths"
PACKAGE_PATH_TEMPLATE = "mobly.controllers.attenuator_lib.%s"
def create(configs):
objs = []
for config in configs:
_validate_config(config)
attenuator_model = config[KEY_MODEL]
# Import the correct driver module for the attenuator device
module_name = PACKAGE_PATH_TEMPLATE % attenuator_model
module = importlib.import_module(module_name)
# Create each
attenuation_device = module.AttenuatorDevice(
path_count=len(config[KEY_PATHS]))
attenuation_device.model = attenuator_model
instances = attenuation_device.open(config[KEY_ADDRESS], config[KEY_PORT])
for idx, path_name in enumerate(config[KEY_PATHS]):
path = AttenuatorPath(attenuation_device, idx=idx, name=path_name)
objs.append(path)
return objs
def destroy(objs):
for attenuation_path in objs:
attenuation_path.attenuation_device.close()
class Error(Exception):
"""This is the Exception class defined for all errors generated by
Attenuator-related modules.
"""
def _validate_config(config):
"""Verifies that a config dict for an attenuator device is valid.
Args:
config: A dict that is the configuration for an attenuator device.
Raises:
attenuator.Error: A config is not valid.
"""
required_keys = [KEY_ADDRESS, KEY_MODEL, KEY_PORT, KEY_PATHS]
for key in required_keys:
if key not in config:
raise Error("Required key %s missing from config %s", (key, config))
class AttenuatorPath:
"""A convenience class that allows users to control each attenuator path
separately as different objects, as opposed to passing in an index number
to the functions of an attenuator device object.
This decouples the test code from the actual attenuator device used in the
physical test bed.
For example, if a test needs to attenuate four signal paths, this allows the
test to do:
.. code-block:: python
self.attenuation_paths[0].set_atten(50)
self.attenuation_paths[1].set_atten(40)
instead of:
.. code-block:: python
self.attenuators[0].set_atten(0, 50)
self.attenuators[0].set_atten(1, 40)
The benefit the former is that the physical test bed can use either four
single-channel attenuators, or one four-channel attenuators. Whereas the
latter forces the test bed to use a four-channel attenuator.
"""
def __init__(self, attenuation_device, idx=0, name=None):
self.model = attenuation_device.model
self.attenuation_device = attenuation_device
self.idx = idx
if (self.idx >= attenuation_device.path_count):
raise IndexError("Attenuator index out of range!")
def set_atten(self, value):
"""This function sets the attenuation of Attenuator.
Args:
value: This is a floating point value for nominal attenuation to be
set. Unit is db.
"""
self.attenuation_device.set_atten(self.idx, value)
def get_atten(self):
"""Gets the current attenuation setting of Attenuator.
Returns:
A float that is the current attenuation value. Unit is db.
"""
return self.attenuation_device.get_atten(self.idx)
def get_max_atten(self):
"""Gets the max attenuation supported by the Attenuator.
Returns:
A float that is the max attenuation value.
"""
return self.attenuation_device.max_atten
|
google/mobly
|
mobly/controllers/attenuator.py
|
Python
|
apache-2.0
| 5,038
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various generic utility functions and classes.
Utilities are mainly for internal usage, but external libraries and tools
may find some of them useful. Utilities are generally stable, but absolute
backwards compatibility between major versions is not guaranteed.
All utilities are exposed via the :mod:`robot.utils` package, and should be
used either like::
from robot import utils
assert utils.Matcher('H?llo').match('Hillo')
or::
from robot.utils import Matcher
assert Matcher('H?llo').match('Hillo')
"""
from .argumentparser import ArgumentParser, cmdline2list
from .application import Application
from .compress import compress_text
from .connectioncache import ConnectionCache
from .dotdict import DotDict, OrderedDict
from .encoding import (decode_output, encode_output,
decode_from_system, encode_to_system)
from .error import (get_error_message, get_error_details, ErrorDetails)
from .escaping import escape, unescape, split_from_equals
from .etreewrapper import ET, ETSource
from .frange import frange
from .markuputils import html_format, html_escape, xml_escape, attribute_escape
from .markupwriters import HtmlWriter, XmlWriter, NullMarkupWriter
from .importer import Importer
from .match import eq, Matcher, MultiMatcher
from .misc import (getdoc, isatty, plural_or_not, printable_name, py2to3,
roundup, seq2str, seq2str2)
from .normalizing import lower, normalize, NormalizedDict
from .platform import (IRONPYTHON, JYTHON, PY2, PY3, PYTHON, UNIXY, WINDOWS,
RERAISED_EXCEPTIONS)
from .recommendations import RecommendationFinder
from .robotenv import get_env_var, set_env_var, del_env_var, get_env_vars
from .robotinspect import is_java_init, is_java_method
from .robotio import binary_file_writer, file_writer
from .robotpath import abspath, find_file, get_link_path, normpath
from .robottime import (elapsed_time_to_string, format_time, get_elapsed_time,
get_time, get_timestamp, secs_to_timestamp,
secs_to_timestr, timestamp_to_secs, timestr_to_secs,
parse_time)
from .robottypes import (is_bytes, is_dict_like, is_falsy, is_integer,
is_list_like, is_number, is_string, is_truthy,
is_unicode, long, type_name, unicode, StringIO)
from .setter import setter, SetterAwareType
from .sortable import Sortable
from .text import (cut_long_message, format_assign_message,
pad_console_length, get_console_length, split_tags_from_doc,
split_args_from_name_or_path)
from .unic import prepr, unic
from .utf8reader import Utf8Reader
# Used by the old SeleniumLibrary until version 2.9.2.
# https://github.com/robotframework/SeleniumLibrary/issues/261
# TODO: Remove in RF 3.0.
def html_attr_escape(attr):
"""Deprecated!! Use attribute_escape instead."""
return attribute_escape(attr)
|
moto-timo/robotframework
|
src/robot/utils/__init__.py
|
Python
|
apache-2.0
| 3,555
|
__author__ = 'himanshu'
# TrayIcon
class TrayIcon(Exception):
pass
|
chennan47/OSF-Offline
|
osfoffline/exceptions/tray_icon_exceptions.py
|
Python
|
apache-2.0
| 73
|
########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Ganesh'
from setuptools import setup
version = '0.3'
setup(
name='cloudify-aws',
version=version,
author='ran',
author_email='ganeshpandi.g@cloudenablers.com',
packages=['cloudify_aws'],
license='LICENSE',
description='the cloudify amazon provider',
package_data={'cloudify_aws': ['cloudify-config.yaml',
'cloudify-config.defaults.yaml']},
install_requires=[
"scp",
"fabric",
"jsonschema",
"IPy", 'boto'
]
)
|
CloudifySource/cloudify-aws
|
setup.py
|
Python
|
apache-2.0
| 1,176
|
from __future__ import absolute_import
import os
import socket
import random
import string
import time
import uuid
import pytest
from . import unittest
from kafka import SimpleClient, create_message
from kafka.client_async import KafkaClient
from kafka.errors import (
LeaderNotAvailableError, KafkaTimeoutError, InvalidTopicError,
NotLeaderForPartitionError, UnknownTopicOrPartitionError,
FailedPayloadsError
)
from kafka.structs import OffsetRequestPayload, ProduceRequestPayload
#from test.fixtures import random_string, version_str_to_list, version as kafka_version #pylint: disable=wrong-import-order
def random_string(length):
return "".join(random.choice(string.ascii_letters) for i in range(length))
def env_kafka_version():
"""Return the Kafka version set in the OS environment as a tuple.
Example: '0.8.1.1' --> (0, 8, 1, 1)
"""
if 'KAFKA_VERSION' not in os.environ:
return ()
return tuple(map(int, os.environ['KAFKA_VERSION'].split('.')))
def get_open_port():
sock = socket.socket()
sock.bind(("", 0))
port = sock.getsockname()[1]
sock.close()
return port
_MESSAGES = {}
def msg(message):
"""Format, encode and deduplicate a message
"""
global _MESSAGES #pylint: disable=global-statement
if message not in _MESSAGES:
_MESSAGES[message] = '%s-%s' % (message, str(uuid.uuid4()))
return _MESSAGES[message].encode('utf-8')
def send_messages(client, topic, partition, messages):
"""Send messages to a topic's partition
"""
messages = [create_message(msg(str(m))) for m in messages]
produce = ProduceRequestPayload(topic, partition, messages=messages)
resp, = client.send_produce_request([produce])
assert resp.error == 0
return [x.value for x in messages]
def current_offset(client, topic, partition, kafka_broker=None):
"""Get the current offset of a topic's partition
"""
try:
offsets, = client.send_offset_request([OffsetRequestPayload(topic,
partition, -1, 1)])
except Exception:
# XXX: We've seen some UnknownErrors here and can't debug w/o server logs
if kafka_broker:
kafka_broker.dump_logs()
raise
else:
return offsets.offsets[0]
def assert_message_count(messages, num_messages):
"""Check that we received the expected number of messages with no duplicates."""
# Make sure we got them all
assert len(messages) == num_messages
# Make sure there are no duplicates
# Note: Currently duplicates are identified only using key/value. Other attributes like topic, partition, headers,
# timestamp, etc are ignored... this could be changed if necessary, but will be more tolerant of dupes.
unique_messages = {(m.key, m.value) for m in messages}
assert len(unique_messages) == num_messages
class KafkaIntegrationTestCase(unittest.TestCase):
create_client = True
topic = None
zk = None
server = None
def setUp(self):
super(KafkaIntegrationTestCase, self).setUp()
if not os.environ.get('KAFKA_VERSION'):
self.skipTest('Integration test requires KAFKA_VERSION')
if not self.topic:
topic = "%s-%s" % (self.id()[self.id().rindex(".") + 1:], random_string(10))
self.topic = topic
if self.create_client:
self.client = SimpleClient('%s:%d' % (self.server.host, self.server.port))
self.client_async = KafkaClient(bootstrap_servers='%s:%d' % (self.server.host, self.server.port))
timeout = time.time() + 30
while time.time() < timeout:
try:
self.client.load_metadata_for_topics(self.topic, ignore_leadernotavailable=False)
if self.client.has_metadata_for_topic(topic):
break
except (LeaderNotAvailableError, InvalidTopicError):
time.sleep(1)
else:
raise KafkaTimeoutError('Timeout loading topic metadata!')
# Ensure topic partitions have been created on all brokers to avoid UnknownPartitionErrors
# TODO: It might be a good idea to move this to self.client.ensure_topic_exists
for partition in self.client.get_partition_ids_for_topic(self.topic):
while True:
try:
req = OffsetRequestPayload(self.topic, partition, -1, 100)
self.client.send_offset_request([req])
break
except (NotLeaderForPartitionError, UnknownTopicOrPartitionError, FailedPayloadsError) as e:
if time.time() > timeout:
raise KafkaTimeoutError('Timeout loading topic metadata!')
time.sleep(.1)
self._messages = {}
def tearDown(self):
super(KafkaIntegrationTestCase, self).tearDown()
if not os.environ.get('KAFKA_VERSION'):
return
if self.create_client:
self.client.close()
def current_offset(self, topic, partition):
try:
offsets, = self.client.send_offset_request([OffsetRequestPayload(topic,
partition, -1, 1)])
except Exception:
# XXX: We've seen some UnknownErrors here and can't debug w/o server logs
self.zk.child.dump_logs()
self.server.child.dump_logs()
raise
else:
return offsets.offsets[0]
def msgs(self, iterable):
return [self.msg(x) for x in iterable]
def msg(self, s):
if s not in self._messages:
self._messages[s] = '%s-%s-%s' % (s, self.id(), str(uuid.uuid4()))
return self._messages[s].encode('utf-8')
def key(self, k):
return k.encode('utf-8')
class Timer(object):
def __enter__(self):
self.start = time.time()
return self
def __exit__(self, *args):
self.end = time.time()
self.interval = self.end - self.start
|
Yelp/kafka-python
|
test/testutil.py
|
Python
|
apache-2.0
| 6,084
|
# Copyright 2014-2015 University of Chicago
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Repository Management
"""
import datetime
import fnmatch
import hashlib
import os
import os.path
import re
default_root = "/mcs/globus.org/ftppub/gt6"
default_api_root = "/mcs/globus.org/api"
default_releases = ["unstable", "testing", "stable"]
public_key = """-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v1.4.5 (GNU/Linux)
mQGiBE0PXQkRBAC12PfwFzMyTKAvCp3AEbzdwwDyEaBHYmd1+Dv+q5c48fEZQrzA
PuZ75BnG8BRIo3ZSYJll9Xf5v8A0M6F35msBBdjUpI+PHZvSQ+yru6U3w9XCsmO9
jSGWM1XAw/hcDWOsETOsjJ56AqIKndOXtG2jeOMFD0MwJus9paDcv5pPkwCgk3Fk
I+GdLaZf0O6vGUtq2Fo2EgkD/14AQ4SyUufwztQeLwlYXyihdUoIVBl4wm4fndJb
TuzTlp3V/oabM8t+V92ftbqlAesFb1FdFQ9NeUEY0VIODR2OTsmEfLUSMK/kRfXM
4FatObXpEp58EydZb3oz/vwASEk1Nno5OW2noGZL3sCk+3j65MstI2q4kMvNSvl+
JEjUBACgNv/mDrn0UjWBuzxOuZrh1r2rBdsjIHx31o/vBF5YLfQhErZQTm6cfpRK
W32Nm18btrqgxxHFAMb4wxnVxAxdM3zLSAaiqvi33z2wHReh5TfaVKpJBj7LpMSI
hwu50iovsBjE7HiusJQBWBtk8Bqp4g9ic2sPV0caEMCUXU5R9bQjR2xvYnVzIFRv
b2xraXQgPHN1cHBvcnRAZ2xvYnVzLm9yZz6IYAQTEQIAIAUCTQ9dCQIbAwYLCQgH
AwIEFQIIAwQWAgMBAh4BAheAAAoJEESufsL68kNlb6IAoIemS8dr65xCkA4GQzgJ
ngXwZgtvAKCOKs5Ork6HiNKIrWRGMLvA7iktBbkCDQRND10SEAgA37cRQGj/QNcc
OjyBrL6e2wPT7UtpXBEHzfjhtmT8+VC+PSbKRxVfawLBtrfzSAAwsmye3c+XK/VB
Pa06vSSmezeyNau+XtEVLwrwQwO/kM6wgNtb7zYyI67Y6XEPP+ZlqpZ0W14cTZBD
3SXWuu6zqjdtUnJCg/j/j0zH5TZa40aCfisERxNCQeoePk2gmMTJDJF0ASM3Nhys
QIP9qpCA+eOJnKmMeEgDCW9j2mYO4tp9lCSbi15HAb41HKN6xypNWk+EHKyu9n50
88UocRHXLZFujzNTGIokWAcoC0D3qpVQehtAVgt1VPrE6MxFPek8ZN4Ho++92KB7
F6E0OsfF6wADBggAnNPguzYAIztF/EzZANUU/7Eon9zJaD4Lf/mnhB3bMuGvenY0
7HSBAXbUxVXs7uX3S6u9PZ9dytl2Fqh8w47TNcC0ACKLRnhxTJ92LLakzAGVGtNz
2W9l+YJaZ6qIQR9FmYpCyIWp6Vm47yOARThrMtnwUhb53g5ZfxgzpHNUDN/7utTy
3sUaMRiijecmSVhDFbrz7ryY2Btlcr7ZrBo0ODHohDkZVn2UrzE6qg9g5np03zYe
5OUM5Lt5GYZJSKZO81aJ5+9DlkiAev3BFEeCsSOwjrqLZpsr0olbIfeHCi8pvjOJ
SCfx4Qs/hI34ykaUn3AgbgxqT0mSKfMasg2bIIhJBBgRAgAJBQJND10SAhsMAAoJ
EESufsL68kNlBuAAnRRI5jFAvyjtQaoQpVqSL4/O45D7AJ9WrW/vxTzN0OyZyUU6
8T0dJyXArA==
=r6rU
-----END PGP PUBLIC KEY BLOCK-----
"""
uid = os.getuid()
gid = None
def _digest_file(filename, force=False):
"""
Compute the md5, sha1, sha512 hashes of a file and write them to disk.
Parameters
----------
*filename*::
Name of the file to compute the hash of (str)
*force*::
Overwrite existing hash file (bool [False])
"""
if fnmatch.fnmatch(filename, "*.md5") or \
fnmatch.fnmatch(filename, "*.sha1") or \
fnmatch.fnmatch(filename, "*.sha512"):
return
for h in ['md5', 'sha1', 'sha512']:
hashname = filename + "." + h
if (force
or not os.path.exists(hashname)
or os.path.getmtime(filename) > os.path.getmtime(hashname)):
digester = hashlib.new(h)
            f = open(filename, "rb")
            digester.update(f.read())
            f.close()
            f = open(hashname, "w")
f.write(
"%s %s\n" %
(digester.hexdigest(), filename.split(os.sep)[-1]))
f.close()
class Repository(object):
"""
Repository class
===================
This class contains the generic package management features for the various
metadata types associated with different repository systems. It contains
algorithms for matching packages and selecting ones to copy into another
repository based on version matches. This is subclassed to implement the
actual metdata parsing for various metadata formats.
"""
def __init__(self):
self.packages = {}
def get_packages(
self, name=None, arch=None, version=None, source=None,
newest_only=False):
"""
Construct a list of packages that match the optional parameters. If
source is an Metadata object, match packages that have that package
as the source package. Otherwise, filter the package list based on
the name if not None, further filtering on version and arch if they
are not None. If newest_only is True, only return the highest versions
of the packages which match
"""
package_candidates = []
if source is not None:
return [
(package)
for package_list in self.packages
for package in self.packages[package_list]
if package.source_name == source.source_name
and package.version == source.version
]
elif name is not None:
if version is not None:
package_candidates = [
(package)
for package_list in self.packages
for package in self.packages[package_list]
if name == package.name
and package.version == version
]
else:
package_candidates = [
(package)
for package_list in self.packages
for package in self.packages[package_list]
if name == package.name
]
if arch is not None:
package_candidates = [
(p)
for p in package_candidates if p.arch == arch
]
if newest_only and len(package_candidates) > 0:
newv = package_candidates[-1].version
return [p for p in package_candidates if p.version == newv]
elif newest_only:
return []
else:
return package_candidates
else:
package_candidates = []
for n in self.packages:
package_candidates.extend(
self.get_packages(
name=n, arch=arch, newest_only=newest_only))
return package_candidates
def is_newer(self, pkg):
"""
Check to see if *pkg* is newer than any versions of the same package
name within this repository. Returns 'True' if it is, 'False'
otherwise.
Parameters
----------
*self*:
This Repository object
*pkg*:
Package metadata to compare against the versions in *self*.
Returns
-------
Boolean
"""
matches = self.get_packages(pkg.name, arch=pkg.arch, newest_only=True)
return matches == [] or pkg > matches[-1]
def __contains__(self, pkg):
"""
Check to see if pkg is included in this Repository
"""
return len(self.get_packages(
name=pkg.name, arch=pkg.arch,
version=pkg.version, newest_only=True)) > 0
def __iter__(self):
"""
Iterate through the packages in this repository
"""
        return iter(self.packages)
@staticmethod
def create_index(path, recursive=False):
for root, dirs, filenames in os.walk(path, topdown=not recursive):
if not recursive:
del dirs[0:]
indexfile = os.path.join(root, "index.html")
index_mtime = 0
regenerate_index = False
if os.path.exists(indexfile):
index_mtime = os.stat(indexfile).st_mtime
else:
regenerate_index = True
if not regenerate_index:
for dir in dirs:
fulldir = os.path.join(root, dir)
if os.stat(fulldir).st_mtime >= index_mtime:
regenerate_index = True
break
if not regenerate_index:
for filename in filenames:
fullfilename = os.path.join(root, filename)
if os.stat(fullfilename).st_mtime >= index_mtime:
regenerate_index = True
break
if regenerate_index:
try:
f = open(indexfile, "w")
f.write(
"<html><head><title>{0}</title></head>\n"
"<body>"
"<table>\n"
"<tr>"
"<td><a href='../index.html'>Parent Directory</a></td>"
"<td>{1}</td></tr>\n"
.format(
os.path.basename(root),
datetime.datetime.fromtimestamp(
os.stat(
os.path.join(
root, "..")).st_mtime).isoformat()))
dirs.sort()
for dir in dirs:
f.write(
"<tr>"
"<td><a href='{0}/index.html'>{0}/</a></td>"
"<td>{1}/</td></tr>\n"
.format(
dir,
datetime.datetime.fromtimestamp(
os.stat(
os.path.join(root, dir)).st_mtime
).isoformat()))
filenames.sort()
for pkg in filenames:
pkg_filename = os.path.join(root, pkg)
if (os.path.isfile(pkg_filename)
and not pkg_filename.endswith(".html")):
f.write(
"<tr>"
"<td><a href='{0}'>{0}</a></td>"
"<td>{1}</td></tr>\n"
.format(
pkg,
datetime.datetime.fromtimestamp(
os.stat(
pkg_filename).st_mtime
).isoformat()))
f.write("</table></body></html>\n")
finally:
f.close()
os.utime(root, None)
class Release(object):
"""
A Release is a top-level collection of +repo.Repository+ objects for
particular package stability ('unstable', 'testing', 'stable')
for each operating system.
"""
def __init__(self, name, repositories):
self.name = name
self.repositories = repositories
def get_packages(
self, name=None, os=None, version=None, arch=None,
source=None, newest_only=False):
return [p
for repository in self.repositories_for_os_arch(os, arch)
for p in repository.get_packages(
name=name, arch=arch, version=version, source=source,
newest_only=newest_only)]
def is_newer(self, package):
for repository in self.repositories_for_package(package):
if repository.is_newer(package):
return True
return False
def add_package(self, package, update_metadata=False):
return [
repository.add_package(package, update_metadata)
for repository in self.repositories_for_package(package)]
def remove_package(self, package, update_metadata=False):
return [
repository.remove_package(package, update_metadata)
for repository in self.repositories_for_package(package)]
def update_metadata(self, osname=None, arch=None, force=False):
for repository in self.repositories_for_os_arch(osname, arch):
repository.update_metadata(force)
def repositories_for_os_arch(self, osname, arch):
if osname is not None:
if arch is not None:
return [self.repositories[osname][arch]]
else:
return [
self.repositories[osname][ar]
for ar in self.repositories[osname]
]
else:
return [
self.repositories[osn][ar]
for osn in self.repositories
for ar in self.repositories[osn]
]
def repositories_for_package(self, package):
"""
Returns a list of repositories where the given package would belong.
By default, its a list containing the repository that matches the
package's os and arch, but subclasses can override this
"""
if package.os in self.repositories:
return [self.repositories[package.os][package.arch]]
else:
return []
def get_operating_systems(self):
return self.repositories.keys()
def get_architectures(self, osname):
return self.repositories[osname].keys()
def __contains__(self, package):
return len(self.get_packages(
name=package.name, os=package.os, version=package.version,
arch=package.arch)) > 0
class Manager(object):
def __init__(self, releases):
self.releases = releases
def get_release(self, releasename):
return self.releases[releasename]
def package_name(self, name):
return name.replace("_", "-") if name is not None else None
def promote_packages(
self, from_release=None,
to_release="unstable", os=None, name=None, version=None,
dryrun=False, exclude_package_names=None):
"""
Find new packages in the *from_release*, that are not in *to_release*
and copy them there and update the distro metadata. The packages to
promote
can be limited by specifying the package *name*, *version*, and
particular *os* to update.
Parameters
----------
*from_release*::
The name of a release in this Manager object to copy new packages
from.
*to_release*::
The name of a release in this Manager object
to copy new packages into.
*os*::
Optional operating system indicator (either version or codename)
to restrict the package promotion to.
*name*::
Optional name of the packages to copy. If this is not present, all
packages that have a newer source version in *from_release* than
*to_release* are copied.
*version*::
Optional version of the packages to copy. This is only used if the
*name* option is used to additionally limit the packages to copy.
*dryrun*::
(Optional) Boolean whether to prepare to promote the packages or
just compute which packages are eligible for promotion.
*exclude_package_names*::
(Optional) List of regular expressions matching packages to
exclude from the promotion list.
Returns
-------
This function returns a list of packages that were promoted
(or would have been if dryrun=False)
"""
from_release = self.get_release(from_release)
# Find source packages in the from_release that are newer versions than
# those in the to_release
src_candidates = [src_info for src_info in from_release.get_packages(
name=self.package_name(name), os=os, version=version,
newest_only=(version is None))]
src_candidates_by_os = {}
for src in src_candidates:
source_and_os = "{0}:{1}".format(src.source_name, src.os)
if (source_and_os not in src_candidates_by_os
or src_candidates_by_os[source_and_os].version
< src.version):
src_candidates_by_os[source_and_os] = src
src_candidates = [
src_candidates_by_os[x] for x in src_candidates_by_os
]
result = []
seen = {}
to_release_object = self.get_release(to_release)
# For each package found above, find source and binaries in
# from_release and copy them over if they are not in to_release
for src in src_candidates:
source_and_os = "{0}:{1}".format(src.source_name, src.os)
if source_and_os not in seen:
seen[source_and_os] = True
for package in from_release.get_packages(source=src):
skip = False
if exclude_package_names is not None:
for exclude in exclude_package_names:
if re.match(exclude, package.name) is not None:
skip = True
break
if (not skip) and to_release_object.is_newer(package):
if not dryrun:
to_release_object.add_package(
package, update_metadata=False)
result.append(package)
if not dryrun:
to_release_object.update_metadata()
return result
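    # Illustrative usage (names are hypothetical, not from the original file):
    # a Manager built over 'testing' and 'stable' releases could preview a
    # promotion without touching repository metadata via
    #   manager.promote_packages(from_release='testing', to_release='stable',
    #                            dryrun=True)
    # and then repeat the call with dryrun=False to copy the packages and
    # regenerate the metadata.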
# vim: filetype=python:
|
globus/globus-release-tools
|
share/python/repo/__init__.py
|
Python
|
apache-2.0
| 17,563
|
# Copyright 2021 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import time
import unittest
from http import HTTPStatus
from testing.common import BaseTestCase, TestAppProcess
from fedlearner_webconsole.proto import workflow_definition_pb2
from fedlearner_webconsole.db import db
from fedlearner_webconsole.workflow.models import Workflow
from fedlearner_webconsole.job.models import Job, JobType
from fedlearner_webconsole.job.metrics import JobMetricsBuilder
class JobMetricsBuilderTest(BaseTestCase):
class Config(BaseTestCase.Config):
ES_HOST = ''
ES_PORT = 80
class FollowerConfig(Config):
GRPC_LISTEN_PORT = 4990
def test_data_join_metrics(self):
job = Job(
name='multi-indices-test27',
job_type=JobType.DATA_JOIN)
import json
print(json.dumps(JobMetricsBuilder(job).plot_metrics()))
def test_nn_metrics(self):
job = Job(
name='automl-2782410011',
job_type=JobType.NN_MODEL_TRANINING)
print(JobMetricsBuilder(job).plot_metrics())
def test_peer_metrics(self):
proc = TestAppProcess(
JobMetricsBuilderTest,
'follower_test_peer_metrics',
JobMetricsBuilderTest.FollowerConfig)
proc.start()
self.leader_test_peer_metrics()
proc.terminate()
def leader_test_peer_metrics(self):
self.setup_project(
'leader',
JobMetricsBuilderTest.FollowerConfig.GRPC_LISTEN_PORT)
workflow = Workflow(
name='test-workflow',
project_id=1)
db.session.add(workflow)
db.session.commit()
while True:
resp = self.get_helper(
'/api/v2/workflows/1/peer_workflows'
'/0/jobs/test-job/metrics')
if resp.status_code == HTTPStatus.OK:
break
time.sleep(1)
def follower_test_peer_metrics(self):
self.setup_project(
'follower',
JobMetricsBuilderTest.Config.GRPC_LISTEN_PORT)
workflow = Workflow(
name='test-workflow',
project_id=1,
metric_is_public=True)
workflow.set_job_ids([1])
db.session.add(workflow)
job = Job(
name='automl-2782410011',
job_type=JobType.NN_MODEL_TRANINING,
workflow_id=1,
project_id=1,
config=workflow_definition_pb2.JobDefinition(
name='test-job'
).SerializeToString())
db.session.add(job)
db.session.commit()
while True:
time.sleep(1)
if __name__ == '__main__':
# no es in test env skip this test
# unittest.main()
pass
|
bytedance/fedlearner
|
web_console_v2/api/test/fedlearner_webconsole/job/metrics_test.py
|
Python
|
apache-2.0
| 3,305
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import finufft
from finufft import interface
import numpy as np
import pytest
__all__ = [
"test_nufft1d1", "test_nufft1d2", "test_nufft1d3",
]
def test_nufft1d1(seed=42, iflag=1):
np.random.seed(seed)
ms = int(1e3)
n = int(2e3)
tol = 1.0e-9
x = np.random.uniform(-np.pi, np.pi, n)
c = np.random.uniform(-1.0, 1.0, n) + 1.0j*np.random.uniform(-1.0, 1.0, n)
f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag)
# Make sure that this also works with other values of 'fftw'
f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag,
fftw=interface.FFTWOptions.measure)
with pytest.raises(TypeError):
f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag, fftw=100)
f0 = interface.dirft1d1(x, c, ms, iflag=iflag)
assert np.all(np.abs((f - f0) / f0) < 1e-6)
def test_nufft1d2(seed=42, iflag=1):
np.random.seed(seed)
ms = int(1e3)
n = int(2e3)
tol = 1.0e-9
x = np.random.uniform(-np.pi, np.pi, n)
c = np.random.uniform(-1.0, 1.0, n) + 1.0j*np.random.uniform(-1.0, 1.0, n)
f = finufft.nufft1d1(x, c, ms, eps=tol, iflag=iflag)
c = finufft.nufft1d2(x, f, eps=tol, iflag=iflag)
c0 = interface.dirft1d2(x, f, iflag=iflag)
assert np.all(np.abs((c - c0) / c0) < 1e-6)
def test_nufft1d3(seed=42, iflag=1):
np.random.seed(seed)
ms = int(1e3)
n = int(2e3)
tol = 1.0e-9
x = np.random.uniform(-np.pi, np.pi, n)
c = np.random.uniform(-1.0, 1.0, n) + 1.0j*np.random.uniform(-1.0, 1.0, n)
s = 0.5 * n * (1.7 + np.random.uniform(-1.0, 1.0, ms))
f = finufft.nufft1d3(x, c, s, eps=tol, iflag=iflag)
f0 = interface.dirft1d3(x, c, s, iflag=iflag)
assert np.all(np.abs((f - f0) / f0) < 1e-6)
|
dfm/python-finufft
|
tests/test_1d.py
|
Python
|
apache-2.0
| 1,814
|
"""
This module handles parsing the AWS Billing Reports (stored on S3 in .zip
or just plain .csv format) and creating metrics to be sent to the WF proxy.
"""
import ConfigParser
import datetime
import io
import os
import sys
import time
import traceback
import zipfile
import logging.config
import dateutil
from wavefront.aws_common import AwsBaseMetricsCommand, AwsBaseMetricsConfiguration
from wavefront import utils
#pylint: disable=too-few-public-methods
#pylint: disable=too-many-instance-attributes
class AwsBillingConfiguration(AwsBaseMetricsConfiguration):
"""
Configuration for billing
"""
def __init__(self, config_file_path):
super(AwsBillingConfiguration, self).__init__(
config_file_path=config_file_path)
self.enabled = self.getboolean('aws_billing', 'enabled', False)
self.role_arn = self.get('aws_billing', 'role_arn', None)
self.role_external_id = self.get(
'aws_billing', 'external_id', None)
self.billing_thread_names = self.getlist(
'aws_billing', 'billing_threads', [])
self.ec2_tag_keys = self.getlist('aws_billing', 'ec2_tag_keys', [])
self.billing_threads = []
for name in self.billing_thread_names:
section = 'billing-' + name
self.billing_threads.append(
AwsBillingDetailThreadConfiguration(self, section))
def validate(self):
"""
Validation of configuration
"""
pass
def get_region_config(self, _):
"""
Gets the configuration for cloudwatch for the given region
Arguments:
region - the name of the region
"""
return self
#pylint: disable=too-few-public-methods
#pylint: disable=too-many-instance-attributes
class AwsBillingDetailThreadConfiguration(object):
"""
Configuration for a billing detail section in the configuration file
"""
def __init__(self, config, section_name):
super(AwsBillingDetailThreadConfiguration, self).__init__()
self.config = config
self.section_name = section_name
self.last_run_time_section = section_name
self.tmp_dir = self.config.get(section_name, 'tmp_dir', '/tmp/')
self.namespace = self.config.get(section_name, 'namespace', None)
self.enabled = self.config.getboolean(section_name, 'enabled', False)
self.region = self.config.get(section_name, 's3_region', None)
self.bucket = self.config.get(section_name, 's3_bucket', None)
self.prefix = self.config.get(section_name, 's3_prefix', None)
self.header_row_index = int(
self.config.get(section_name, 'header_row_index', 1))
self.dimensions = self._build_table(
self.config.getlist(section_name, 'dimension_column_names', []))
self.metrics = self._build_table(
self.config.getlist(section_name, 'metric_column_names', []))
self.source_names = self.config.getlist(section_name, 'source_names', [])
self.dates = self._build_table(
self.config.getlist(section_name, 'date_column_names', []), '|')
self.duration = self.config.getlist(section_name, 'duration_column_names', [])
self.instance_id_columns = self.config.getlist(
section_name, 'instance_id_column_names', [])
self.delay = int(self.config.get(section_name, 'delay', 3600))
self.record_id_column = self.config.get(
section_name, 'record_id_column_name', None)
self.maximum_number_of_rows = int(self.config.get(
section_name, 'maximum_number_of_rows', 0))
self.sleep_after_rows = int(self.config.get(
section_name, 'sleep_after_rows', 0))
self.sleep_ms = float(self.config.get(
section_name, 'sleep_ms', 0.0)) / 1000
@staticmethod
def _build_table(lst, delimiter=':'):
"""
Build a dictionary from a list of delimiter-separated key-value pairs
Arguments:
lst - list of strings
delimiter - delimiter between components of each string in the lst
Returns:
dictionary with the key being the string on the left side of
the delimiter and the value of the dictionary key being the string
on the right side
"""
rtn = {}
if lst:
for item in lst:
parts = item.split(delimiter)
if len(parts) == 1:
rtn[parts[0]] = parts[0]
elif len(parts) == 2:
rtn[parts[0]] = parts[1]
return rtn
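    # Illustrative example (comment added for clarity, not part of the
    # original file): with the default ':' delimiter,
    #   _build_table(['Cost:cost', 'UsageType'])
    # returns {'Cost': 'cost', 'UsageType': 'UsageType'} -- two-part items map
    # key to value, single items map to themselves.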
def get_last_record_id(self, curr_month):
"""
Gets the last record id for the given month
"""
return self.config.output.get(
self.section_name, 'last_record_id_' + curr_month, None)
def set_last_record_id(self, curr_month, record_id):
"""
Sets the last record id read
Arguments:
record_id - last record id
"""
if not record_id:
return
self.config.output.set(
self.section_name, 'last_record_id_' + curr_month, record_id)
self.config.output.save()
class AwsBillingMetricsCommand(AwsBaseMetricsCommand):
"""
Billing metrics command object. Grabs metrics from billing CSV files.
"""
def __init__(self, **kwargs):
super(AwsBillingMetricsCommand, self).__init__(**kwargs)
def _initialize(self, args):
"""
Initialize this command
Arguments:
arg - the argparse parser object returned from argparser
"""
self.config = AwsBillingConfiguration(args.config_file_path)
self.config.validate()
try:
logging.config.fileConfig(args.config_file_path)
except ConfigParser.NoSectionError:
pass
self.logger = logging.getLogger()
def _process(self):
"""
Processes the latest billing details CSV file. A few helpful sites:
http://www.dowdandassociates.com/products/cloud-billing/documentation/1.0/schema/
http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/detailed-billing-reports.html#reportstagsresources
"""
utcnow = (datetime.datetime.utcnow()
.replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
if utils.CANCEL_WORKERS_EVENT.is_set():
return
if not self.config.enabled:
self.logger.info('Billing is disabled')
return
for config in self.config.billing_threads:
if utils.CANCEL_WORKERS_EVENT.is_set():
break
try:
if config.enabled:
last_run_time = config.config.get_last_run_time()
if last_run_time:
diff = utcnow - last_run_time
if diff.total_seconds() <= config.delay:
self.logger.info('Not ready to run %s (last run at '
'%s; expected delay interval is %ds)',
config.section_name,
str(last_run_time),
config.delay)
continue
if config.bucket == 'local':
self.logger.info('Running in local mode ...')
self._get_csv_from_local(config)
else:
self._get_csv_from_s3(config)
config.config.set_last_run_time(utcnow, None, True)
else:
self.logger.info('Billing thread %s is disabled',
config.section_name)
#pylint: disable=bare-except
except:
self.logger.error('%s failed: %s', config.section_name,
sys.exc_info()[1])
traceback.print_exc()
def _get_csv_from_local(self, config):
"""
Opens a CSV file on the local machine
Arguments:
config - the AwsBillingDetailThreadConfiguration object
"""
self.logger.info('Getting AWS billing details from local file system %s',
config.section_name)
with open(config.prefix, 'r') as csvfd:
csv_file = utils.CsvFile(csvfd, config.header_row_index)
self.parse_csv(config, csv_file, 'local')
#pylint: disable=too-many-locals
#pylint: disable=too-many-branches
#pylint: disable=too-many-statements
def _get_csv_from_s3(self, config):
"""
Opens a CSV file that matches the prefix in the S3 bucket.
Arguments:
config - the AwsBillingDetailThreadConfiguration object
"""
self.logger.info('Getting AWS billing details from S3 for %s',
config.section_name)
utcnow = (datetime.datetime.utcnow()
.replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
s3cli = self.account.get_session(
config.region, self.config.role_arn,
self.config.role_external_id).client('s3')
acct_id = self.account.get_account_id(self.config.role_arn)
curr_month = utcnow.strftime('%Y-%m')
prefix = (config.prefix
.replace('${account_id}', acct_id)
.replace('${date}', curr_month))
# find the item in the s3 bucket
response = s3cli.list_objects(Bucket=config.bucket, Prefix=prefix)
if (not response or 'Contents' not in response or
not response['Contents']):
self.logger.warning('Billing details file [%s] not found in %s\n%s',
prefix, config.bucket, str(response))
return
# open the item in S3
key = None
zipped = False
for s3file in response['Contents']:
if s3file['Key'][-8:] == '.csv.zip':
key = s3file['Key']
zipped = True
break
if s3file['Key'][-4:] == '.csv':
key = s3file['Key']
zipped = False
if not key:
self.logger.warning('Unable to find billing file [%s] in %s',
prefix, config.bucket)
return
        filename = None
        try:
            response = s3cli.get_object(Bucket=config.bucket, Key=key)
            csv_contents = io.BytesIO(response['Body'].read())
if zipped:
self.logger.info('Unzipping %s ...', key)
with zipfile.ZipFile(csv_contents, 'r') as zipref:
key = key[0:-4] # remove .zip
filename = config.tmp_dir + key
zipref.extractall(config.tmp_dir)
if utils.CANCEL_WORKERS_EVENT.is_set():
return
with open(filename, 'r') as csvfd:
csv_file = utils.CsvFile(csvfd, config.header_row_index)
self.parse_csv(config, csv_file, curr_month)
else:
csv_file = utils.CsvFile(csv_contents, config.header_row_index)
self.parse_csv(config, csv_file, curr_month)
finally:
if filename and os.path.exists(filename):
self.logger.info('Removing %s ...', filename)
os.remove(filename)
def parse_csv(self, config, csvreader, curr_month):
"""
Parse the CSV contents and generate metrics.
Arguments:
config - the AwsBillingDetailThreadConfiguration object
csvreader - utils.CsvFile object
curr_month - Y-M
"""
rows = 0
record_id = None
current_record_id = None
if config.record_id_column:
record_id = config.get_last_record_id(curr_month)
self.logger.info('Skipping records until after record ID %s', record_id)
# loop over all lines in the csv file after the header and
# transmit the cost metric for each one
#pylint: disable=bare-except
for row in csvreader:
if utils.CANCEL_WORKERS_EVENT.is_set():
break
try:
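                # When resuming a month, skip rows until we reach the last record ID
                # already processed; that row clears the marker, and later rows are
                # transmitted normally.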
if config.record_id_column and row[config.record_id_column]:
current_record_id = row[config.record_id_column]
if record_id and current_record_id != record_id:
continue
elif record_id and current_record_id == record_id:
record_id = None
continue
else:
record_id = None
self._process_csv_row(row, config)
except:
self.logger.warning('Unable to process record (%s):\n\t%s',
sys.exc_info()[1], str(row))
traceback.print_exc()
rows = rows + 1
if config.maximum_number_of_rows:
if rows >= config.maximum_number_of_rows:
self.logger.debug('Stopping after %d rows', rows)
break
if config.sleep_after_rows and rows % config.sleep_after_rows == 0:
self.logger.debug('Sleeping %0.2f', config.sleep_ms)
time.sleep(config.sleep_ms)
if current_record_id:
self.logger.info('Recording last record id of %s for %s',
current_record_id, curr_month)
config.set_last_record_id(curr_month, current_record_id)
def _process_csv_row(self, row, config):
# point tags
point_tags = {}
for header, point_tag_key in config.dimensions.iteritems():
if row[header]:
point_tags[point_tag_key] = row[header]
# point tags from ec2 instance
#pylint: disable=too-many-nested-blocks
if config.instance_id_columns:
found_instance = False
for header in config.instance_id_columns:
instance_id = row[header]
                # e.g. arn:aws:ec2:us-east-1:011750033084:instance/i-33ac36e5
if instance_id and instance_id[0:12] == 'arn:aws:ec2:':
parts = instance_id.split(':')
instance_id = parts[5].split('/')[1]
point_tags['region'] = parts[3]
if not instance_id or instance_id[0:2] != 'i-':
continue
for region in self.account.regions:
for sub_account in self.account.get_sub_accounts():
instances = sub_account.get_instances(region)
if instance_id in instances:
instance_tags = instances[instance_id]
for key, value in instance_tags.iteritems():
point_tags[key] = value
found_instance = True
break
if found_instance:
break
if found_instance:
break
# source names
source, source_name = AwsBaseMetricsCommand.get_source(
config.source_names, point_tags)
if source_name in point_tags:
del point_tags[source_name]
# timestamp
tstamp = None
tstamp_col_values = []
for header, date_fmt in config.dates.iteritems():
if row[header]:
tstamp_col_values.append(row[header])
tstamp = utils.unix_time_seconds(
datetime.datetime.strptime(row[header], date_fmt))
if not tstamp:
self.logger.warning('Unable to find valid date in columns (%s) '
'|%s|. Record is:\n\t%s',
', '.join(config.dates.keys()),
', '.join(tstamp_col_values),
str(row))
return
# calculate duration
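        # config.duration holds two "column|date_format" specs; duration is the
        # difference between the parsed end and start timestamps, in seconds.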
if config.duration and len(config.duration) == 2:
start = config.duration[0].split('|')
start_dt = datetime.datetime.strptime(row[start[0]],
start[1])
start_tstamp = utils.unix_time_seconds(start_dt)
end = config.duration[1].split('|')
end_dt = datetime.datetime.strptime(row[end[0]], end[1])
end_tstamp = utils.unix_time_seconds(end_dt)
duration = end_tstamp - start_tstamp
else:
duration = 0
# metric and value
for header, metric_name in config.metrics.iteritems():
if config.namespace:
metric = config.namespace + '.' + metric_name
else:
metric = metric_name
value = row[header]
if not value:
value = 0.0
# send the metric to the proxy
self.proxy.transmit_metric(metric, value, long(tstamp),
source, point_tags)
if duration:
self.proxy.transmit_metric(metric + '.duration',
duration, long(tstamp),
source, point_tags)
|
wavefrontHQ/wavefront-collector
|
wavefront/awsbilling.py
|
Python
|
apache-2.0
| 17,443
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.mark import parametrize
from ducktape.utils.util import wait_until
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int_with_prefix
from kafkatest.version import DEV_BRANCH, LATEST_0_10_0, LATEST_0_10_1, LATEST_0_10_2, LATEST_0_11_0, LATEST_1_0, LATEST_1_1, LATEST_2_0, LATEST_2_1, LATEST_2_2, LATEST_2_3, LATEST_2_4, KafkaVersion
class ClientCompatibilityProduceConsumeTest(ProduceConsumeValidateTest):
"""
These tests validate that we can use a new client to produce and consume from older brokers.
"""
def __init__(self, test_context):
""":type test_context: ducktape.tests.test.TestContext"""
super(ClientCompatibilityProduceConsumeTest, self).__init__(test_context=test_context)
self.topic = "test_topic"
self.zk = ZookeeperService(test_context, num_nodes=3)
self.kafka = KafkaService(test_context, num_nodes=3, zk=self.zk, topics={self.topic:{
"partitions": 10,
"replication-factor": 2}})
self.num_partitions = 10
self.timeout_sec = 60
self.producer_throughput = 1000
self.num_producers = 2
self.messages_per_producer = 1000
self.num_consumers = 1
def setUp(self):
self.zk.start()
def min_cluster_size(self):
# Override this since we're adding services outside of the constructor
return super(ClientCompatibilityProduceConsumeTest, self).min_cluster_size() + self.num_producers + self.num_consumers
@parametrize(broker_version=str(DEV_BRANCH))
@parametrize(broker_version=str(LATEST_0_10_0))
@parametrize(broker_version=str(LATEST_0_10_1))
@parametrize(broker_version=str(LATEST_0_10_2))
@parametrize(broker_version=str(LATEST_0_11_0))
@parametrize(broker_version=str(LATEST_1_0))
@parametrize(broker_version=str(LATEST_1_1))
@parametrize(broker_version=str(LATEST_2_0))
@parametrize(broker_version=str(LATEST_2_1))
@parametrize(broker_version=str(LATEST_2_2))
@parametrize(broker_version=str(LATEST_2_3))
@parametrize(broker_version=str(LATEST_2_4))
def test_produce_consume(self, broker_version):
print("running producer_consumer_compat with broker_version = %s" % broker_version)
self.kafka.set_version(KafkaVersion(broker_version))
self.kafka.security_protocol = "PLAINTEXT"
self.kafka.interbroker_security_protocol = self.kafka.security_protocol
self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka,
self.topic, throughput=self.producer_throughput,
message_validator=is_int_with_prefix)
self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic,
consumer_timeout_ms=60000,
message_validator=is_int_with_prefix)
self.kafka.start()
self.run_produce_consume_validate(lambda: wait_until(
lambda: self.producer.each_produced_at_least(self.messages_per_producer) == True,
timeout_sec=120, backoff_sec=1,
err_msg="Producer did not produce all messages in reasonable amount of time"))
|
sslavic/kafka
|
tests/kafkatest/tests/client/client_compatibility_produce_consume_test.py
|
Python
|
apache-2.0
| 4,454
|
# Copyright 2014-2015 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from touchdown.core.resource import Resource
from touchdown.core.plan import Plan, Present
from touchdown.core import argument, serializers
from ..account import BaseAccount
from ..common import SimpleDescribe, SimpleApply, SimpleDestroy, RefreshMetadata
from ..s3 import Bucket
from .. import route53
from .common import CloudFrontList
class StreamingLoggingConfig(Resource):
resource_name = "streaming_logging_config"
dot_ignore = True
enabled = argument.Boolean(field="Enabled", default=False)
bucket = argument.Resource(Bucket, field="Bucket", serializer=serializers.Default(default=None), default="")
prefix = argument.String(field="Prefix", default="")
class StreamingDistribution(Resource):
resource_name = "streaming_distribution"
extra_serializers = {
"CallerReference": serializers.Expression(
lambda runner, object: runner.get_plan(object).object.get('StreamingDistributionConfig', {}).get('CallerReference', str(uuid.uuid4()))
),
"Aliases": CloudFrontList(serializers.Chain(
serializers.Context(serializers.Argument("cname"), serializers.ListOfOne(maybe_empty=True)),
serializers.Context(serializers.Argument("aliases"), serializers.List()),
)),
"TrustedSigners": serializers.Const({
"Enabled": False,
"Quantity": 0,
}),
"S3Origin": serializers.Resource(group="s3origin"),
}
name = argument.String()
cname = argument.String(default=lambda instance: instance.name)
comment = argument.String(field='Comment', default=lambda instance: instance.name)
aliases = argument.List()
enabled = argument.Boolean(default=True, field="Enabled")
bucket = argument.Resource(
Bucket,
field="DomainName",
serializer=serializers.Format("{0}.s3.amazonaws.com", serializers.Identifier()),
group="s3origin"
)
origin_access_identity = argument.String(default='', field="OriginAccessIdentity", group="s3origin")
logging = argument.Resource(
StreamingLoggingConfig,
default=lambda instance: dict(enabled=False),
field="Logging",
serializer=serializers.Resource(),
)
price_class = argument.String(
default="PriceClass_100",
choices=['PriceClass_100', 'PriceClass_200', 'PriceClass_All'],
field="PriceClass",
)
account = argument.Resource(BaseAccount)
class Describe(SimpleDescribe, Plan):
resource = StreamingDistribution
service_name = 'cloudfront'
describe_filters = {}
describe_action = "list_streaming_distributions"
describe_envelope = 'StreamingDistributionList.Items'
key = 'Id'
def get_describe_filters(self):
return {"Id": self.object['Id']}
def describe_object_matches(self, d):
return self.resource.name == d['Comment'] or self.resource.name in d['Aliases'].get('Items', [])
def describe_object(self):
distribution = super(Describe, self).describe_object()
if distribution:
result = self.client.get_streaming_distribution(Id=distribution['Id'])
distribution = {"ETag": result["ETag"], "Id": distribution["Id"]}
distribution.update(result['StreamingDistribution'])
return distribution
class Apply(SimpleApply, Describe):
create_action = "create_streaming_distribution"
create_response = "not-that-useful"
waiter = "streaming_distribution_deployed"
signature = (
Present("name"),
Present("bucket"),
)
def get_create_serializer(self):
return serializers.Dict(
StreamingDistributionConfig=serializers.Resource(),
)
class Destroy(SimpleDestroy, Describe):
destroy_action = "delete_streaming_distribution"
def get_destroy_serializer(self):
return serializers.Dict(
Id=self.resource_id,
IfMatch=serializers.Property('ETag'),
)
def destroy_object(self):
if not self.object:
return
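        # CloudFront only allows deleting a streaming distribution after it has
        # been disabled and that change has finished deploying, so disable it first.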
if self.object['StreamingDistributionConfig'].get('Enabled', False):
yield self.generic_action(
"Disable streaming distribution",
self.client.update_streaming_distribution,
Id=self.object['Id'],
IfMatch=self.object['ETag'],
StreamingDistributionConfig=serializers.Resource(
Enabled=False,
),
)
yield self.get_waiter(
["Waiting for streaming distribution to enter disabled state"],
"streaming_distribution_deployed",
)
yield RefreshMetadata(self)
for change in super(Destroy, self).destroy_object():
yield change
class AliasTarget(route53.AliasTarget):
""" Adapts a StreamingDistribution into a AliasTarget """
input = StreamingDistribution
def get_serializer(self, runner, **kwargs):
return serializers.Context(
serializers.Const(self.adapts),
serializers.Dict(
DNSName=serializers.Context(
serializers.Property("DomainName"),
serializers.Expression(lambda r, o: route53._normalize(o)),
),
HostedZoneId="Z2FDTNDATAQYW2",
EvaluateTargetHealth=False,
)
)
|
mitchellrj/touchdown
|
touchdown/aws/cloudfront/streaming_distribution.py
|
Python
|
apache-2.0
| 6,002
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import re
import json
class Spider:
def __init__(self):
self.url = 'http://brand.efu.com.cn/'
self.user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36'
self.headers = { 'User-Agent' : self.user_agent }
def getBrandCategory(self):
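        # Crawl the brand index, then every paginated category listing, and write
        # one (brand, target consumer group, category) row per brand into brand1.csv.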
f = open('brand1.csv','a')
f.write('品牌,目标消费群体,分类\n')
f.close()
content = self.getPageContext(self.url)
items = self.resolveIndexContent(content)
for line in items:
context = [line[0]]
            # iterate over every page number under this category
url = line[1]
for num in range(1,1000):
                nexturl = self.url+url[:-6]+str(num)+".html"  # build the URL for this page
                pageContent = self.getPageContext(nexturl)  # fetch the page content
                # check whether this page has any listings
                if pageContent.find('<div class="lstPhotob">') == -1:
                    break
                # parse the listings on this page
                pageItems = self.resolvePageContent(pageContent,context[0])
if len(pageItems) == 0:
break
for pageLine in pageItems:
# print pageLine[0]
# print pageLine[1]
brandContent = self.getPageContext(pageLine[0])
brandItems = self.resolveBrandContext(brandContent)
if len(brandItems) == 0:
break
f = open('brand1.csv','a')
for brandLine in brandItems:
if brandLine[0] == '目标消费群体':
output = str(pageLine[1])+","+str(brandLine[1])+","+str(line[0])
print output
f.write(output)
f.write("\n")
break
f.close()
def resolveBrandContext(self,content):
# [\s\S]+?
try:
pattern = re.compile('.*?<span class="sp-a">(.*?)</span>.*?<span class="sp-b">(.*?)</span>.*?')
return re.findall(pattern,content)
except:
            # ignore errors when parsing a brand page
return []
def resolveIndexContent(self,content):
try:
pattern = re.compile('.*?<li><a title="(.*?)" href="(.*?)">.*?</a></li>.*?')
return re.findall(pattern,content)
except:
            # ignore errors when parsing the index page
return []
def resolvePageContent(self,content,category):
# pattern = re.compile('.*?<div class="lstPhotob"><div class="lstPa"><div class="lstPa-a"><a href="(.*?)" target="_blank" title="(.*?)>.*?')
try:
pattern = re.compile('.*?<a href="(.*?)" target="_blank" title="(.*?)'+category+'品牌">.*?')
return re.findall(pattern,content)
except:
            # ignore errors when parsing a listing page
return []
def getPageContext(self,url):
        # print 'fetching page', url
try:
request = urllib2.Request(url,headers = self.headers)
response = urllib2.urlopen(request)
return response.read()
        except:
            # ignore request errors and return empty content so callers can skip the page
            return ''
def run(self):
self.getBrandCategory()
spider = Spider()
spider.run()
|
xinghalo/DMInAction
|
src/spider/BrandSpider/brand1.py
|
Python
|
apache-2.0
| 2,854
|
# coding=utf-8
from typing import List
import networkx as nx
import pyisemail
from fuzzywuzzy import fuzz
from recordclass import recordclass
import pandas as pd
import saapy.util as su
from .lexeme import cleanup_proper_name
def connect_actors(actor_frame, connectivity_sets, connectivity_column):
"""
    :param actor_frame: pandas DataFrame of actors, one row per raw actor
    :param connectivity_sets: mapping of a canonical actor id to the row indices
        that belong to that actor
    :param connectivity_column: name of the column that receives the canonical id
    :return: actor_frame with the connectivity column added
Examples:
same_actors = {
'ccason': [3, 14, 15], 'clipka': [4, 5, 13],
'wfpokorny': [11, 17], 'anshuarya': [0],
'bentsm': [1], 'cbarton': [2], 'dbodor': [6],
'jlecher': [7], 'jgrimbert': [8], 'nalvarez': [9],
'selvik': [10], 'wverhelst': [12], 'gryken': [16],
'github': [18]}
actor_frame = connect_actors(actor_frame, same_actors, 'actor_id')
"""
connectivity = {}
for actor_id, connectivity_set in connectivity_sets.items():
for actor in connectivity_set:
connectivity[actor] = actor_id
actor_frame[connectivity_column] = su.categorize(pd.Series(connectivity))
return actor_frame
def combine_actors(actor_frame, connectivity_column):
"""
    :param actor_frame: pandas DataFrame of actors with a connectivity column
    :param connectivity_column: column to group duplicate actors by
    :return: DataFrame with one row per distinct actor and commit counts summed
Examples:
combine_actors(actor_frame, 'actor_id')
"""
aggregator = {'name': 'first', 'email': 'first',
'author_commits': 'sum',
'committer_commits': 'sum'}
return actor_frame.groupby(connectivity_column).agg(
aggregator).reset_index()
def insert_actor_ids(commit_frame, actor_frame, drop_name_email=True):
actor_columns = ['author_name', 'author_email',
'committer_name', 'committer_email']
cf = commit_frame[actor_columns]
af = actor_frame[['name', 'email', 'actor_id']]
author = pd.merge(
cf, af, left_on=actor_columns[:2],
right_on=('name', 'email'),
how='left')['actor_id']
committer = pd.merge(
cf, af, left_on=actor_columns[2:],
right_on=('name', 'email'),
how='left')['actor_id']
commit_frame.insert(3, 'author', author)
commit_frame.insert(4, 'committer', committer)
if drop_name_email:
commit_frame.drop(actor_columns, axis=1, inplace=True)
return commit_frame
PARSED_EMAIL_FIELDS = ['email', 'valid', 'name', 'domain', 'parsed_name']
ParsedEmail = recordclass('ParsedEmail', PARSED_EMAIL_FIELDS)
PARSED_NAME_FIELDS = ['name', 'name_type']
ParsedName = recordclass('ParsedName', PARSED_NAME_FIELDS)
def proper(name: ParsedName):
return name.name_type == 'proper' or name.name_type == 'personal'
class Actor:
name: str
email: str
actor_id: str
parsed_email: ParsedEmail
parsed_name: ParsedName
def __init__(self, name: str, email: str):
self.name = name
self.email = email
self.actor_id = '{} <{}>'.format(name, email).lower()
self.parsed_email = None
self.parsed_name = None
def __repr__(self):
return "Actor('{}')".format(self.actor_id)
class ActorParser:
role_names = None
def __init__(self):
self.role_names = dict()
def add_role_names(self, name_roles):
for name, role in name_roles:
self.role_names[name] = role
    def parse_name(self, name: str) -> ParsedName:
        """
        Parses a name, splitting on separators such as '.', '_' and camel
        casing, and classifies it as a role name or a proper name.
        :param name: potentially human name
        :return: ParsedName with the normalized name and its type
        """
parsed_name = ParsedName(**su.empty_dict(PARSED_NAME_FIELDS))
lower_name = name.lower()
if lower_name in self.role_names:
parsed_name.name_type = self.role_names[lower_name]
parsed_name.name = lower_name
else:
parsed_name.name_type = 'proper'
parsed_name.name = cleanup_proper_name(name)
return parsed_name
def parse_email(self, email: str) -> ParsedEmail:
lower_email = email.lower()
parsed_email = ParsedEmail(**su.empty_dict(PARSED_EMAIL_FIELDS))
parsed_email.email = lower_email
parsed_email.valid = pyisemail.is_email(lower_email)
email_parts = lower_email.split('@')
parsed_email.name = email_parts[0]
if len(email_parts) == 2:
parsed_email.domain = email_parts[1]
else:
parsed_email.domain = ''
parsed_email.parsed_name = self.parse_name(parsed_email.name)
return parsed_email
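    # Illustrative sketch (hypothetical input): parse_email('Jane.Doe@example.com')
    # yields a ParsedEmail with name 'jane.doe', domain 'example.com', and a
    # parsed_name derived from the local part via parse_name().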
def parse_actor(self, name: str, email: str, name_from_email=True) -> Actor:
parsed_email = self.parse_email(email)
if not name and name_from_email:
name = parsed_email.parsed_name.name
actor = Actor(name, email)
actor.parsed_name = self.parse_name(name)
actor.parsed_email = parsed_email
return actor
ACTOR_SIMILARITY_FIELDS = ['possible',
'identical',
'same_name',
'same_email',
'same_email_name',
'name_ratio',
'email_name_ratio',
'email_domain_ratio',
'name1_email_ratio',
'name2_email_ratio',
'proper_name1',
'proper_name2',
'proper_email_name1',
'proper_email_name2',
'explicit']
ActorSimilarity = recordclass('ActorSimilarity', ACTOR_SIMILARITY_FIELDS)
ACTOR_SIMILARITY_SETTINGS_FIELDS = ['min_name_ratio',
'min_email_domain_ratio',
'min_email_name_ratio',
'min_name_email_ratio']
ActorSimilaritySettings = recordclass('ActorSimilaritySettings',
ACTOR_SIMILARITY_SETTINGS_FIELDS)
class ActorSimilarityGraph:
actor_graph: nx.Graph
settings: ActorSimilaritySettings
def __init__(self, settings=None):
self.actor_graph = nx.Graph()
self.similarity_checks = [self.identical_actors,
self.similar_emails,
self.similar_proper_names]
if settings is None:
settings = ActorSimilaritySettings(min_name_ratio=55,
min_email_domain_ratio=55,
min_email_name_ratio=55,
min_name_email_ratio=55)
self.settings = settings
def add_actor(self, actor: Actor, link_similar=True):
if self.actor_graph.has_node(actor.actor_id):
return
self.actor_graph.add_node(actor.actor_id, actor=actor)
for actor_id, actor_attrs in self.actor_graph.nodes_iter(data=True):
if actor.actor_id == actor_id:
continue
other_actor = actor_attrs['actor']
if link_similar:
similarity = self.evaluate_similarity(actor, other_actor)
if similarity.possible:
self.actor_graph.add_edge(actor.actor_id,
other_actor.actor_id,
similarity=similarity,
confidence=None)
def link_actors(self, actor1_id: str, actor2_id: str,
confidence: float = 1):
self.actor_graph.add_edge(actor1_id, actor2_id, confidence=confidence)
if 'similarity' not in self.actor_graph[actor1_id][actor2_id]:
self.actor_graph[actor1_id][actor2_id]['similarity'] = None
def unlink_actors(self, actor1_id: str, actor2_id: str):
self.actor_graph.remove_edge(actor1_id, actor2_id)
def evaluate_similarity(self, actor: Actor,
other_actor: Actor) -> ActorSimilarity:
similarity = self.build_similarity(actor, other_actor)
checks = list(self.similarity_checks)
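        # Run checks until one marks the pair as a possible match; checks are
        # popped from the end of the list, so the last registered check runs first.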
while not similarity.possible and len(checks):
check = checks.pop()
similarity.possible = check(similarity)
return similarity
def build_similarity(self, actor, other_actor):
similarity = ActorSimilarity(**su.empty_dict(ACTOR_SIMILARITY_FIELDS))
# run comparisons for similarity
similarity.identical = (actor.actor_id == other_actor.actor_id)
similarity.proper_name1 = proper(actor.parsed_name)
similarity.proper_name2 = proper(other_actor.parsed_name)
similarity.proper_email_name1 = proper(actor.parsed_email.parsed_name)
similarity.proper_email_name2 = proper(
other_actor.parsed_email.parsed_name)
similarity.same_name = (actor.parsed_name.name ==
other_actor.parsed_name.name)
similarity.name_ratio = self.compare_names(actor.parsed_name,
other_actor.parsed_name)
similarity.same_email = (actor.parsed_email.email ==
other_actor.parsed_email.email)
similarity.email_domain_ratio = fuzz.ratio(
actor.parsed_email.domain,
other_actor.parsed_email.domain)
similarity.same_email_name = (actor.parsed_email.parsed_name.name ==
other_actor.parsed_email.parsed_name.name)
similarity.email_name_ratio = self.compare_names(
actor.parsed_email.parsed_name,
other_actor.parsed_email.parsed_name)
similarity.name1_email_ratio = self.compare_names(
actor.parsed_name,
other_actor.parsed_email.parsed_name)
similarity.name2_email_ratio = self.compare_names(
actor.parsed_email.parsed_name,
other_actor.parsed_name)
return similarity
@staticmethod
def compare_names(name1: ParsedName, name2: ParsedName):
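        # Two proper names may differ only in word order, so compare them with the
        # order-insensitive token_set_ratio; otherwise use the plain ratio.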
if proper(name1) and proper(name2):
compare = fuzz.token_set_ratio
else:
compare = fuzz.ratio
return compare(name1.name, name2.name)
def similar_emails(self, s: ActorSimilarity):
return (s.same_email or
(s.email_domain_ratio >= self.settings.min_email_domain_ratio
and
s.email_name_ratio >= self.settings.min_email_name_ratio))
def similar_proper_names(self, s: ActorSimilarity):
return (s.proper_name1 and s.proper_name2 and
(s.same_name or s.name_ratio >= self.settings.min_name_ratio))
def similar_name_to_email(self, s: ActorSimilarity):
return (s.name1_email_ratio >= self.settings.min_name_email_ratio or
s.name2_email_ratio >= self.settings.min_name_email_ratio)
@staticmethod
def identical_actors(s: ActorSimilarity):
return s.identical
def group_similar_actors(self):
similar_actor_groups = [list(g) for g in
nx.connected_components(self.actor_graph)]
return similar_actor_groups
def print_similarity_groups(self):
similar_groups = self.group_similar_actors()
for i, group in enumerate(similar_groups):
if len(group) < 2:
continue
print('=== group', i, '===')
for actor1_id, actor2_id, data in self.actor_graph.edges_iter(
nbunch=group, data=True):
print(actor1_id, '->', actor2_id, data)
|
ashapochka/saapy
|
saapy/analysis/actor.py
|
Python
|
apache-2.0
| 11,608
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
import json
import inflection
from os.path import exists, isfile
from requests import RequestException
from mycroft.util.json_helper import load_commented_json, merge_dict
from mycroft.util.log import LOG
from .locations import (DEFAULT_CONFIG, SYSTEM_CONFIG, USER_CONFIG,
WEB_CONFIG_CACHE)
def is_remote_list(values):
''' check if this list corresponds to a backend formatted collection of
dictionaries '''
for v in values:
if not isinstance(v, dict):
return False
if "@type" not in v.keys():
return False
return True
def translate_remote(config, setting):
"""
Translate config names from server to equivalents usable
in mycroft-core.
Args:
config: base config to populate
settings: remote settings to be translated
"""
IGNORED_SETTINGS = ["uuid", "@type", "active", "user", "device"]
for k, v in setting.items():
if k not in IGNORED_SETTINGS:
# Translate the CamelCase values stored remotely into the
# Python-style names used within mycroft-core.
key = inflection.underscore(re.sub(r"Setting(s)?", "", k))
if isinstance(v, dict):
config[key] = config.get(key, {})
translate_remote(config[key], v)
elif isinstance(v, list):
if is_remote_list(v):
if key not in config:
config[key] = {}
translate_list(config[key], v)
else:
config[key] = v
else:
config[key] = v
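# Illustrative sketch (hypothetical payload, not part of the original module):
#
#     config = {}
#     translate_remote(config, {"ttsSettings": {"@type": "tts", "module": "mimic"}})
#     # config is now {"tts": {"module": "mimic"}}: the "Setting(s)" suffix is
#     # stripped, CamelCase becomes snake_case, and "@type" is ignored.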
def translate_list(config, values):
"""
    Translate list formatted by mycroft server.
Args:
config (dict): target config
values (list): list from mycroft server config
"""
for v in values:
module = v["@type"]
if v.get("active"):
config["module"] = module
config[module] = config.get(module, {})
translate_remote(config[module], v)
class LocalConf(dict):
"""
Config dict from file.
"""
def __init__(self, path):
super(LocalConf, self).__init__()
if path:
self.path = path
self.load_local(path)
def load_local(self, path):
"""
Load local json file into self.
Args:
path (str): file to load
"""
if exists(path) and isfile(path):
try:
config = load_commented_json(path)
for key in config:
self.__setitem__(key, config[key])
LOG.debug("Configuration {} loaded".format(path))
except Exception as e:
LOG.error("Error loading configuration '{}'".format(path))
LOG.error(repr(e))
else:
LOG.debug("Configuration '{}' not defined, skipping".format(path))
def store(self, path=None):
"""
Cache the received settings locally. The cache will be used if
the remote is unreachable to load settings that are as close
to the user's as possible
"""
path = path or self.path
with open(path, 'w') as f:
json.dump(self, f, indent=2)
def merge(self, conf):
merge_dict(self, conf)
class RemoteConf(LocalConf):
"""
Config dict fetched from mycroft.ai
"""
def __init__(self, cache=None):
super(RemoteConf, self).__init__(None)
cache = cache or WEB_CONFIG_CACHE
from mycroft.api import is_paired
if not is_paired():
self.load_local(cache)
return
try:
# Here to avoid cyclic import
from mycroft.api import DeviceApi
api = DeviceApi()
setting = api.get_settings()
            location = None
            try:
location = api.get_location()
except RequestException as e:
LOG.error("RequestException fetching remote location: {}"
.format(str(e)))
if exists(cache) and isfile(cache):
location = load_commented_json(cache).get('location')
if location:
setting["location"] = location
# Remove server specific entries
config = {}
translate_remote(config, setting)
for key in config:
self.__setitem__(key, config[key])
self.store(cache)
except RequestException as e:
LOG.error("RequestException fetching remote configuration: {}"
.format(str(e)))
self.load_local(cache)
except Exception as e:
LOG.error("Failed to fetch remote configuration: %s" % repr(e),
exc_info=True)
self.load_local(cache)
class Configuration:
__config = {} # Cached config
__patch = {} # Patch config that skills can update to override config
@staticmethod
def get(configs=None, cache=True):
"""
Get configuration, returns cached instance if available otherwise
builds a new configuration dict.
Args:
configs (list): List of configuration dicts
cache (boolean): True if the result should be cached
"""
if Configuration.__config:
return Configuration.__config
else:
return Configuration.load_config_stack(configs, cache)
@staticmethod
def load_config_stack(configs=None, cache=False):
"""
load a stack of config dicts into a single dict
Args:
configs (list): list of dicts to load
cache (boolean): True if result should be cached
Returns: merged dict of all configuration files
"""
if not configs:
configs = [LocalConf(DEFAULT_CONFIG), RemoteConf(),
LocalConf(SYSTEM_CONFIG), LocalConf(USER_CONFIG),
Configuration.__patch]
else:
# Handle strings in stack
for index, item in enumerate(configs):
if isinstance(item, str):
configs[index] = LocalConf(item)
# Merge all configs into one
base = {}
for c in configs:
merge_dict(base, c)
# copy into cache
if cache:
Configuration.__config.clear()
for key in base:
Configuration.__config[key] = base[key]
return Configuration.__config
else:
return base
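    # Illustrative sketch (hypothetical path): strings in the stack are wrapped in
    # LocalConf automatically, so these two calls are equivalent:
    #     Configuration.load_config_stack(['/etc/mycroft/mycroft.conf'], cache=False)
    #     Configuration.load_config_stack([LocalConf('/etc/mycroft/mycroft.conf')], cache=False)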
@staticmethod
def set_config_update_handlers(bus):
"""Setup websocket handlers to update config.
Args:
bus: Message bus client instance
"""
bus.on("configuration.updated", Configuration.updated)
bus.on("configuration.patch", Configuration.patch)
@staticmethod
def updated(message):
"""
handler for configuration.updated, triggers an update
of cached config.
"""
Configuration.load_config_stack(cache=True)
@staticmethod
def patch(message):
"""
patch the volatile dict usable by skills
Args:
message: Messagebus message should contain a config
in the data payload.
"""
config = message.data.get("config", {})
merge_dict(Configuration.__patch, config)
Configuration.load_config_stack(cache=True)
|
Dark5ide/mycroft-core
|
mycroft/configuration/config.py
|
Python
|
apache-2.0
| 8,223
|
import json
import datetime
from django.core.files.base import ContentFile
from django.core.exceptions import ValidationError
from django.utils.timezone import utc
from ..models import AgentProfile
from ..exceptions import IDNotFoundError, ParamError
from ..utils import etag
class AgentProfileManager():
def __init__(self, agent):
self.Agent = agent
def save_non_json_profile(self, p, profile, request_dict):
p.content_type = request_dict['headers']['CONTENT_TYPE']
p.etag = etag.create_tag(profile.read())
if 'updated' in request_dict['headers'] and request_dict['headers']['updated']:
p.updated = request_dict['headers']['updated']
else:
p.updated = datetime.datetime.utcnow().replace(tzinfo=utc)
# Go to beginning of file
profile.seek(0)
fn = "%s_%s" % (p.agent_id, request_dict.get('filename', p.id))
p.profile.save(fn, profile)
p.save()
def post_profile(self, request_dict):
# get/create profile
p, created = AgentProfile.objects.get_or_create(
profile_id=request_dict['params']['profileId'], agent=self.Agent)
post_profile = request_dict['profile']
# If incoming profile is application/json and if a profile didn't
# already exist with the same agent and profileId
if created:
p.json_profile = post_profile
p.content_type = "application/json"
p.etag = etag.create_tag(post_profile)
# If incoming profile is application/json and if a profile already
# existed with the same agent and profileId
else:
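            # Merge the stored JSON document with the incoming one; keys present in
            # both are overwritten by the newly POSTed values.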
orig_prof = json.loads(p.json_profile)
post_profile = json.loads(post_profile)
merged = json.dumps(
dict(list(orig_prof.items()) + list(post_profile.items())))
p.json_profile = merged
p.etag = etag.create_tag(merged)
# Set updated
if 'updated' in request_dict['headers'] and request_dict['headers']['updated']:
p.updated = request_dict['headers']['updated']
else:
p.updated = datetime.datetime.utcnow().replace(tzinfo=utc)
p.save()
def put_profile(self, request_dict):
# get/create profile
p, created = AgentProfile.objects.get_or_create(
profile_id=request_dict['params']['profileId'], agent=self.Agent)
# Profile being PUT is not json
if "application/json" not in request_dict['headers']['CONTENT_TYPE']:
try:
profile = ContentFile(request_dict['profile'].read())
except:
try:
profile = ContentFile(request_dict['profile'])
except:
profile = ContentFile(str(request_dict['profile']))
etag.check_preconditions(request_dict, p, created)
# If it already exists delete it
if p.profile:
try:
p.profile.delete()
except OSError:
# probably was json before
p.json_profile = {}
self.save_non_json_profile(p, profile, request_dict)
# Profile being PUT is json
else:
# (overwrite existing profile data)
etag.check_preconditions(request_dict, p, created)
the_profile = request_dict['profile']
p.json_profile = the_profile
p.content_type = request_dict['headers']['CONTENT_TYPE']
p.etag = etag.create_tag(the_profile)
# Set updated
if 'updated' in request_dict['headers'] and request_dict['headers']['updated']:
p.updated = request_dict['headers']['updated']
else:
p.updated = datetime.datetime.utcnow().replace(tzinfo=utc)
p.save()
def get_profile(self, profile_id):
try:
return self.Agent.agentprofile_set.get(profile_id=profile_id)
except:
err_msg = 'There is no agent profile associated with the id: %s' % profile_id
raise IDNotFoundError(err_msg)
def get_profile_ids(self, since=None):
ids = []
if since:
try:
# this expects iso6801 date/time format
# "2013-02-15T12:00:00+00:00"
profs = self.Agent.agentprofile_set.filter(updated__gt=since)
except ValidationError:
err_msg = 'Since field is not in correct format for retrieval of agent profiles'
raise ParamError(err_msg)
ids = [p.profile_id for p in profs]
else:
ids = self.Agent.agentprofile_set.values_list(
'profile_id', flat=True)
return ids
def delete_profile(self, profile_id):
try:
self.get_profile(profile_id).delete()
# we don't want it anyway
except AgentProfile.DoesNotExist:
pass
except IDNotFoundError:
pass
|
adlnet/ADL_LRS
|
lrs/managers/AgentProfileManager.py
|
Python
|
apache-2.0
| 5,034
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Monkey-patching to add multiprocessing support for coverage.py"""
import multiprocessing
import multiprocessing.process
import os
from coverage import env
from coverage.misc import contract
# An attribute that will be set on the module to indicate that it has been
# monkey-patched.
PATCHED_MARKER = "_coverage$patched"
if env.PYVERSION >= (3, 4):
OriginalProcess = multiprocessing.process.BaseProcess
else:
OriginalProcess = multiprocessing.Process
original_bootstrap = OriginalProcess._bootstrap
class ProcessWithCoverage(OriginalProcess):
"""A replacement for multiprocess.Process that starts coverage."""
def _bootstrap(self):
"""Wrapper around _bootstrap to start coverage."""
from coverage import Coverage # avoid circular import
cov = Coverage(data_suffix=True)
cov._warn_preimported_source = False
cov.start()
debug = cov._debug
try:
if debug.should("multiproc"):
debug.write("Calling multiprocessing bootstrap")
return original_bootstrap(self)
finally:
if debug.should("multiproc"):
debug.write("Finished multiprocessing bootstrap")
cov.stop()
cov.save()
if debug.should("multiproc"):
debug.write("Saved multiprocessing data")
class Stowaway(object):
"""An object to pickle, so when it is unpickled, it can apply the monkey-patch."""
def __init__(self, rcfile):
self.rcfile = rcfile
def __getstate__(self):
return {'rcfile': self.rcfile}
def __setstate__(self, state):
patch_multiprocessing(state['rcfile'])
@contract(rcfile=str)
def patch_multiprocessing(rcfile):
"""Monkey-patch the multiprocessing module.
This enables coverage measurement of processes started by multiprocessing.
This involves aggressive monkey-patching.
`rcfile` is the path to the rcfile being used.
"""
if hasattr(multiprocessing, PATCHED_MARKER):
return
if env.PYVERSION >= (3, 4):
OriginalProcess._bootstrap = ProcessWithCoverage._bootstrap
else:
multiprocessing.Process = ProcessWithCoverage
# Set the value in ProcessWithCoverage that will be pickled into the child
# process.
os.environ["COVERAGE_RCFILE"] = rcfile
# When spawning processes rather than forking them, we have no state in the
# new process. We sneak in there with a Stowaway: we stuff one of our own
# objects into the data that gets pickled and sent to the sub-process. When
    # the Stowaway is unpickled, its __setstate__ method is called, which
# re-applies the monkey-patch.
# Windows only spawns, so this is needed to keep Windows working.
try:
from multiprocessing import spawn
original_get_preparation_data = spawn.get_preparation_data
except (ImportError, AttributeError):
pass
else:
def get_preparation_data_with_stowaway(name):
"""Get the original preparation data, and also insert our stowaway."""
d = original_get_preparation_data(name)
d['stowaway'] = Stowaway(rcfile)
return d
spawn.get_preparation_data = get_preparation_data_with_stowaway
setattr(multiprocessing, PATCHED_MARKER, True)
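# Illustrative usage sketch (an assumption, not part of coverage.py itself): a tool
# that wants coverage of worker processes could apply the patch before spawning:
#
#     patch_multiprocessing(rcfile=".coveragerc")
#     with multiprocessing.Pool(4) as pool:
#         pool.map(work, items)
#
# Each patched worker starts Coverage(data_suffix=True) and saves its own data file.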
|
blueyed/coveragepy
|
coverage/multiproc.py
|
Python
|
apache-2.0
| 3,478
|
# -*- encoding: utf-8 -*-
from inspect import isgenerator
from .testcase import DatatableViewTestCase
from .test_app import models
from ..exceptions import ColumnError
from ..datatables import Datatable, ValuesDatatable
from ..views import DatatableJSONResponseMixin, DatatableView
from .. import columns
class DatatableTests(DatatableViewTestCase):
def test_normalize_config(self):
dt = Datatable([], '/')
dt.configure()
self.assertEqual(dt.config['hidden_columns'], [])
self.assertEqual(dt.config['search_fields'], [])
self.assertEqual(dt.config['unsortable_columns'], [])
self.assertEqual(dt.config['search'], set())
self.assertEqual(dt.config['start_offset'], 0)
self.assertEqual(dt.config['page_length'], 25)
self.assertEqual(dt.config['ordering'], None)
def test_column_names_list_raises_unknown_columns(self):
class DT(Datatable):
class Meta:
model = models.ExampleModel
columns = ['fake']
dt = DT([], '/')
with self.assertRaises(ColumnError) as cm:
dt.configure()
self.assertEqual(str(cm.exception), "Unknown column name(s): ('fake',)")
def test_column_names_list_finds_local_fields(self):
class DT(Datatable):
class Meta:
model = models.ExampleModel
columns = ['name']
class NoError(BaseException):
pass
with self.assertRaises(NoError):
dt = DT([], '/')
raise NoError()
def test_column_names_list_raises_related_columns(self):
# This was the old way of including related data, but this is no longer supported
class DT(Datatable):
class Meta:
model = models.ExampleModel
columns = ['related__name']
dt = DT([], '/')
with self.assertRaises(ColumnError) as cm:
dt.configure()
self.assertEqual(str(cm.exception), "Unknown column name(s): ('related__name',)")
def test_column_names_list_finds_related_fields(self):
class DT(Datatable):
related = columns.TextColumn("Related", ['related__name'])
class Meta:
model = models.ExampleModel
columns = ['name', 'related']
class NoError(BaseException):
pass
with self.assertRaises(NoError):
dt = DT([], '/')
raise NoError()
def test_get_ordering_splits(self):
# Verify empty has blank db-backed list and virtual list
dt = Datatable([], '/')
dt.configure()
self.assertEqual(dt.get_ordering_splits(), ([], []))
class DT(Datatable):
fake = columns.TextColumn("Fake", sources=['get_absolute_url'])
class Meta:
model = models.ExampleModel
columns = ['name', 'fake']
# Verify a fake field name ends up separated from the db-backed field
dt = DT([], '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # iSortingCols': '1',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), (['name'], []))
# Verify ['name', 'fake'] ordering sends 'name' to db sort list, but keeps 'fake' in manual
# sort list.
dt = DT([], '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc', 'order[1][column]': '1', 'order[1][dir]': 'asc'}) # 'iSortingCols': '2',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), (['name'], ['fake']))
# Verify a fake field name as the sort column correctly finds no db sort fields
dt = DT([], '/', query_config={'order[0][column]': '1', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), ([], ['fake']))
# Verify ['fake', 'name'] ordering sends both fields to manual sort list
dt = DT([], '/', query_config={'order[0][column]': '1', 'order[0][dir]': 'asc', 'order[1][column]': '0', 'order[1][dir]': 'asc'}) # 'iSortingCols': '2',
dt.configure()
self.assertEqual(dt.get_ordering_splits(), ([], ['fake', 'name']))
def test_get_records_populates_cache(self):
models.ExampleModel.objects.create(name="test name")
queryset = models.ExampleModel.objects.all()
dt = Datatable(queryset, '/')
dt.get_records()
self.assertIsNotNone(dt._records)
records = dt._records
# _records doesn't change when run again
dt.get_records()
self.assertEqual(dt._records, records)
def test_populate_records_searches(self):
obj1 = models.ExampleModel.objects.create(name="test name 1", value=False)
obj2 = models.ExampleModel.objects.create(name="test name 2", value=True)
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = models.ExampleModel
columns = ['name', 'value']
dt = DT(queryset, '/')
# Sanity check for correct initial queryset
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), list(queryset))
# Verify a search eliminates items from _records
dt = DT(queryset, '/', query_config={'search[value]': 'test name 1'})
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), [obj1])
def test_populate_records_sorts(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
obj2 = models.ExampleModel.objects.create(name="test name 2")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = models.ExampleModel
columns = ['name']
dt = DT(queryset, '/')
# Sanity check for correct initial queryset
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), list(queryset))
# Verify a sort changes the ordering of the records list
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # # 'iSortingCols': '1',
dt.populate_records()
self.assertIsNotNone(dt._records)
self.assertEqual(list(dt._records), [obj2, obj1])
def test_populate_records_avoids_column_callbacks(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
def preload_record_data(self, obj):
raise Exception("Don't run this")
dt = DT(queryset, '/')
try:
dt.populate_records()
except Exception as e:
if str(e) == "Don't run this":
raise AssertionError("Per-row callbacks being executed!")
raise
def test_preload_record_data_calls_view(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
class Dummy(object):
def preload_record_data(self, obj):
raise Exception("We did it")
dt = Datatable(queryset, '/', callback_target=Dummy())
with self.assertRaises(Exception) as cm:
dt.get_records()
self.assertEqual(str(cm.exception), "We did it")
def test_sort_defaults_to_meta_ordering(self):
# Defined so that 'pk' order != 'name' order
obj1 = models.ExampleModel.objects.create(name="b")
obj2 = models.ExampleModel.objects.create(name="a")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
name = columns.TextColumn("Name", sources=['name'])
class Meta:
model = models.ExampleModel
columns = ['name']
ordering = ['name']
dt = DT(queryset, '/')
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['name'], []))
self.assertEqual(list(dt._records), [obj2, obj1])
# this is to keep DatatableView class from overriding the Meta ordering in Datatable
class DTV(DatatableView):
datatable_class = DT
model = models.ExampleModel
dtv = DTV().get_datatable(url='/')
self.assertIn('<th data-name="name" data-config-sortable="true" data-config-sorting="0,0,asc" data-config-visible="true">Name</th>', dtv.__str__())
class DT(Datatable):
name = columns.TextColumn("Name", sources=['name'])
class Meta:
model = models.ExampleModel
columns = ['name']
ordering = ['-name']
dt = DT(queryset, '/')
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-name'], []))
self.assertEqual(list(dt._records), [obj1, obj2])
def test_sort_prioritizes_db_source(self):
# Defined so that 'pk' order != 'name' order
obj1 = models.ExampleModel.objects.create(name="test name 2")
obj2 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
name = columns.TextColumn("Name", sources=['name'])
class Meta:
model = models.ExampleModel
columns = ['name']
ordering = ['pk']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['name'], []))
self.assertEqual(list(dt._records), [obj2, obj1])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-name'], []))
self.assertEqual(list(dt._records), [obj1, obj2])
def test_sort_uses_all_sources(self):
from datetime import timedelta
obj1 = models.ExampleModel.objects.create(name="a")
obj2 = models.ExampleModel.objects.create(name="a")
obj3 = models.ExampleModel.objects.create(name="b")
obj1.date_created = obj1.date_created + timedelta(days=3)
obj2.date_created = obj2.date_created + timedelta(days=1)
obj3.date_created = obj3.date_created + timedelta(days=2)
obj1.save()
obj2.save()
obj3.save()
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
my_column = columns.TextColumn("Data", sources=['name', 'date_created', 'pk'])
class Meta:
model = models.ExampleModel
columns = ['my_column']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['my_column'], []))
self.assertEqual(list(dt._records), [obj2, obj1, obj3])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-my_column'], []))
self.assertEqual(list(dt._records), [obj3, obj1, obj2])
# Swap the order of 'date_created' and 'name' fields in the sources, which will alter the
# sort results.
class DT(Datatable):
my_column = columns.TextColumn("Data", sources=['date_created', 'name', 'pk'])
class Meta:
model = models.ExampleModel
columns = ['my_column']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['my_column'], []))
self.assertEqual(list(dt._records), [obj2, obj3, obj1])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-my_column'], []))
self.assertEqual(list(dt._records), [obj1, obj3, obj2])
def test_sort_ignores_virtual_sources_when_mixed(self):
from datetime import timedelta
obj1 = models.ExampleModel.objects.create(name="a")
obj2 = models.ExampleModel.objects.create(name="b")
obj3 = models.ExampleModel.objects.create(name="a")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
my_column = columns.TextColumn("Data", sources=['name', 'get_absolute_url'])
class Meta:
model = models.ExampleModel
columns = ['my_column']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['my_column'], []))
self.assertEqual(list(dt._records), [obj1, obj3, obj2])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), (['-my_column'], []))
self.assertEqual(list(dt._records), [obj2, obj1, obj3]) # pk is natural ordering 1,3 here
# Swap the sources order, but we expect the same result
class DT(Datatable):
my_column = columns.TextColumn("Data", sources=['get_absolute_url', 'name'], processor='get_data')
class Meta:
model = models.ExampleModel
columns = ['my_column']
def get_data(self, obj, **kwargs):
# Return data that would make the sort order wrong if it were consulted for sorting
return obj.pk # tracks with get_absolute_url
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(list(dt._records), [obj1, obj3, obj2])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(list(dt._records), [obj2, obj1, obj3]) # pk is natural ordering 1,3 here
def test_sort_uses_virtual_sources_when_no_db_sources_available(self):
from datetime import timedelta
obj1 = models.ExampleModel.objects.create(name="a")
obj2 = models.ExampleModel.objects.create(name="b")
obj3 = models.ExampleModel.objects.create(name="c")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
pk = columns.TextColumn("Data", sources=['get_negative_pk'])
class Meta:
model = models.ExampleModel
columns = ['pk']
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'asc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), ([], ['pk']))
self.assertEqual(list(dt._records), [obj3, obj2, obj1])
dt = DT(queryset, '/', query_config={'order[0][column]': '0', 'order[0][dir]': 'desc'}) # 'iSortingCols': '1',
dt.populate_records()
self.assertEqual(dt.get_ordering_splits(), ([], ['-pk']))
self.assertEqual(list(dt._records), [obj1, obj2, obj3])
def test_get_object_pk(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
dt = Datatable(queryset, '/')
self.assertEqual(dt.get_object_pk(obj1), obj1.pk)
def test_get_extra_record_data_passes_through_to_object_serialization(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
def get_extra_record_data(self, obj):
return {'custom': 'data'}
dt = DT([], '/')
data = dt.get_record_data(obj1)
self.assertIn('_extra_data', data)
self.assertIn('custom', data['_extra_data'])
self.assertEqual(data['_extra_data']['custom'], 'data')
def test_get_extra_record_data_passes_through_to_json_response(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
def get_extra_record_data(self, obj):
return {'custom': 'data'}
class FakeRequest(object):
method = 'GET'
GET = {'sEcho': 0}
dt = DT(queryset, '/')
view = DatatableJSONResponseMixin()
view.request = FakeRequest()
data = view.get_json_response_object(dt)
self.assertIn('data', data)
self.assertIn('DT_RowData', data['data'][0])
self.assertEqual(data['data'][0]['DT_RowData'], {'custom': 'data'})
def test_get_column_value_forwards_to_column_class(self):
class CustomColumn1(columns.Column):
def value(self, obj, **kwargs):
return "first"
class CustomColumn2(columns.Column):
def value(self, obj, **kwargs):
return "second"
class DT(Datatable):
fake1 = CustomColumn1("Fake1", sources=['get_absolute_url'])
fake2 = CustomColumn2("Fake2", sources=['get_absolute_url'])
class Meta:
model = models.ExampleModel
columns = ['name', 'fake1', 'fake2']
obj1 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
dt = DT(queryset, '/')
data = dt.get_record_data(obj1)
self.assertIn('1', data)
self.assertIn(data['1'], 'first')
self.assertIn('2', data)
self.assertIn(data['2'], 'second')
def test_get_processor_method(self):
class Dummy(object):
def fake_callback(self):
pass
view = Dummy()
# Test no callback given
dt = Datatable([], '/')
f = dt.get_processor_method(columns.Column("Fake", sources=['fake']), i=0)
self.assertEqual(f, None)
class DT(Datatable):
def fake_callback(self):
pass
column = columns.Column("Fake", sources=['fake'], processor='fake_callback')
# Test callback found on self
dt = DT([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.fake_callback)
# Test callback found on callback_target
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.fake_callback)
def test_get_processor_method_returns_direct_callable(self):
def fake_callback():
pass
column = columns.Column("Fake", sources=[], processor=fake_callback)
# Test no callback given
dt = Datatable([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, fake_callback)
def test_get_processor_method_finds_implied_callback(self):
class DummyNamed(object):
def get_column_fake_data(self):
pass
class DummyIndexed(object):
def get_column_0_data(self):
pass
class DummyBoth(object):
def get_column_fake_data(self):
pass
def get_column_0_data(self):
pass
column = columns.Column("Fake", sources=[])
column.name = 'fake'
# Test implied named callback found first
view = DummyNamed()
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.get_column_fake_data)
# Test implied named callback found first
view = DummyIndexed()
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.get_column_0_data)
# Test implied named callback found first
view = DummyBoth()
dt = Datatable([], '/', callback_target=view)
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, view.get_column_fake_data)
class DTNamed(Datatable):
def get_column_fake_data(self):
pass
class DTIndexed(Datatable):
def get_column_0_data(self):
pass
class DTBoth(Datatable):
def get_column_fake_data(self):
pass
def get_column_0_data(self):
pass
# Test implied named callback found first
dt = DTNamed([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.get_column_fake_data)
# Test implied named callback found first
dt = DTIndexed([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.get_column_0_data)
# Test implied named callback found first
dt = DTBoth([], '/')
f = dt.get_processor_method(column, i=0)
self.assertEqual(f, dt.get_column_fake_data)
def test_iter_datatable_yields_columns(self):
class CustomColumn1(columns.Column):
pass
class CustomColumn2(columns.Column):
pass
class DT(Datatable):
fake1 = CustomColumn1("Fake1", sources=['get_absolute_url'])
fake2 = CustomColumn2("Fake2", sources=['get_absolute_url'])
class Meta:
model = models.ExampleModel
columns = ['name', 'fake1', 'fake2']
dt = DT([], '/')
self.assertEqual(isgenerator(dt.__iter__()), True)
self.assertEqual(list(dt), [dt.columns['name'], dt.columns['fake1'], dt.columns['fake2']])
def test_search_term_basic(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
obj2 = models.ExampleModel.objects.create(name="test name 2")
obj3 = models.ExampleModel.objects.create(name="test name 12")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = models.ExampleModel
columns = ['name']
dt = DT(queryset, '/', query_config={'search[value]': 'test'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'name'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': '1'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj3])
dt = DT(queryset, '/', query_config={'search[value]': '2'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': '12'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj3])
dt = DT(queryset, '/', query_config={'search[value]': '3'})
dt.populate_records()
self.assertEquals(list(dt._records), [])
def test_search_term_boolean(self):
obj1 = models.ExampleModel.objects.create(name="test name 1", value=True)
obj2 = models.ExampleModel.objects.create(name="test name 2", value=True)
obj3 = models.ExampleModel.objects.create(name="test name 12", value=False)
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
senior = columns.BooleanColumn('Senior:', 'value')
class Meta:
model = models.ExampleModel
columns = ['name', 'senior']
dt = DT(queryset, '/', query_config={'search[value]': 'True'})
dt.populate_records()
self.assertEquals(len(list(dt._records)), 2)
dt = DT(queryset, '/', query_config={'search[value]': 'false'})
dt.populate_records()
self.assertEquals(len(list(dt._records)), 1)
dt = DT(queryset, '/', query_config={'search[value]': 'SENIOR'})
dt.populate_records()
self.assertEquals(len(list(dt._records)), 2)
dt = DT(queryset, '/', query_config={'search[value]': 'menior'})
dt.populate_records()
self.assertEquals(len(list(dt._records)), 0)
def test_search_multiple_terms_use_AND(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
obj2 = models.ExampleModel.objects.create(name="test name 2")
obj3 = models.ExampleModel.objects.create(name="test name 12")
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
class Meta:
model = models.ExampleModel
columns = ['name']
dt = DT(queryset, '/', query_config={'search[value]': 'test name'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 1'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 2'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj2, obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 12'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj3])
dt = DT(queryset, '/', query_config={'search[value]': 'test 3'})
dt.populate_records()
self.assertEquals(list(dt._records), [])
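    # A minimal sketch of the AND-of-OR semantics exercised above (an assumption
    # about equivalent queryset logic, not the library's actual implementation):
    # each space-separated term must match at least one searched field, and the
    # per-term filters are combined with AND.
    def _manual_and_search(self, queryset, search_value, fields=('name',)):
        from django.db.models import Q
        for term in search_value.split():
            term_q = Q()
            for field in fields:
                term_q |= Q(**{'%s__icontains' % field: term})
            queryset = queryset.filter(term_q)
        return queryset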
def test_search_term_queries_all_columns(self):
r1 = models.RelatedModel.objects.create(name="test related 1 one")
r2 = models.RelatedModel.objects.create(name="test related 2 two")
obj1 = models.ExampleModel.objects.create(name="test name 1", related=r1)
obj2 = models.ExampleModel.objects.create(name="test name 2", related=r2)
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
related = columns.TextColumn("Related", ['related__name'])
class Meta:
model = models.ExampleModel
columns = ['name', 'related']
dt = DT(queryset, '/', query_config={'search[value]': 'test'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test name'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test 2'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'related 2'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test one'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1])
dt = DT(queryset, '/', query_config={'search[value]': '2 two'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test three'})
dt.populate_records()
self.assertEquals(list(dt._records), [])
def test_search_term_queries_extra_fields(self):
r1 = models.RelatedModel.objects.create(name="test related 1 one")
r2 = models.RelatedModel.objects.create(name="test related 2 two")
obj1 = models.ExampleModel.objects.create(name="test name 1", related=r1)
obj2 = models.ExampleModel.objects.create(name="test name 2", related=r2)
queryset = models.ExampleModel.objects.all()
class DT(Datatable):
related = columns.TextColumn("Related", ['related__name'])
class Meta:
model = models.ExampleModel
columns = ['related']
search_fields = ['name']
dt = DT(queryset, '/', query_config={'search[value]': 'test'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj1, obj2])
dt = DT(queryset, '/', query_config={'search[value]': 'test name 2'})
dt.populate_records()
self.assertEquals(list(dt._records), [obj2])
class ValuesDatatableTests(DatatableViewTestCase):
def test_get_object_pk(self):
obj1 = models.ExampleModel.objects.create(name="test name 1")
queryset = models.ExampleModel.objects.all()
dt = ValuesDatatable(queryset, '/')
obj_data = queryset.values('pk')[0]
self.assertEqual(dt.get_object_pk(obj_data), obj1.pk)
|
doganmeh/django-datatable-view
|
datatableview/tests/test_datatables.py
|
Python
|
apache-2.0
| 29,048
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This test checks if dynamic loading of library into MXNet is successful
import os
import platform
import mxnet as mx
import numpy as np
from mxnet import nd
from mxnet.gluon import nn
from mxnet.base import MXNetError
from mxnet.test_utils import download, is_cd_run, assert_almost_equal, default_context
import pytest
base_path = os.path.join(os.path.dirname(__file__), "../../..")
def check_platform():
return platform.machine() not in ['x86_64', 'AMD64']
@pytest.mark.skipif(check_platform(), reason="not all machine types supported")
@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
def test_custom_op():
# possible places to find library file
if (os.name=='posix'):
lib = 'libcustomop_lib.so'
if os.path.exists(lib):
fname = lib
elif os.path.exists(os.path.join(base_path,'build/'+lib)):
fname = os.path.join(base_path,'build/'+lib)
else:
raise MXNetError("library %s not found " % lib)
elif (os.name=='nt'):
lib = 'libcustomop_lib.dll'
if os.path.exists('windows_package\\lib\\'+lib):
fname = 'windows_package\\lib\\'+lib
else:
raise MXNetError("library %s not found " % lib)
fname = os.path.abspath(fname)
# load the library containing gemm custom operators
mx.library.load(fname)
# test symbol 2D gemm custom operators
s = mx.sym.Variable('s')
t = mx.sym.Variable('t')
c = mx.sym.my_gemm(s,t)
d = mx.sym.state_gemm(s,t)
# baseline gemm from MXNet
base = mx.sym.linalg.gemm2(s,t)
# get some random input matrices
dim_n, dim_k, dim_m = tuple(np.random.randint(1, 5, size=3))
mat1 = mx.nd.random.uniform(-10, 10, shape=(dim_n, dim_k), ctx=mx.cpu())
mat2 = mx.nd.random.uniform(-10, 10, shape=(dim_k, dim_m), ctx=mx.cpu())
# intermediate ndarrays to be populated by gradient compute
in_grad1 = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
in_grad2 = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
in_grad_base = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
exe1 = c.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad1)
exe2 = d.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad2)
exe_base = base.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad_base)
out1 = exe1.forward()
out2 = exe2.forward()
# test stateful operator by calling it multiple times
out2 = exe2.forward()
out_base = exe_base.forward()
# check that forward compute matches one executed by MXNet
assert_almost_equal(out_base[0].asnumpy(), out1[0].asnumpy(), rtol=1e-3, atol=1e-3)
assert_almost_equal(out_base[0].asnumpy(), out2[0].asnumpy(), rtol=1e-3, atol=1e-3)
# random output grad ndarray for gradient update
out_grad = mx.nd.ones((dim_n, dim_m), ctx=mx.cpu())
exe1.backward([out_grad])
exe2.backward([out_grad])
exe_base.backward([out_grad])
# check that gradient compute matches one executed by MXNet
assert_almost_equal(in_grad_base[0].asnumpy(), in_grad1[0].asnumpy(), rtol=1e-3, atol=1e-3)
assert_almost_equal(in_grad_base[0].asnumpy(), in_grad2[0].asnumpy(), rtol=1e-3, atol=1e-3)
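# A minimal usage sketch (not part of the original test; assumes the custom-op
# library above has already been built and that the loaded operators are exposed
# through the imperative ndarray API as mx.nd.my_gemm):
def _custom_gemm_sketch(lib_path):
    mx.library.load(lib_path)  # registers my_gemm / state_gemm
    a = mx.nd.random.uniform(-1, 1, shape=(2, 3), ctx=mx.cpu())
    b = mx.nd.random.uniform(-1, 1, shape=(3, 4), ctx=mx.cpu())
    return mx.nd.my_gemm(a, b)  # assumed imperative entry point for the loaded op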
@pytest.mark.skipif(check_platform(), reason="not all machine types supported")
@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
def test_subgraph():
# possible places to find library file
if (os.name=='posix'):
lib = 'libsubgraph_lib.so'
if os.path.exists(lib):
# plain make build, when run in the CI
fname = lib
elif os.path.exists(os.path.join(base_path, 'build/'+lib)):
# plain cmake build when run in the CI
fname = os.path.join(base_path, 'build/'+lib)
else:
raise MXNetError("library %s not found " % lib)
elif (os.name=='nt'):
lib = 'libsubgraph_lib.dll'
if os.path.exists('windows_package\\lib\\'+lib):
# plain make build, when run in the CI
fname = 'windows_package\\lib\\'+lib
else:
# plain cmake build when run in the CI
raise MXNetError("library %s not found " % lib)
fname = os.path.abspath(fname)
mx.library.load(fname)
# test simple graph with add, exp and log operators, library supports exp/log
a = mx.sym.var('a')
b = mx.sym.var('b')
c = a + b
d = mx.sym.exp(c)
sym = mx.sym.log(d)
args = {'a':mx.nd.ones((3,2),ctx=mx.cpu()), 'b':mx.nd.ones((3,2),ctx=mx.cpu())}
arg_array = [mx.nd.ones((3,2),dtype='float32',ctx=mx.cpu()),
mx.nd.ones((3,2),dtype='float32',ctx=mx.cpu())]
# baseline - regular execution in MXNet
exe = sym.bind(ctx=mx.cpu(), args=args)
out = exe.forward()
    # without propagating shapes/types, passing a custom option to subgraph prop "myOpt"
# should not create subgraph since subgraph prop requires type info
mysym1 = sym.optimize_for("myProp", myOpt='yello')
exe1 = mysym1.bind(ctx=mx.cpu(), args=args)
out1 = exe1.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out1[0].asnumpy(), rtol=1e-3, atol=1e-3)
    # with propagating shapes/types, rejecting subgraph
# this tests creating the subgraph and having the subgraph prop reject it
mysym2 = sym.optimize_for("myProp", arg_array, reject=True)
exe2 = mysym2.bind(ctx=mx.cpu(), args=args)
out2 = exe2.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out2[0].asnumpy(), rtol=1e-3, atol=1e-3)
    # with propagating shapes/types
mysym3 = sym.optimize_for("myProp",arg_array)
exe3 = mysym3.bind(ctx=mx.cpu(), args=args)
out3 = exe3.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out3[0].asnumpy(), rtol=1e-3, atol=1e-3)
# Gluon Hybridize partitioning with shapes/types
sym_block = nn.SymbolBlock(sym, [a,b])
sym_block.initialize()
sym_block.hybridize(backend='myProp')
out4 = sym_block(mx.nd.ones((3,2)),mx.nd.ones((3,2)))
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out4[0].asnumpy(), rtol=1e-3, atol=1e-3)
# Gluon Hybridize partitioning with shapes/types
sym_block2 = nn.SymbolBlock(sym, [a,b])
sym_block2.initialize()
a_data = mx.nd.ones((3,2))
b_data = mx.nd.ones((3,2))
sym_block2.optimize_for(a_data, b_data, backend='myProp')
sym_block2.export('optimized')
sym_block3 = nn.SymbolBlock.imports('optimized-symbol.json',['a','b'],
'optimized-0000.params')
out5 = sym_block3(a_data, b_data)
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out5[0].asnumpy(), rtol=1e-3, atol=1e-3)
|
zhreshold/mxnet
|
tests/python/unittest/test_extensions.py
|
Python
|
apache-2.0
| 7,787
|
import sys
sys.path.insert(1, "../../")
import h2o, tests
def headers():
headers = h2o.import_file(h2o.locate("smalldata/airlines/allyears2k_headers_only.csv"))
headers_and = h2o.import_file(h2o.locate("smalldata/airlines/allyears2k.zip"))
headers_and.setNames(headers.names)
    print(headers.names)
    print(headers_and.names)
assert headers.names == headers_and.names, "Expected the same column names but got {0} and {1}". \
format(headers.names, headers_and.names)
if __name__ == "__main__":
tests.run_test(sys.argv, headers)
|
tarasane/h2o-3
|
h2o-py/tests/testdir_misc/pyunit_headers.py
|
Python
|
apache-2.0
| 570
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for struct2tensor.ops.struct2tensor_ops."""
import itertools
from absl.testing import absltest
from absl.testing import parameterized
import numpy as np
from struct2tensor.ops import struct2tensor_ops
from struct2tensor.test import test_extension_pb2
from struct2tensor.test import test_map_pb2
from struct2tensor.test import test_pb2
from struct2tensor.test import test_proto3_pb2
import tensorflow as tf
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import
INDEX = "index"
VALUE = "value"
_EQUIJOIN_TEST_CASES = [{
"testcase_name": "simple",
"a": [0, 0, 1, 1, 2, 3, 4],
"b": [0, 0, 2, 2, 3],
"expected_index_a": [0, 0, 1, 1, 4, 4, 5],
"expected_index_b": [0, 1, 0, 1, 2, 3, 4]
}, {
"testcase_name": "simple_2",
"a": [0, 1, 1, 2],
"b": [0, 1, 2],
"expected_index_a": [0, 1, 2, 3],
"expected_index_b": [0, 1, 1, 2]
}, {
"testcase_name": "empty",
"a": [],
"b": [0, 1, 2],
"expected_index_a": [],
"expected_index_b": []
}, {
"testcase_name": "empty_2",
"a": [0, 1, 1, 2],
"b": [],
"expected_index_a": [],
"expected_index_b": []
}, {
"testcase_name": "both_empty",
"a": [],
"b": [],
"expected_index_a": [],
"expected_index_b": []
}, {
"testcase_name": "no_overlap",
"a": [0, 1, 1, 2],
"b": [3, 4, 5],
"expected_index_a": [],
"expected_index_b": []
}, {
"testcase_name": "broadcast",
"a": [0, 1, 1],
"b": [0, 1, 2],
"expected_index_a": [0, 1, 2],
"expected_index_b": [0, 1, 1]
}]
def _parse_full_message_level_as_dict(proto_list):
serialized = [proto.SerializeToString() for proto in proto_list]
parsed_field_list = struct2tensor_ops.parse_full_message_level(
tf.constant(serialized), proto_list[0].DESCRIPTOR)
parsed_field_dict = {}
for parsed_field in parsed_field_list:
parsed_field_dict[parsed_field.field_name] = parsed_field
return parsed_field_dict
def _make_dict_runnable(level_as_dict):
"""Prepares output of parse_full_message_level_as_dict for evaluate."""
result = {}
for key, value in level_as_dict.items():
local_dict = {}
local_dict[INDEX] = value.index
local_dict[VALUE] = value.value
result[key] = local_dict
return result
def _get_full_message_level_runnable(proto_list):
return _make_dict_runnable(_parse_full_message_level_as_dict(proto_list))
# TODO(martinz): test empty tensors for decode_proto_sparse more thoroughly.
@test_util.run_all_in_graph_and_eager_modes
class PrensorOpsTest(parameterized.TestCase, tf.test.TestCase):
def test_out_of_order_fields(self):
fragments = [
test_pb2.Event(query_token=["aaa"]).SerializeToString(),
test_pb2.Event(query_token=["bbb"]).SerializeToString(),
test_pb2.Event(event_id="abc").SerializeToString(),
test_pb2.Event(action_mask=[False, True]).SerializeToString(),
]
# Test against all 4! permutations of fragments, and for each permutation
# test parsing all possible combination of 4 fields.
for indices in itertools.permutations(range(len(fragments))):
proto = b"".join([fragments[i] for i in indices])
for i in indices:
if i == 0:
expected_query_tokens = [b"aaa", b"bbb"]
break
if i == 1:
expected_query_tokens = [b"bbb", b"aaa"]
break
# "query" is not on wire at all.
all_fields_to_parse = ["query_token", "event_id", "action_mask", "query"]
expected_field_value = {
"action_mask": [False, True],
"query_token": expected_query_tokens,
"event_id": [b"abc"],
"query": np.array([], dtype=np.object),
}
for num_fields_to_parse in range(len(all_fields_to_parse)):
for comb in itertools.combinations(
all_fields_to_parse, num_fields_to_parse):
parsed_fields = struct2tensor_ops.parse_message_level(
[proto], test_pb2.Event.DESCRIPTOR, comb)
self.assertLen(parsed_fields, len(comb))
for f in parsed_fields:
self.assertAllEqual(
expected_field_value[f.field_name], f.value,
"field: {}, permutation: {}, field_to_parse: {}".format(
f.field_name, indices, comb))
def test_out_of_order_repeated_fields_1(self):
# This is a 2-1-2 wire number pattern.
proto = (
test_pb2.Event(query_token=["aaa"]).SerializeToString() +
test_pb2.Event(event_id="abc").SerializeToString() +
test_pb2.Event(query_token=["bbb"]).SerializeToString())
expected_field_value = {
"query_token": [b"aaa", b"bbb"],
"event_id": [b"abc"]
}
for fields_to_parse in [["query_token"], ["event_id"],
["query_token", "event_id"]]:
parsed_fields = struct2tensor_ops.parse_message_level(
[proto], test_pb2.Event.DESCRIPTOR, fields_to_parse)
for f in parsed_fields:
self.assertAllEqual(expected_field_value[f.field_name], f.value)
def test_out_of_order_repeated_fields_2(self):
    # This follows a 3-5-3 wire number pattern, where 3 and 4 are the parsed fields.
proto = (
test_pb2.Event(query_token=["aaa"]).SerializeToString() +
test_pb2.Event(action_mask=[True]).SerializeToString() +
test_pb2.Event(query_token=["bbb"]).SerializeToString())
expected_field_value = {
"query_token": [b"aaa", b"bbb"],
"action_mask": [True],
"action": []
}
for fields_to_parse in [["query_token"], ["action_mask"],
["query_token", "action_mask"],
["query_token", "action"],
["query_token", "action_mask", "action"]]:
parsed_fields = struct2tensor_ops.parse_message_level(
[proto], test_pb2.Event.DESCRIPTOR, fields_to_parse)
for f in parsed_fields:
expected_value = expected_field_value[f.field_name]
if expected_value:
self.assertAllEqual(expected_value, f.value)
def test_out_of_order_repeated_fields_3(self):
    # This follows a 3-5-3 wire number pattern, where 3 and 4 are the parsed fields.
proto = (
test_pb2.AllSimple(repeated_string=["aaa"]).SerializeToString() +
test_pb2.AllSimple(repeated_int64=[12345]).SerializeToString() +
test_pb2.AllSimple(repeated_string=["bbb"]).SerializeToString())
expected_field_value = {
"repeated_string": [b"aaa", b"bbb"],
"repeated_int64": [12345],
"repeated_int32": [],
"repeated_uint32": []
}
for fields_to_parse in [["repeated_int64"], ["repeated_string"],
[
"repeated_string",
"repeated_uint32", "repeated_int32"
]]:
parsed_fields = struct2tensor_ops.parse_message_level(
[proto], test_pb2.AllSimple.DESCRIPTOR, fields_to_parse)
for f in parsed_fields:
self.assertAllEqual(expected_field_value[f.field_name], f.value)
def test_parse_full_message_level_for_event(self):
event = test_pb2.Event()
event.event_id = "foo"
event.query = "query"
event.query_token.append("a")
event.query_token.append("b")
action0 = event.action.add()
action0.doc_id = "abc"
action1 = event.action.add()
event.user_info.age_in_years = 38
event2 = test_pb2.Event()
action2 = event2.action.add()
action2.doc_id = "def"
parsed_field_dict = _parse_full_message_level_as_dict([event, event2])
doc_id = parsed_field_dict["action"]
serialized_actions = [
proto.SerializeToString() for proto in [action0, action1, action2]
]
self.assertAllEqual(doc_id.index, [0, 0, 1])
self.assertAllEqual(doc_id.value, serialized_actions)
def test_parse_full_message_level_for_simple_action_multiple(self):
"""Test multiple messages."""
as1 = test_pb2.AllSimple()
as1.optional_string = "a"
as1.repeated_string.append("b")
as1.repeated_string.append("c")
as2 = test_pb2.AllSimple()
as2.optional_string = "d"
as2.optional_int32 = 123
as3 = test_pb2.AllSimple()
as3.repeated_string.append("d")
as3.repeated_string.append("e")
as3.optional_int32 = 123
parsed_field_dict = _parse_full_message_level_as_dict([as1, as2, as3])
doc_id = parsed_field_dict["repeated_string"]
self.assertAllEqual(doc_id.index, [0, 0, 2, 2])
self.assertAllEqual(doc_id.value, [b"b", b"c", b"d", b"e"])
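    # The parallel index/value encoding above means index[i] names which input
    # message value[i] came from; as2 has no repeated_string, so message 1
    # contributes nothing.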
def test_parse_full_message_level_for_all_simple_repeated_repeated(self):
"""Test five messages with every possible repeated field repeated."""
all_simple = test_pb2.AllSimple()
all_simple.repeated_string.append("foo")
all_simple.repeated_string.append("foo2")
all_simple.repeated_int32.append(32)
all_simple.repeated_int32.append(322)
all_simple.repeated_uint32.append(123)
all_simple.repeated_uint32.append(1232)
all_simple.repeated_int64.append(123456)
all_simple.repeated_int64.append(1234562)
all_simple.repeated_uint64.append(123)
all_simple.repeated_uint64.append(1232)
all_simple.repeated_float.append(1.0)
all_simple.repeated_float.append(2.0)
all_simple.repeated_double.append(1.5)
all_simple.repeated_double.append(2.5)
result = _get_full_message_level_runnable([
all_simple,
test_pb2.AllSimple(),
test_pb2.AllSimple(), all_simple,
test_pb2.AllSimple(), all_simple,
test_pb2.AllSimple()
])
self.assertAllEqual(result["repeated_string"][INDEX], [0, 0, 3, 3, 5, 5])
self.assertAllEqual(result["repeated_string"][VALUE],
[b"foo", b"foo2", b"foo", b"foo2", b"foo", b"foo2"])
self.assertAllEqual(result["repeated_int32"][INDEX], [0, 0, 3, 3, 5, 5])
self.assertAllEqual(result["repeated_int32"][VALUE],
[32, 322, 32, 322, 32, 322])
self.assertAllEqual(result["repeated_uint32"][INDEX], [0, 0, 3, 3, 5, 5])
self.assertAllEqual(result["repeated_uint32"][VALUE],
[123, 1232, 123, 1232, 123, 1232])
self.assertAllEqual(result["repeated_int64"][INDEX], [0, 0, 3, 3, 5, 5])
self.assertAllEqual(result["repeated_int64"][VALUE],
[123456, 1234562, 123456, 1234562, 123456, 1234562])
self.assertAllEqual(result["repeated_uint64"][INDEX], [0, 0, 3, 3, 5, 5])
self.assertAllEqual(result["repeated_uint64"][VALUE],
[123, 1232, 123, 1232, 123, 1232])
self.assertAllEqual(result["repeated_float"][INDEX], [0, 0, 3, 3, 5, 5])
self.assertAllEqual(result["repeated_float"][VALUE],
[1.0, 2.0, 1.0, 2.0, 1.0, 2.0])
self.assertAllEqual(result["repeated_double"][INDEX], [0, 0, 3, 3, 5, 5])
self.assertAllEqual(result["repeated_double"][VALUE],
[1.5, 2.5, 1.5, 2.5, 1.5, 2.5])
def test_parse_full_message_level_for_all_simple_repeated(self):
"""Test a single message with every possible repeated field repeated."""
all_simple = test_pb2.AllSimple()
all_simple.repeated_string.append("foo")
all_simple.repeated_string.append("foo2")
all_simple.repeated_int32.append(32)
all_simple.repeated_int32.append(322)
all_simple.repeated_uint32.append(123)
all_simple.repeated_uint32.append(1232)
all_simple.repeated_int64.append(123456)
all_simple.repeated_int64.append(1234562)
all_simple.repeated_uint64.append(123)
all_simple.repeated_uint64.append(1232)
all_simple.repeated_float.append(1.0)
all_simple.repeated_float.append(2.0)
all_simple.repeated_double.append(1.5)
all_simple.repeated_double.append(2.5)
result = _get_full_message_level_runnable([all_simple])
self.assertAllEqual(result["repeated_string"][INDEX], [0, 0])
self.assertAllEqual(result["repeated_string"][VALUE], [b"foo", b"foo2"])
self.assertAllEqual(result["repeated_int32"][INDEX], [0, 0])
self.assertAllEqual(result["repeated_int32"][VALUE], [32, 322])
self.assertAllEqual(result["repeated_uint32"][INDEX], [0, 0])
self.assertAllEqual(result["repeated_uint32"][VALUE], [123, 1232])
self.assertAllEqual(result["repeated_int64"][INDEX], [0, 0])
self.assertAllEqual(result["repeated_int64"][VALUE], [123456, 1234562])
self.assertAllEqual(result["repeated_uint64"][INDEX], [0, 0])
self.assertAllEqual(result["repeated_uint64"][VALUE], [123, 1232])
self.assertAllEqual(result["repeated_float"][INDEX], [0, 0])
self.assertAllEqual(result["repeated_float"][VALUE], [1.0, 2.0])
self.assertAllEqual(result["repeated_double"][INDEX], [0, 0])
self.assertAllEqual(result["repeated_double"][VALUE], [1.5, 2.5])
def test_parse_full_message_level_for_all_simple(self):
"""Test a single message with every possible primitive field."""
all_simple = test_pb2.AllSimple()
all_simple.optional_string = "foo"
all_simple.optional_int32 = -5
all_simple.optional_uint32 = 2**31
all_simple.optional_int64 = 100123
all_simple.optional_uint64 = 2**63
all_simple.optional_float = 6.5
all_simple.optional_double = -7.0
all_simple.repeated_string.append("foo")
all_simple.repeated_int32.append(32)
all_simple.repeated_uint32.append(123)
all_simple.repeated_int64.append(123456)
all_simple.repeated_uint64.append(123)
all_simple.repeated_float.append(1.0)
all_simple.repeated_double.append(1.5)
runnable = _get_full_message_level_runnable([all_simple])
self.assertLen(runnable["optional_string"][INDEX].shape.dims, 1)
self.assertLen(runnable["optional_string"][VALUE].shape.dims, 1)
self.assertLen(runnable["repeated_string"][INDEX].shape.dims, 1)
self.assertLen(runnable["repeated_string"][VALUE].shape.dims, 1)
result = runnable
self.assertAllEqual(result["optional_string"][INDEX], [0])
self.assertAllEqual(result["optional_string"][VALUE], [b"foo"])
self.assertAllEqual(result["optional_int32"][INDEX], [0])
self.assertAllEqual(result["optional_int32"][VALUE], [-5])
self.assertAllEqual(result["optional_uint32"][INDEX], [0])
self.assertAllEqual(result["optional_uint32"][VALUE], [2**31])
self.assertAllEqual(result["optional_int64"][INDEX], [0])
self.assertAllEqual(result["optional_int64"][VALUE], [100123])
self.assertAllEqual(result["optional_uint64"][INDEX], [0])
self.assertAllEqual(result["optional_uint64"][VALUE], [2**63])
self.assertAllEqual(result["optional_float"][INDEX], [0])
self.assertAllEqual(result["optional_float"][VALUE], [6.5])
self.assertAllEqual(result["optional_double"][INDEX], [0])
self.assertAllEqual(result["optional_double"][VALUE], [-7.0])
# TODO(martinz): test the repeated fields too.
def test_parse_full_message_level_action(self):
action = test_pb2.Action()
action.doc_id = "3"
action.number_of_views = 3
result = _get_full_message_level_runnable([action])
self.assertAllEqual(result["doc_id"][INDEX], [0])
self.assertAllEqual(result["doc_id"][VALUE], [b"3"])
self.assertAllEqual(result["number_of_views"][INDEX], [0])
self.assertAllEqual(result["number_of_views"][VALUE], [3])
def test_parse_message_level(self):
action = test_pb2.Action()
action.doc_id = "3"
action.number_of_views = 3
tensor_of_protos = tf.constant([action.SerializeToString()])
[field_tuple
] = struct2tensor_ops.parse_message_level(tensor_of_protos,
test_pb2.Action().DESCRIPTOR,
["number_of_views"])
values = field_tuple.value
indices = field_tuple.index
self.assertAllEqual(indices, [0])
self.assertAllEqual(values, [3])
def test_parse_extension(self):
user_info = test_pb2.UserInfo()
user_info.Extensions[
test_pb2.LocationOfExtension.special_user_info].secret = "shhh"
expected_value = test_pb2.SpecialUserInfo()
expected_value.secret = "shhh"
tensor_of_protos = tf.constant([user_info.SerializeToString()])
[field_tuple] = struct2tensor_ops.parse_message_level(
tensor_of_protos,
test_pb2.UserInfo().DESCRIPTOR,
["(struct2tensor.test.LocationOfExtension.special_user_info)"])
self.assertAllEqual(field_tuple.index, [0])
self.assertAllEqual(field_tuple.value, [expected_value.SerializeToString()])
def test_parse_external_extension(self):
user_info = test_pb2.UserInfo()
user_info.Extensions[
test_extension_pb2.MyExternalExtension.ext].special = "shhh"
expected_value = test_extension_pb2.MyExternalExtension()
expected_value.special = "shhh"
tensor_of_protos = tf.constant([user_info.SerializeToString()])
[field_tuple] = struct2tensor_ops.parse_message_level(
tensor_of_protos,
test_pb2.UserInfo().DESCRIPTOR,
["(struct2tensor.test.MyExternalExtension.ext)"])
self.assertAllEqual(field_tuple.index, [0])
self.assertAllEqual(field_tuple.value, [expected_value.SerializeToString()])
def test_parse_packed_fields(self):
message_with_packed_fields = test_pb2.HasPackedFields(
packed_int32=[-1, -2, -3],
packed_uint32=[100000, 200000, 300000],
packed_int64=[-400000, -500000, -600000],
packed_uint64=[4, 5, 6],
packed_float=[7.0, 8.0, 9.0],
packed_double=[10.0, 11.0, 12.0],
)
tensor_of_protos = tf.constant(
[message_with_packed_fields.SerializeToString()] * 2)
parsed_tuples = struct2tensor_ops.parse_message_level(
tensor_of_protos, test_pb2.HasPackedFields.DESCRIPTOR, [
"packed_int32",
"packed_uint32",
"packed_int64",
"packed_uint64",
"packed_float",
"packed_double",
])
indices = {
parsed_tuple.field_name: parsed_tuple.index
for parsed_tuple in parsed_tuples
}
values = {
parsed_tuple.field_name: parsed_tuple.value
for parsed_tuple in parsed_tuples
}
for index in indices.values():
self.assertAllEqual(index, [0, 0, 0, 1, 1, 1])
for field_name, value in values.items():
self.assertAllEqual(
value,
list(getattr(message_with_packed_fields, field_name)) * 2)
def test_proto2_optional_field_with_honor_proto3_optional_semantic(self):
proto2_message1 = test_pb2.AllSimple()
proto2_message2 = test_pb2.AllSimple(optional_string="a")
tensor_of_protos = tf.constant([proto2_message1.SerializeToString(),
proto2_message2.SerializeToString(),
proto2_message1.SerializeToString()])
parsed_tuples = struct2tensor_ops.parse_message_level(
tensor_of_protos, test_pb2.AllSimple.DESCRIPTOR, [
"optional_string",
], honor_proto3_optional_semantics=True)
indices = {
parsed_tuple.field_name: parsed_tuple.index
for parsed_tuple in parsed_tuples
}
values = {
parsed_tuple.field_name: parsed_tuple.value
for parsed_tuple in parsed_tuples
}
# Only the second proto has value. No default value should be inserted.
for idx in indices.values():
self.assertAllEqual([1], idx)
for value in values.values():
self.assertAllEqual([b"a"], value)
def test_make_repeated_basic(self):
parent_index = tf.constant([0, 0, 4, 4, 4, 7, 8, 9], dtype=tf.int64)
values = tf.constant(["a", "b", "c", "d", "e", "f", "g", "h"])
sparse_tensor = struct2tensor_ops.create_sparse_tensor_for_repeated(
parent_index, values, tf.constant([10, 3], dtype=tf.int64))
self.assertAllEqual(
sparse_tensor.indices,
[[0, 0], [0, 1], [4, 0], [4, 1], [4, 2], [7, 0], [8, 0], [9, 0]])
def test_make_repeated_empty(self):
parent_index = tf.constant([], dtype=tf.int64)
values = tf.constant([], dtype=tf.int32)
sparse_tensor = struct2tensor_ops.create_sparse_tensor_for_repeated(
parent_index, values, tf.constant([0, 0], dtype=tf.int64))
self.assertAllEqual(sparse_tensor.indices.shape, [0, 2])
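  # A pure-Python reference sketch for the sparse indices checked in
  # test_make_repeated_basic above (an illustrative assumption, not the op's
  # implementation): each value is paired with its position within its parent row.
  def _repeated_sparse_indices_reference(self, parent_index):
    indices, run, prev = [], 0, None
    for p in parent_index:
      run = run + 1 if p == prev else 0
      indices.append([p, run])
      prev = p
    return indices  # [0, 0, 4, 4, 4, 7, 8, 9] -> [[0, 0], [0, 1], [4, 0], ...]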
@parameterized.named_parameters(*_EQUIJOIN_TEST_CASES)
def test_equi_join_indices(self, a, b, expected_index_a, expected_index_b):
a = tf.constant(a, dtype=tf.int64)
b = tf.constant(b, dtype=tf.int64)
# Test equi_join_indices
[index_a, index_b] = struct2tensor_ops.equi_join_indices(a, b)
self.assertAllEqual(index_a, expected_index_a)
self.assertAllEqual(index_b, expected_index_b)
# Test equi_join_any_indices
[index_a, index_b] = struct2tensor_ops.equi_join_any_indices(a, b)
self.assertAllEqual(index_a, expected_index_a)
self.assertAllEqual(index_b, expected_index_b)
def test_equi_join_any_indices_non_monotonic(self):
a = tf.constant([0, 1, 2, 1, 2], dtype=tf.int64)
b = tf.constant([0, 1, 1, 2, 3], dtype=tf.int64)
[index_a, index_b] = struct2tensor_ops.equi_join_any_indices(a, b)
self.assertAllEqual(index_a, [0, 1, 1, 2, 3, 3, 4])
self.assertAllEqual(index_b, [0, 1, 2, 3, 1, 2, 3])
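  # A quadratic reference sketch for the joins checked above (an illustrative
  # assumption that matches the expected outputs, not the op's implementation):
  # emit every (i, j) pair with a[i] == b[j], ordered by i and then by j.
  def _equi_join_reference(self, a, b):
    index_a, index_b = [], []
    for i, value_a in enumerate(a):
      for j, value_b in enumerate(b):
        if value_a == value_b:
          index_a.append(i)
          index_b.append(j)
    return index_a, index_b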
def test_run_length_before(self):
"""Breaking down the broadcast."""
a = tf.constant([0, 1, 1, 7, 8, 8, 9], dtype=tf.int64)
b = struct2tensor_ops.run_length_before(a)
self.assertAllEqual(b, [0, 0, 1, 0, 0, 1, 0])
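  # A reference sketch of the behaviour asserted above (an illustrative
  # assumption): each output counts how many immediately preceding inputs equal
  # the current one.
  def _run_length_before_reference(self, values):
    out, run, prev = [], 0, object()
    for v in values:
      run = run + 1 if v == prev else 0
      out.append(run)
      prev = v
    return out  # [0, 1, 1, 7, 8, 8, 9] -> [0, 0, 1, 0, 0, 1, 0]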
def test_run_length_before_empty(self):
"""Breaking down the broadcast."""
a = tf.constant([], dtype=tf.int64)
b = struct2tensor_ops.run_length_before(a)
self.assertAllEqual(b, [])
_SIGNED_INTEGER_TYPES = [
"int32", "int64", "sfixed32", "sfixed64", "sint32", "sint64"
]
_UNSIGNED_INTEGER_TYPES = ["uint32", "uint64", "fixed32", "fixed64"]
@test_util.run_all_in_graph_and_eager_modes
class DecodeProtoMapOpTest(parameterized.TestCase, tf.test.TestCase):
def _parse_map_entry(self, messages_with_map, map_field_name, keys_needed):
parsed_map_submessage = struct2tensor_ops.parse_message_level(
tf.constant([m.SerializeToString() for m in messages_with_map]),
test_map_pb2.MessageWithMap.DESCRIPTOR, [map_field_name])[0]
return struct2tensor_ops.parse_proto_map(
parsed_map_submessage.value, parsed_map_submessage.index,
parsed_map_submessage.field_descriptor.message_type, keys_needed)
@parameterized.named_parameters(
[dict(testcase_name=t, key_type=t) for t in _SIGNED_INTEGER_TYPES])
def test_signed_integer_key_types(self, key_type):
field_name = "{}_string_map".format(key_type)
message_with_map = test_map_pb2.MessageWithMap()
map_entry = getattr(message_with_map, "{}_string_map".format(key_type))
map_entry[42] = "hello"
map_entry[-42] = "world"
[(values_42, indices_42), (values_n42, indices_n42),
(values_0, indices_0)] = self._parse_map_entry([message_with_map],
field_name,
["42", "-42", "0"])
self.assertAllEqual(values_42, [b"hello"])
self.assertAllEqual(values_n42, [b"world"])
self.assertAllEqual(values_0, [])
self.assertAllEqual(indices_42, [0])
self.assertAllEqual(indices_n42, [0])
self.assertAllEqual(indices_0, [])
@parameterized.named_parameters(
[dict(testcase_name=t, key_type=t) for t in _UNSIGNED_INTEGER_TYPES])
def test_unsigned_integer_key_types(self, key_type):
field_name = "{}_string_map".format(key_type)
message_with_map = test_map_pb2.MessageWithMap()
map_entry = getattr(message_with_map, "{}_string_map".format(key_type))
map_entry[42] = "hello"
[(values_42, indices_42),
(values_0, indices_0)] = self._parse_map_entry([message_with_map],
field_name, ["42", "0"])
self.assertAllEqual(values_42, [b"hello"])
self.assertAllEqual(values_0, [])
self.assertAllEqual(indices_42, [0])
self.assertAllEqual(indices_0, [])
def test_invalid_uint32_key(self):
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
"Failed to parse .*string"):
self.evaluate(
self._parse_map_entry([test_map_pb2.MessageWithMap()],
"uint32_string_map", ["-42"]))
def test_invalid_int32_key(self):
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
"Failed to parse .*string"):
self.evaluate(
self._parse_map_entry([test_map_pb2.MessageWithMap()],
"int32_string_map", ["foo"]))
def test_bool_key_type(self):
message_with_map = test_map_pb2.MessageWithMap()
message_with_map.bool_string_map[False] = "hello"
[(values_false, indices_false), (values_true, indices_true)
] = self._parse_map_entry([message_with_map], "bool_string_map", ["0", "1"])
self.assertAllEqual(values_true, [])
self.assertAllEqual(values_false, [b"hello"])
self.assertAllEqual(indices_true, [])
self.assertAllEqual(indices_false, [0])
def test_invalid_bool_key(self):
message_with_map = test_map_pb2.MessageWithMap()
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
"Failed to parse .*string"):
self.evaluate(
self._parse_map_entry([message_with_map], "bool_string_map", ["2"]))
@parameterized.named_parameters(
[dict(testcase_name=t, value_type=t) for t in _SIGNED_INTEGER_TYPES])
def test_signed_integer_value_types(self, value_type):
field_name = "string_{}_map".format(value_type)
message_with_map = test_map_pb2.MessageWithMap()
map_entry = getattr(message_with_map, "string_{}_map".format(value_type))
map_entry["foo"] = 42
map_entry["bar"] = -42
[(values_foo, indices_foo), (values_bar, indices_bar),
(values_null, indices_null)] = self._parse_map_entry([message_with_map],
field_name,
["foo", "bar", ""])
self.assertAllEqual(values_foo, [42])
self.assertAllEqual(values_bar, [-42])
self.assertAllEqual(values_null, [])
self.assertAllEqual(indices_foo, [0])
self.assertAllEqual(indices_bar, [0])
self.assertAllEqual(indices_null, [])
@parameterized.named_parameters(
[dict(testcase_name=t, value_type=t) for t in _UNSIGNED_INTEGER_TYPES])
def test_unsigned_integer_value_types(self, value_type):
field_name = "string_{}_map".format(value_type)
message_with_map = test_map_pb2.MessageWithMap()
map_entry = getattr(message_with_map, "string_{}_map".format(value_type))
map_entry["foo"] = 42
[(values_foo, indices_foo), (values_null, indices_null)
] = self._parse_map_entry([message_with_map], field_name, ["foo", ""])
self.assertAllEqual(values_foo, [42])
self.assertAllEqual(values_null, [])
self.assertAllEqual(indices_foo, [0])
self.assertAllEqual(indices_null, [])
@parameterized.named_parameters(
[dict(testcase_name=t, value_type=t) for t in ["float", "double"]])
def test_fp_value_types(self, value_type):
field_name = "string_{}_map".format(value_type)
message_with_map = test_map_pb2.MessageWithMap()
map_entry = getattr(message_with_map, "string_{}_map".format(value_type))
map_entry["foo"] = 0.5
[(values_foo, indices_foo), (values_null, indices_null)
] = self._parse_map_entry([message_with_map], field_name, ["foo", ""])
self.assertAllEqual(values_foo, [0.5])
self.assertAllEqual(values_null, [])
self.assertAllEqual(indices_foo, [0])
self.assertAllEqual(indices_null, [])
def test_enum_value_type(self):
message_with_map = test_map_pb2.MessageWithMap()
message_with_map.string_enum_map["foo"] = test_map_pb2.BAZ
[(values_foo, indices_foo),
(values_null, indices_null)] = self._parse_map_entry([message_with_map],
"string_enum_map",
["foo", ""])
self.assertAllEqual(values_foo, [int(test_map_pb2.BAZ)])
self.assertAllEqual(values_null, [])
self.assertAllEqual(indices_foo, [0])
self.assertAllEqual(indices_null, [])
def test_message_value_type(self):
sub_message = test_map_pb2.SubMessage(repeated_int64=[1, 2, 3])
message_with_map = test_map_pb2.MessageWithMap()
message_with_map.string_message_map["foo"].MergeFrom(sub_message)
[(values_foo, indices_foo),
(values_null, indices_null)] = self._parse_map_entry([message_with_map],
"string_message_map",
["foo", ""])
self.assertAllEqual(values_foo, [sub_message.SerializeToString()])
self.assertAllEqual(values_null, [])
self.assertAllEqual(indices_foo, [0])
self.assertAllEqual(indices_null, [])
def test_multiple_messages(self):
message_with_map1 = test_map_pb2.MessageWithMap(string_string_map={
"key1": "foo",
"key3": "bar"
})
message_with_map2 = test_map_pb2.MessageWithMap()
message_with_map3 = test_map_pb2.MessageWithMap(string_string_map={
"key2": "baz",
"key1": "kaz"
})
[(values_key1, indices_key1), (values_key2, indices_key2),
(values_key3, indices_key3)] = self._parse_map_entry(
[message_with_map1, message_with_map2, message_with_map3],
"string_string_map", ["key1", "key2", "key3"])
self.assertAllEqual(values_key1, [b"foo", b"kaz"])
self.assertAllEqual(values_key2, [b"baz"])
self.assertAllEqual(values_key3, [b"bar"])
self.assertAllEqual(indices_key1, [0, 2])
self.assertAllEqual(indices_key2, [2])
self.assertAllEqual(indices_key3, [0])
def test_corrupted_message(self):
with self.assertRaises(tf.errors.DataLossError):
self.evaluate(
struct2tensor_ops.parse_proto_map(
tf.constant(["corrupted message"]),
tf.constant([0], dtype=tf.int64), test_map_pb2.MessageWithMap
.DESCRIPTOR.fields_by_name["int32_string_map"].message_type,
["0"]))
if __name__ == "__main__":
absltest.main()
|
google/struct2tensor
|
struct2tensor/ops/struct2tensor_ops_test.py
|
Python
|
apache-2.0
| 30,777
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.orchestration.airflow.service_v1beta1.services.environments import (
pagers,
)
from google.cloud.orchestration.airflow.service_v1beta1.types import environments
from google.cloud.orchestration.airflow.service_v1beta1.types import operations
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import EnvironmentsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import EnvironmentsGrpcAsyncIOTransport
from .client import EnvironmentsClient
class EnvironmentsAsyncClient:
"""Managed Apache Airflow Environments."""
_client: EnvironmentsClient
DEFAULT_ENDPOINT = EnvironmentsClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = EnvironmentsClient.DEFAULT_MTLS_ENDPOINT
environment_path = staticmethod(EnvironmentsClient.environment_path)
parse_environment_path = staticmethod(EnvironmentsClient.parse_environment_path)
common_billing_account_path = staticmethod(
EnvironmentsClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
EnvironmentsClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(EnvironmentsClient.common_folder_path)
parse_common_folder_path = staticmethod(EnvironmentsClient.parse_common_folder_path)
common_organization_path = staticmethod(EnvironmentsClient.common_organization_path)
parse_common_organization_path = staticmethod(
EnvironmentsClient.parse_common_organization_path
)
common_project_path = staticmethod(EnvironmentsClient.common_project_path)
parse_common_project_path = staticmethod(
EnvironmentsClient.parse_common_project_path
)
common_location_path = staticmethod(EnvironmentsClient.common_location_path)
parse_common_location_path = staticmethod(
EnvironmentsClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EnvironmentsAsyncClient: The constructed client.
"""
return EnvironmentsClient.from_service_account_info.__func__(EnvironmentsAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EnvironmentsAsyncClient: The constructed client.
"""
return EnvironmentsClient.from_service_account_file.__func__(EnvironmentsAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return EnvironmentsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> EnvironmentsTransport:
"""Returns the transport used by the client instance.
Returns:
EnvironmentsTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(EnvironmentsClient).get_transport_class, type(EnvironmentsClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, EnvironmentsTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the environments client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.EnvironmentsTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = EnvironmentsClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
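        # A minimal construction sketch (illustrative values, not from the source):
        #   options = ClientOptions(api_endpoint="composer.googleapis.com")
        #   client = EnvironmentsAsyncClient(client_options=options)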
async def create_environment(
self,
request: Union[environments.CreateEnvironmentRequest, dict] = None,
*,
parent: str = None,
environment: environments.Environment = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Create a new environment.
.. code-block:: python
from google.cloud.orchestration.airflow import service_v1beta1
def sample_create_environment():
# Create a client
client = service_v1beta1.EnvironmentsClient()
# Initialize request argument(s)
request = service_v1beta1.CreateEnvironmentRequest(
)
# Make the request
operation = client.create_environment(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.CreateEnvironmentRequest, dict]):
The request object. Create a new environment.
parent (:class:`str`):
The parent must be of the form
"projects/{projectId}/locations/{locationId}".
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
environment (:class:`google.cloud.orchestration.airflow.service_v1beta1.types.Environment`):
The environment to create.
This corresponds to the ``environment`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.orchestration.airflow.service_v1beta1.types.Environment`
An environment for running orchestration tasks.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, environment])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = environments.CreateEnvironmentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if environment is not None:
request.environment = environment
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_environment,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
environments.Environment,
metadata_type=operations.OperationMetadata,
)
# Done; return the response.
return response
async def get_environment(
self,
request: Union[environments.GetEnvironmentRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> environments.Environment:
r"""Get an existing environment.
.. code-block:: python
from google.cloud.orchestration.airflow import service_v1beta1
def sample_get_environment():
# Create a client
client = service_v1beta1.EnvironmentsClient()
# Initialize request argument(s)
request = service_v1beta1.GetEnvironmentRequest(
)
# Make the request
response = client.get_environment(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.GetEnvironmentRequest, dict]):
The request object. Get an environment.
name (:class:`str`):
The resource name of the environment
to get, in the form:
"projects/{projectId}/locations/{locationId}/environments/{environmentId}"
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.orchestration.airflow.service_v1beta1.types.Environment:
An environment for running
orchestration tasks.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = environments.GetEnvironmentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_environment,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def list_environments(
self,
request: Union[environments.ListEnvironmentsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListEnvironmentsAsyncPager:
r"""List environments.
.. code-block:: python
from google.cloud.orchestration.airflow import service_v1beta1
def sample_list_environments():
# Create a client
client = service_v1beta1.EnvironmentsClient()
# Initialize request argument(s)
request = service_v1beta1.ListEnvironmentsRequest(
)
# Make the request
page_result = client.list_environments(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.ListEnvironmentsRequest, dict]):
The request object. List environments in a project and
location.
parent (:class:`str`):
List environments in the given
project and location, in the form:
"projects/{projectId}/locations/{locationId}"
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.orchestration.airflow.service_v1beta1.services.environments.pagers.ListEnvironmentsAsyncPager:
The environments in a project and
location.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = environments.ListEnvironmentsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_environments,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListEnvironmentsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def update_environment(
self,
request: Union[environments.UpdateEnvironmentRequest, dict] = None,
*,
name: str = None,
environment: environments.Environment = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Update an environment.
.. code-block:: python
from google.cloud.orchestration.airflow import service_v1beta1
def sample_update_environment():
# Create a client
client = service_v1beta1.EnvironmentsClient()
# Initialize request argument(s)
request = service_v1beta1.UpdateEnvironmentRequest(
)
# Make the request
operation = client.update_environment(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.UpdateEnvironmentRequest, dict]):
The request object. Update an environment.
name (:class:`str`):
The relative resource name of the
environment to update, in the form:
"projects/{projectId}/locations/{locationId}/environments/{environmentId}"
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
environment (:class:`google.cloud.orchestration.airflow.service_v1beta1.types.Environment`):
A patch environment. Fields specified by the
``updateMask`` will be copied from the patch environment
into the environment under update.
This corresponds to the ``environment`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. A comma-separated list of paths, relative to
``Environment``, of fields to update. For example, to
set the version of scikit-learn to install in the
environment to 0.19.0 and to remove an existing
installation of argparse, the ``updateMask`` parameter
would include the following two ``paths`` values:
"config.softwareConfig.pypiPackages.scikit-learn" and
"config.softwareConfig.pypiPackages.argparse". The
included patch environment would specify the
scikit-learn version as follows:
::
{
"config":{
"softwareConfig":{
"pypiPackages":{
"scikit-learn":"==0.19.0"
}
}
}
}
Note that in the above example, any existing PyPI
packages other than scikit-learn and argparse will be
unaffected.
Only one update type may be included in a single
request's ``updateMask``. For example, one cannot update
both the PyPI packages and labels in the same request.
However, it is possible to update multiple members of a
map field simultaneously in the same request. For
example, to set the labels "label1" and "label2" while
clearing "label3" (assuming it already exists), one can
provide the paths "labels.label1", "labels.label2", and
"labels.label3" and populate the patch environment as
follows:
::
{
"labels":{
"label1":"new-label1-value"
"label2":"new-label2-value"
}
}
Note that in the above example, any existing labels that
are not included in the ``updateMask`` will be
unaffected.
It is also possible to replace an entire map field by
providing the map field's path in the ``updateMask``.
The new value of the field will be that which is
provided in the patch environment. For example, to
delete all pre-existing user-specified PyPI packages and
install botocore at version 1.7.14, the ``updateMask``
would contain the path
"config.softwareConfig.pypiPackages", and the patch
environment would be the following:
::
{
"config":{
"softwareConfig":{
"pypiPackages":{
"botocore":"==1.7.14"
}
}
}
}
**Note:** Only the following fields can be updated:
- ``config.softwareConfig.pypiPackages``
                   -  Replace all custom PyPI packages. If a
replacement package map is not included in
``environment``, all custom PyPI packages are
cleared. It is an error to provide both this mask
and a mask specifying an individual package.
- ``config.softwareConfig.pypiPackages.``\ packagename
- Update the custom PyPI package *packagename*,
preserving other packages. To delete the package,
include it in ``updateMask``, and omit the mapping
for it in
``environment.config.softwareConfig.pypiPackages``.
It is an error to provide both a mask of this form
and the ``config.softwareConfig.pypiPackages``
mask.
- ``labels``
- Replace all environment labels. If a replacement
labels map is not included in ``environment``, all
labels are cleared. It is an error to provide both
this mask and a mask specifying one or more
individual labels.
- ``labels.``\ labelName
- Set the label named *labelName*, while preserving
other labels. To delete the label, include it in
``updateMask`` and omit its mapping in
``environment.labels``. It is an error to provide
both a mask of this form and the ``labels`` mask.
                -  ``config.nodeCount``
                   -  Horizontally scale the number of nodes in the
                      environment. An integer greater than or equal to 3
                      must be provided in the ``config.nodeCount``
                      field.
                -  ``config.webServerNetworkAccessControl``
                   -  Replace the environment's current
                      WebServerNetworkAccessControl.
- ``config.softwareConfig.airflowConfigOverrides``
- Replace all Apache Airflow config overrides. If a
replacement config overrides map is not included
in ``environment``, all config overrides are
cleared. It is an error to provide both this mask
and a mask specifying one or more individual
config overrides.
- ``config.softwareConfig.airflowConfigOverrides.``\ section-name
- Override the Apache Airflow config property *name*
in the section named *section*, preserving other
properties. To delete the property override,
include it in ``updateMask`` and omit its mapping
in
``environment.config.softwareConfig.airflowConfigOverrides``.
It is an error to provide both a mask of this form
and the
``config.softwareConfig.airflowConfigOverrides``
mask.
- ``config.softwareConfig.envVariables``
- Replace all environment variables. If a
replacement environment variable map is not
included in ``environment``, all custom
environment variables are cleared. It is an error
to provide both this mask and a mask specifying
one or more individual environment variables.
- ``config.softwareConfig.imageVersion``
- Upgrade the version of the environment in-place.
Refer to ``SoftwareConfig.image_version`` for
information on how to format the new image
version. Additionally, the new image version
cannot effect a version downgrade and must match
the current image version's Composer major version
and Airflow major and minor versions. Consult the
`Cloud Composer Version
List <https://cloud.google.com/composer/docs/concepts/versioning/composer-versions>`__
for valid values.
                -  ``config.softwareConfig.schedulerCount``
                   -  Horizontally scale the number of schedulers in
                      Airflow. A positive integer not greater than the
                      number of nodes must be provided in the
                      ``config.softwareConfig.schedulerCount`` field.
                -  ``config.databaseConfig.machineType``
                   -  Cloud SQL machine type used by Airflow database.
                      It has to be one of: db-n1-standard-2,
                      db-n1-standard-4, db-n1-standard-8 or
                      db-n1-standard-16.
                -  ``config.webServerConfig.machineType``
                   -  Machine type on which Airflow web server is
                      running. It has to be one of:
                      composer-n1-webserver-2, composer-n1-webserver-4
                      or composer-n1-webserver-8.
                -  ``config.maintenanceWindow``
                   -  Maintenance window during which Cloud Composer
                      components may be under maintenance.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.orchestration.airflow.service_v1beta1.types.Environment`
An environment for running orchestration tasks.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, environment, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = environments.UpdateEnvironmentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if environment is not None:
request.environment = environment
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_environment,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
environments.Environment,
metadata_type=operations.OperationMetadata,
)
# Done; return the response.
return response
async def delete_environment(
self,
request: Union[environments.DeleteEnvironmentRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Delete an environment.
.. code-block:: python
from google.cloud.orchestration.airflow import service_v1beta1
def sample_delete_environment():
# Create a client
client = service_v1beta1.EnvironmentsClient()
# Initialize request argument(s)
request = service_v1beta1.DeleteEnvironmentRequest(
)
# Make the request
operation = client.delete_environment(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.DeleteEnvironmentRequest, dict]):
The request object. Delete an environment.
name (:class:`str`):
The environment to delete, in the
form:
"projects/{projectId}/locations/{locationId}/environments/{environmentId}"
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = environments.DeleteEnvironmentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_environment,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=operations.OperationMetadata,
)
# Done; return the response.
return response
async def restart_web_server(
self,
request: Union[environments.RestartWebServerRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Restart Airflow web server.
.. code-block:: python
from google.cloud.orchestration.airflow import service_v1beta1
def sample_restart_web_server():
# Create a client
client = service_v1beta1.EnvironmentsClient()
# Initialize request argument(s)
request = service_v1beta1.RestartWebServerRequest(
)
# Make the request
operation = client.restart_web_server(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.RestartWebServerRequest, dict]):
The request object. Restart Airflow web server.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.orchestration.airflow.service_v1beta1.types.Environment`
An environment for running orchestration tasks.
"""
# Create or coerce a protobuf request object.
request = environments.RestartWebServerRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.restart_web_server,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
environments.Environment,
metadata_type=operations.OperationMetadata,
)
# Done; return the response.
return response
async def check_upgrade(
self,
request: Union[environments.CheckUpgradeRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Check if an upgrade operation on the environment will
succeed.
In case of problems detailed info can be found in the
returned Operation.
.. code-block:: python
from google.cloud.orchestration.airflow import service_v1beta1
def sample_check_upgrade():
# Create a client
client = service_v1beta1.EnvironmentsClient()
# Initialize request argument(s)
request = service_v1beta1.CheckUpgradeRequest(
)
# Make the request
operation = client.check_upgrade(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.CheckUpgradeRequest, dict]):
The request object. Request to check whether image
upgrade will succeed.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.orchestration.airflow.service_v1beta1.types.CheckUpgradeResponse` Message containing information about the result of an upgrade check
operation.
"""
# Create or coerce a protobuf request object.
request = environments.CheckUpgradeRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.check_upgrade,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("environment", request.environment),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
environments.CheckUpgradeResponse,
metadata_type=operations.OperationMetadata,
)
# Done; return the response.
return response
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-orchestration-airflow-service",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("EnvironmentsAsyncClient",)
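# ---------------------------------------------------------------------------
# Usage sketch (not part of the generated surface): a minimal illustration of
# the flattened arguments documented above. The project, location and
# environment IDs are placeholders, and the FieldMask path is the camelCase
# form shown in the ``update_environment`` docstring. Run it with e.g.
# ``asyncio.run(_example_pin_pypi_package())``.
async def _example_pin_pypi_package():
    client = EnvironmentsAsyncClient()
    parent = "projects/my-project/locations/us-central1"
    # The async pager resolves additional result pages transparently.
    async for env in await client.list_environments(parent=parent):
        print(env.name)
    # Pin scikit-learn to 0.19.0 while leaving other PyPI packages untouched.
    patch = environments.Environment(
        config=environments.EnvironmentConfig(
            software_config=environments.SoftwareConfig(
                pypi_packages={"scikit-learn": "==0.19.0"}
            )
        )
    )
    operation = await client.update_environment(
        name=parent + "/environments/my-environment",
        environment=patch,
        update_mask=field_mask_pb2.FieldMask(
            paths=["config.softwareConfig.pypiPackages.scikit-learn"]
        ),
    )
    return await operation.result()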
|
googleapis/python-orchestration-airflow
|
google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py
|
Python
|
apache-2.0
| 45,966
|
# -*- coding: utf-8 -*-
#__author__ = 'Alihanniba'
import urllib.request
# from urllib.request import urlopen
import urllib.error
import re
import os
import taobaotool
import time
class Spider:
def __init__(self):
self.siteUrl = 'http://mm.taobao.com/json/request_top_list.htm'
self.tool = taobaotool.Tool()
def getPage(self, pageIndex):
url = self.siteUrl + '?page=' + str(pageIndex)
request = urllib.request.Request(url)
response = urllib.request.urlopen(request)
content = response.read().decode('gbk')
return content
def getContents(self, pageIndex):
page = self.getPage(pageIndex)
pattern = re.compile('<div class="list-item".*?pic-word.*?<a href="(.*?)".*?<img src="(.*?)".*?<a class="lady-name.*?>(.*?)</a>.*?<strong>(.*?)</strong>.*?<span>(.*?)</span>',re.S)
items = re.findall(pattern, str(page))
contents = []
for item in items:
contents.append([item[0], item[1], item[2], item[3], item[4]])
print(item[0], item[1], item[2], item[3], item[4])
return contents
def getDetailPage(self, infoURL):
response = urllib.request.urlopen(infoURL)
return response.read().decode('gbk')
def getBrief(self, page):
pattern = re.compile('<div class="mm-aixiu-content".*?>(.*?)<!--',re.S)
result = re.search(pattern, str(page))
return self.tool.replace(result.group(1))
def getAllImg(self, page):
pattern = re.compile('<div class="mm-aixiu-content".*?>(.*?)<!--',re.S)
content = re.search(pattern, str(page))
patternImg = re.compile('<img.*?src="(.*?)"',re.S)
images = re.findall(patternImg, str(content.group(1)))
return images
def saveImgs(self, images, name):
number = 1
        print('Found', len(images), 'images for', name)
for imageURL in images:
splitPage = imageURL.split('.')
fTail = splitPage.pop()
if len(fTail) > 3:
fTail = 'jpg'
fileName = name + '/' + str(number) + '.' + fTail
self.saveImg(imageURL, fileName)
number += 1
def saveImg(self, imgUrl, fileName):
u = urllib.request.urlopen(imgUrl)
data = u.read()
f = open(fileName, 'wb')
f.write(data)
        print('Saving image as', fileName)
f.close()
def saveIcon(self, iconURL, name):
splitPath = iconURL.split('.')
fTail = splitPath.pop()
fileName = name + '/icon.' + fTail
self.saveImg(iconURL, fileName)
    def saveBrief(self, content, name):
        fileName = name + '/' + name + '.txt'
        print("Saving profile to", fileName)
        with open(fileName, 'w', encoding='utf-8') as f:
            f.write(content)
def mkdir(self, path):
path = path.strip()
        isExists = os.path.exists(path)
if not isExists:
os.makedirs(path)
return True
else:
return False
def savePageInfo(self, pageIndex):
contents = self.getContents(pageIndex)
for item in contents:
detailURL = item[0]
detailPage = self.getDetailPage(detailURL)
brief = self.getBrief(detailPage)
images = self.getAllImg(detailPage)
self.mkdir(item[2])
self.saveBrief(brief, item[2])
self.saveIcon(item[1], item[2])
self.saveImgs(images, item[2])
def savePagesInfo(self, start, end):
for i in range(start, end + 1):
self.savePageInfo(i)
spider = Spider()
spider.savePagesInfo(2, 10)
|
alihanniba/tornado-awesome
|
scrapy/taobaomm.py
|
Python
|
apache-2.0
| 3,677
|
__author__ = 'bharathramh'
EMAIL_VERIFICATION_EXPIRATION_DAYS = 1
FORGOT_PASSWORD_EXPIRATION_DAYS = 1
|
bharathramh92/easy-ecom
|
accounts/constants.py
|
Python
|
apache-2.0
| 102
|
# -*- coding: utf-8 -*-
"""
test.
"""
import torndb
import unittest
from ..views import case
class PrimesTestCase(unittest.TestCase):
"""Tests for `primes.py`."""
def test_is_five_prime(self):
""""""
        self.assertTrue(case.is_prime(2), msg='assertion failed')
class WidgetTestCase(unittest.TestCase):
def setUp(self):
self.widget = 'widget'
self.db = torndb.Connection()
def tearDown(self):
self.db.close()
if __name__ == '__main__':
unittest.main()
|
BetterTomorrowPy/Luxury
|
luxury/apis/tests/test_unittest.py
|
Python
|
apache-2.0
| 518
|
import argparse
import sys
import logging
import os
import csv
class ReadItem:
def __init__(self, sequence, totalCount):
self.Sequence = sequence
self.TotalCount = totalCount
self.SampleMap = {}
class AnnotationItem:
def __init__(self, sequence, totalCount, category, counts):
self.Sequence = sequence
self.TotalCount = totalCount
self.Categories = [category]
self.Counts = counts
def getValue(value):
return value.TotalCount
def getFilename(value):
return value[1]
def match(logger, input, names, annotated, maxMapped, maxNumber, minReadCount, minSampleCount, outputPrefix):
logger.info("Reading short reads:" + input + " ...")
shortReadMap = {}
shortReadFiles = []
shortFileList = []
with open(input, 'r') as sr:
for line in sr:
parts = line.rstrip().split('\t')
shortFileList.append(parts)
shortFileList = sorted(shortFileList, key=getFilename)
for parts in shortFileList:
sampleFile = parts[0]
sample = parts[1]
shortReadFiles.append(sample)
logger.info(" Reading " + sampleFile + " ...")
with open(sampleFile, 'r') as fin:
fin.readline()
for line in fin:
reads = line.rstrip().split('\t')
count = int(reads[1])
seq = reads[2].rstrip()
if not seq in shortReadMap:
ri = ReadItem(seq, count)
shortReadMap[seq] = ri
else:
ri = shortReadMap[seq]
ri.TotalCount += count
ri.SampleMap[sample] = count
if minSampleCount > 1 or minReadCount > 1:
shortReads = []
for read in shortReadMap.values():
validSampleCount = len([v for v in read.SampleMap.values() if v >= minReadCount])
if validSampleCount >= minSampleCount:
shortReads.append(read)
else:
shortReads = shortReadMap.values()
shortReads = sorted(shortReads, key=getValue, reverse=True)
if len(shortReads) > maxNumber:
shortReads = shortReads[0:maxNumber]
logger.info("Reading max mapped reads:" + maxMapped + " ...")
maxmappedReads = {}
with open(maxMapped, 'r') as sr:
for line in sr:
parts = line.split('\t')
logger.info(" Reading " + parts[0] + " ...")
with open(parts[0], 'r') as fin:
while True:
qname = fin.readline().rstrip()
if not qname:
break
seq = fin.readline()
fin.readline()
fin.readline()
if qname.endswith("_"):
maxmappedReads[seq.rstrip()] = 1
cnames = names.split(",")
logger.info("Reading annotated reads:" + annotated + " ...")
annotatedReadMap = {}
annotatedFiles = []
with open(annotated, 'r') as annolist:
iIndex = -1
for row in annolist:
parts = row.split('\t')
annofile = parts[0]
iIndex = iIndex + 1
category = cnames[iIndex]
logger.info(" Reading " + annofile + " ...")
with open(annofile, 'r') as sr:
annotatedFiles = sr.readline().rstrip().split('\t')[1:]
for line in sr:
parts = line.rstrip().split('\t')
seq = parts[0]
if seq not in annotatedReadMap:
totalCount = sum(int(p) for p in parts[1:])
annotatedReadMap[seq] = AnnotationItem(seq, totalCount, category, parts[1:])
else:
annotatedReadMap[seq].Categories.append(category)
annotatedReads = sorted(annotatedReadMap.values(), key=getValue, reverse=True)
output = outputPrefix + ".tsv"
logger.info("Writing explain result:" + output + " ...")
with open(output, "w") as sw:
sw.write("ShortRead\tShortReadCount\tShortReadLength\t" + "\t".join(["SRS_" + f for f in shortReadFiles]) + "\tIsMaxMapped\tParentRead\tParentReadCount\tParentReadCategory\t" + "\t".join(["PRS_" + f for f in annotatedFiles]) + "\n")
emptyAnnotation = "\t\t\t\t" + "\t".join(["" for af in annotatedFiles]) + "\n"
for shortRead in shortReads:
shortSeq = shortRead.Sequence
shortSeqCount = shortRead.TotalCount
seqMap = shortRead.SampleMap
sw.write("%s\t%s\t%d" % (shortSeq, shortSeqCount, len(shortSeq)))
for fname in shortReadFiles:
if fname in seqMap:
sw.write("\t%s" % seqMap[fname])
else:
sw.write("\t0")
sw.write("\t" + str(shortSeq in maxmappedReads))
bFound = False
for annotatedRead in annotatedReads:
annoSeq = annotatedRead.Sequence
if shortSeq in annoSeq:
bFound = True
sw.write("\t%s\t%s\t%s\t%s\n" % (annoSeq, annotatedRead.TotalCount, "/".join(annotatedRead.Categories[0]), "\t".join(annotatedRead.Counts)))
break
if not bFound:
sw.write(emptyAnnotation)
logger.info("Done.")
def main():
parser = argparse.ArgumentParser(description="Matching short reads with annotated reads.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
DEBUG=False
NOT_DEBUG = not DEBUG
parser.add_argument('-i', '--input', action='store', nargs='?', help='Input short reads', required=NOT_DEBUG)
parser.add_argument('-m', '--maxMapped', action='store', nargs='?', help='Input reads exceed maximum mapping to genome', required=NOT_DEBUG)
parser.add_argument('-a', '--annotated', action='store', nargs='?', help='Input annotated reads', required=NOT_DEBUG)
parser.add_argument('-n', '--names', action='store', nargs='?', help='Input annotated reads categories, split by ''', required=NOT_DEBUG)
parser.add_argument('--maxNumber', action='store', default=100, nargs='?', help='Input number of top short reads for annotation')
parser.add_argument('--minReadCount', action='store', default=3, nargs='?', help='Input minimum copy of short reads in sample for annotation')
parser.add_argument('--minSampleCount', action='store', default=2, nargs='?', help='Input minimum number of sample with valid read count')
parser.add_argument('-o', '--output', action='store', nargs='?', default="-", help="Output prefix of matched reads file", required=NOT_DEBUG)
if NOT_DEBUG and len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
if DEBUG:
args.input = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match__fileList1.list"
args.maxMapped = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match__fileList2.list"
args.annotated = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match__fileList3.list"
args.names = "Host miRNA,Host tRNA,Host snRNA,Host snoRNA,Host rRNA,Host other small RNA,Host Genome,Microbiome Bacteria,Environment Bacteria,Fungus,Non host tRNA,Non host rRNA"
#args.names = "Host miRNA,Host tRNA"
args.output = "T:/Shared/Labs/Vickers Lab/Tiger/projects/20180809_smallRNA_269_933_2002_human/data_visualization/short_reads_source/result/match2"
logger = logging.getLogger('updateCount')
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)-8s - %(message)s')
match(logger, args.input, args.names, args.annotated, args.maxMapped, args.maxNumber, args.minReadCount, args.minSampleCount, args.output)
if __name__ == "__main__":
main()
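# Example invocation (hypothetical file names); each .list file is a
# tab-separated manifest ("file<TAB>sample") as read by match() above, and -n
# must supply one category per row of the annotated list:
#   python updateShortReadParentCount.py -i short_reads.list -m max_mapped.list \
#     -a annotated.list -n "Host miRNA,Host tRNA" -o match_result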
|
shengqh/ngsperl
|
lib/SmallRNA/updateShortReadParentCount.py
|
Python
|
apache-2.0
| 7,328
|
# Copyright (c) 2016 Mirantis, Inc.
# Copyright (c) 2016 AT&T Corp
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from oslo_messaging.rpc import client
from oslo_serialization import jsonutils
from webob import exc
from murano.api.v1 import static_actions
from murano.common import policy
import murano.tests.unit.api.base as tb
@mock.patch.object(policy, 'check')
class TestStaticActionsApi(tb.ControllerTest, tb.MuranoApiTestCase):
def setUp(self):
super(TestStaticActionsApi, self).setUp()
self.controller = static_actions.Controller()
def test_execute_static_action(self, mock_policy_check):
"""Test that action execution results in the correct rpc call."""
self._set_policy_rules(
{'execute_action': '@'}
)
action = {
'method': 'TestAction',
'args': {'name': 'John'},
'class_name': 'TestClass',
'pkg_name': 'TestPackage',
'class_version': '=0'
}
rpc_task = {
'action': action,
'token': None,
'project_id': 'test_tenant',
'user_id': 'test_user',
'id': mock.ANY
}
request_data = {
"className": 'TestClass',
"methodName": 'TestAction',
"packageName": 'TestPackage',
"classVersion": '=0',
"parameters": {'name': 'John'}
}
req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
try:
self.controller.execute(req, request_data)
except TypeError:
pass
self.mock_engine_rpc.call_static_action.assert_called_once_with(
rpc_task)
def test_execute_static_action_handle_bad_data_exc(self, _):
request_data = {
"className": None,
"methodName": 'TestAction'
}
req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
self.assertRaises(exc.HTTPBadRequest, self.controller.execute, req,
request_data)
request_data = {
"className": 'TestClass',
"methodName": None
}
req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
self.assertRaises(exc.HTTPBadRequest, self.controller.execute, req,
request_data)
@mock.patch('murano.services.static_actions.StaticActionServices.execute')
def test_execute_static_action_handle_execute_excs(self, mock_execute, _):
"""Test whether execute handles all exceptions thrown correctly."""
request_data = {
"className": 'TestClass',
"methodName": 'TestAction',
"packageName": 'TestPackage',
"classVersion": '=0',
"parameters": {'name': 'John'}
}
exc_types = ['NoClassFound', 'NoMethodFound',
'NoPackageFound', 'NoPackageForClassFound',
'MethodNotExposed', 'NoMatchingMethodException']
for exc_type in exc_types:
mock_execute.side_effect = client.RemoteError(exc_type=exc_type)
req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
self.assertRaises(exc.HTTPNotFound, self.controller.execute, req,
request_data)
self.assertEqual(mock_execute.call_count, len(exc_types))
exc_type = 'ContractViolationException'
mock_execute.side_effect = client.RemoteError(exc_type=exc_type)
req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
self.assertRaises(exc.HTTPBadRequest, self.controller.execute, req,
request_data)
exc_types.append(exc_type)
self.assertEqual(mock_execute.call_count, len(exc_types))
exc_type = 'ThisIsARandomTestException'
mock_execute.side_effect = client.RemoteError(exc_type=exc_type)
req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
self.assertRaises(exc.HTTPServiceUnavailable, self.controller.execute,
req, request_data)
exc_types.append(exc_type)
self.assertEqual(mock_execute.call_count, len(exc_types))
try:
int('this will throw a value error')
except ValueError as e:
setattr(e, 'message', None)
exc_type = e
mock_execute.side_effect = exc_type
req = self._post('/actions', jsonutils.dump_as_bytes(request_data))
self.assertRaises(exc.HTTPBadRequest, self.controller.execute,
req, request_data)
exc_types.append(exc_type)
self.assertEqual(mock_execute.call_count, len(exc_types))
|
openstack/murano
|
murano/tests/unit/api/v1/test_static_actions.py
|
Python
|
apache-2.0
| 5,261
|
import logging
import datetime
import mediacloud.api
import re
from server import mc
from server.auth import is_user_logged_in
from server.util.csv import SOURCE_LIST_CSV_METADATA_PROPS
logger = logging.getLogger(__name__)
TOPIC_MEDIA_INFO_PROPS = ['media_id', 'name', 'url']
TOPIC_MEDIA_PROPS = ['story_count', 'media_inlink_count', 'inlink_count', 'outlink_count',
'facebook_share_count', 'simple_tweet_count']
TOPIC_MEDIA_URL_SHARING_PROPS = ['sum_post_count', 'sum_channel_count', 'sum_author_count']
TOPIC_MEDIA_CSV_PROPS = TOPIC_MEDIA_INFO_PROPS + TOPIC_MEDIA_PROPS + TOPIC_MEDIA_URL_SHARING_PROPS + \
SOURCE_LIST_CSV_METADATA_PROPS
def _parse_media_ids(args):
media_ids = []
if 'sources[]' in args:
src = args['sources[]']
if isinstance(src, str):
media_ids = src.split(',')
media_ids = " ".join([str(m) for m in media_ids])
src = re.sub(r'\[*\]*', '', str(src))
if len(src) == 0:
media_ids = []
media_ids = src.split(',') if len(src) > 0 else []
else:
media_ids = src
return media_ids
def _parse_collection_ids(args):
collection_ids = []
if 'collections[]' in args:
coll = args['collections[]']
if isinstance(coll, str):
tags_ids = coll.split(',')
tags_ids = " ".join([str(m) for m in tags_ids])
coll = re.sub(r'\[*\]*', '', str(tags_ids))
if len(coll) == 0:
collection_ids = []
else:
collection_ids = coll.split(',') # make a list
else:
collection_ids = coll
return collection_ids
# TODO: Migrate to use mediapicker.concate!
# helper for topic preview queries
def concatenate_query_for_solr(solr_seed_query=None, media_ids=None, tags_ids=None):
query = ''
if solr_seed_query not in [None,'']:
query = '({})'.format(solr_seed_query)
if len(media_ids) > 0 or len(tags_ids) > 0:
if solr_seed_query not in [None,'']:
query += " AND ("
else:
query += "(*) AND ("
# add in the media sources they specified
if len(media_ids) > 0:
media_ids = media_ids.split(',') if isinstance(media_ids, str) else media_ids
query_media_ids = " ".join(map(str, media_ids))
query_media_ids = re.sub(r'\[*\]*', '', str(query_media_ids))
query_media_ids = " media_id:({})".format(query_media_ids)
query += '(' + query_media_ids + ')'
if len(media_ids) > 0 and len(tags_ids) > 0:
query += " OR "
# add in the collections they specified
if len(tags_ids) > 0:
tags_ids = tags_ids.split(',') if isinstance(tags_ids, str) else tags_ids
query_tags_ids = " ".join(map(str, tags_ids))
query_tags_ids = re.sub(r'\[*\]*', '', str(query_tags_ids))
query_tags_ids = " tags_id_media:({})".format(query_tags_ids)
query += '(' + query_tags_ids + ')'
query += ')'
return query
def concatenate_solr_dates(start_date, end_date):
publish_date = mediacloud.api.MediaCloud.dates_as_query_clause(
datetime.datetime.strptime(start_date, '%Y-%m-%d').date(),
datetime.datetime.strptime(end_date, '%Y-%m-%d').date())
return publish_date
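# Usage sketch (hypothetical IDs): combining a seed query with explicit sources
# and collections yields a single Solr clause, e.g.
#   concatenate_query_for_solr('"climate change"', media_ids=[1, 2], tags_ids=[9139487])
#   -> '("climate change") AND (( media_id:(1 2)) OR ( tags_id_media:(9139487)))'
# concatenate_solr_dates('2020-01-01', '2020-02-01') returns the matching
# publish_date range clause via MediaCloud.dates_as_query_clause.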
|
mitmedialab/MediaCloud-Web-Tools
|
server/views/topics/__init__.py
|
Python
|
apache-2.0
| 3,391
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import sorl.thumbnail.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='WebpageSnapshot',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.URLField(db_index=True)),
('image', sorl.thumbnail.fields.ImageField(upload_to='thummer/snapshots', null=True, editable=False)),
('capture_width', models.IntegerField(default=1680, editable=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('captured_at', models.DateTimeField(null=True, editable=False)),
],
options={
'ordering': ['-captured_at'],
'get_latest_by': 'captured_at',
},
),
]
|
mattaustin/django-thummer
|
thummer/migrations/0001_initial.py
|
Python
|
apache-2.0
| 1,008
|
__author__ = "shikun"
import pickle
import os
from common import operateFile
from common.variable import Constants
def write_pickle(dict_data, path="data.pickle"):
read = read_pickle(path)
result = []
if len(read) > 0:
read.append(dict_data)
result = read
else:
result.append(dict_data)
with open(path, 'wb') as f:
pickle.dump(result, f, 0)
def read_pickle(path):
pickle_data = {}
if operateFile.OperateFile(path).check_file():
with open(path, 'rb') as f:
try:
pickle_data = pickle.load(f)
except EOFError:
pass
return pickle_data
if __name__ == "__main__":
data = {"log":"132"}
write_pickle(data, path=Constants.CRASH_LOG_PATH)
read_pickle(path=Constants.CRASH_LOG_PATH)
# operateFile.OperateFile(PATH("data.pickle")).remove_file()
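# Round-trip sketch: write_pickle appends each new dict to the list already
# stored at `path`, so read_pickle returns every record written so far, e.g.
#   write_pickle({"log": "a"}); write_pickle({"log": "b"})
#   read_pickle("data.pickle") -> [{"log": "a"}, {"log": "b"}]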
|
pqpo/appiumn_auto_re-develope
|
common/basePickle.py
|
Python
|
apache-2.0
| 885
|
import itertools
import re
import mock # noqa
import pytest
from awx.main.models import (AdHocCommand, Credential, CredentialType, Job, JobTemplate,
Inventory, InventorySource, Project,
WorkflowJobNode)
from awx.main.utils import decrypt_field
from awx.api.versioning import reverse
EXAMPLE_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nxyz==\n-----END PRIVATE KEY-----'
EXAMPLE_ENCRYPTED_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nProc-Type: 4,ENCRYPTED\nxyz==\n-----END PRIVATE KEY-----'
@pytest.mark.django_db
def test_idempotent_credential_type_setup():
assert CredentialType.objects.count() == 0
CredentialType.setup_tower_managed_defaults()
total = CredentialType.objects.count()
assert total > 0
CredentialType.setup_tower_managed_defaults()
assert CredentialType.objects.count() == total
@pytest.mark.django_db
@pytest.mark.parametrize('kind, total', [
('ssh', 1), ('net', 0)
])
def test_filter_by_v1_kind(get, admin, organization, kind, total):
CredentialType.setup_tower_managed_defaults()
cred = Credential(
credential_type=CredentialType.from_v1_kind('ssh'),
name='Best credential ever',
organization=organization,
inputs={
'username': u'jim',
'password': u'secret'
}
)
cred.save()
response = get(
reverse('api:credential_list', kwargs={'version': 'v1'}),
admin,
QUERY_STRING='kind=%s' % kind
)
assert response.status_code == 200
assert response.data['count'] == total
@pytest.mark.django_db
def test_filter_by_v1_kind_with_vault(get, admin, organization):
CredentialType.setup_tower_managed_defaults()
cred = Credential(
credential_type=CredentialType.objects.get(kind='ssh'),
name='Best credential ever',
organization=organization,
inputs={
'username': u'jim',
'password': u'secret'
}
)
cred.save()
cred = Credential(
credential_type=CredentialType.objects.get(kind='vault'),
name='Best credential ever',
organization=organization,
inputs={
'vault_password': u'vault!'
}
)
cred.save()
response = get(
reverse('api:credential_list', kwargs={'version': 'v1'}),
admin,
QUERY_STRING='kind=ssh'
)
assert response.status_code == 200
assert response.data['count'] == 2
@pytest.mark.django_db
def test_insights_credentials_in_v1_api_list(get, admin, organization):
credential_type = CredentialType.defaults['insights']()
credential_type.save()
cred = Credential(
credential_type=credential_type,
name='Best credential ever',
organization=organization,
inputs={
'username': u'joe',
'password': u'secret'
}
)
cred.save()
response = get(
reverse('api:credential_list', kwargs={'version': 'v1'}),
admin
)
assert response.status_code == 200
assert response.data['count'] == 1
cred = response.data['results'][0]
assert cred['kind'] == 'insights'
assert cred['username'] == 'joe'
assert cred['password'] == '$encrypted$'
@pytest.mark.django_db
def test_create_insights_credentials_in_v1(get, post, admin, organization):
credential_type = CredentialType.defaults['insights']()
credential_type.save()
response = post(
reverse('api:credential_list', kwargs={'version': 'v1'}),
{
'name': 'Best Credential Ever',
'organization': organization.id,
'kind': 'insights',
'username': 'joe',
'password': 'secret'
},
admin
)
assert response.status_code == 201
cred = Credential.objects.get(pk=response.data['id'])
assert cred.username == 'joe'
assert decrypt_field(cred, 'password') == 'secret'
assert cred.credential_type == credential_type
@pytest.mark.django_db
def test_custom_credentials_not_in_v1_api_list(get, admin, organization):
"""
'Custom' credentials (those not managed by Tower) shouldn't be visible from
the V1 credentials API list
"""
credential_type = CredentialType(
kind='cloud',
name='MyCloud',
inputs = {
'fields': [{
'id': 'password',
'label': 'Password',
'type': 'string',
'secret': True
}]
}
)
credential_type.save()
cred = Credential(
credential_type=credential_type,
name='Best credential ever',
organization=organization,
inputs={
'password': u'secret'
}
)
cred.save()
response = get(
reverse('api:credential_list', kwargs={'version': 'v1'}),
admin
)
assert response.status_code == 200
assert response.data['count'] == 0
@pytest.mark.django_db
def test_custom_credentials_not_in_v1_api_detail(get, admin, organization):
"""
'Custom' credentials (those not managed by Tower) shouldn't be visible from
the V1 credentials API detail
"""
credential_type = CredentialType(
kind='cloud',
name='MyCloud',
inputs = {
'fields': [{
'id': 'password',
'label': 'Password',
'type': 'string',
'secret': True
}]
}
)
credential_type.save()
cred = Credential(
credential_type=credential_type,
name='Best credential ever',
organization=organization,
inputs={
'password': u'secret'
}
)
cred.save()
response = get(
reverse('api:credential_detail', kwargs={'version': 'v1', 'pk': cred.pk}),
admin
)
assert response.status_code == 404
@pytest.mark.django_db
def test_filter_by_v1_invalid_kind(get, admin, organization):
response = get(
reverse('api:credential_list', kwargs={'version': 'v1'}),
admin,
QUERY_STRING='kind=bad_kind'
)
assert response.status_code == 400
#
# user credential creation
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh, version, params):
params['user'] = alice.id
params['name'] = 'Some name'
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
alice
)
assert response.status_code == 201
response = get(reverse('api:credential_list', kwargs={'version': version}), alice)
assert response.status_code == 200
assert response.data['count'] == 1
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_credential_validation_error_with_bad_user(post, admin, version, credentialtype_ssh, params):
params['user'] = 'asdf'
params['name'] = 'Some name'
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 400
assert response.data['user'][0] == 'Incorrect type. Expected pk value, received unicode.'
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh, version, params):
params['user'] = alice.id
params['name'] = 'Some name'
response = post(
reverse('api:user_credentials_list', kwargs={'version': version, 'pk': alice.pk}),
params,
alice
)
assert response.status_code == 201
response = get(reverse('api:user_credentials_list', kwargs={'version': version, 'pk': alice.pk}), alice)
assert response.status_code == 200
assert response.data['count'] == 1
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_user_credential_via_credentials_list_xfail(post, alice, bob, version, params):
params['user'] = bob.id
params['name'] = 'Some name'
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
alice
)
assert response.status_code == 403
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_user_credential_via_user_credentials_list_xfail(post, alice, bob, version, params):
params['user'] = bob.id
params['name'] = 'Some name'
response = post(
reverse('api:user_credentials_list', kwargs={'version': version, 'pk': bob.pk}),
params,
alice
)
assert response.status_code == 403
#
# team credential creation
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_team_credential(post, get, team, organization, org_admin, team_member, credentialtype_ssh, version, params):
params['team'] = team.id
params['name'] = 'Some name'
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
org_admin
)
assert response.status_code == 201
response = get(
reverse('api:team_credentials_list', kwargs={'version': version, 'pk': team.pk}),
team_member
)
assert response.status_code == 200
assert response.data['count'] == 1
    # Assure that credential's organization is implicitly set to team's org
assert response.data['results'][0]['summary_fields']['organization']['id'] == team.organization.id
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_team_credential_via_team_credentials_list(post, get, team, org_admin, team_member, credentialtype_ssh, version, params):
params['team'] = team.id
params['name'] = 'Some name'
response = post(
reverse('api:team_credentials_list', kwargs={'version': version, 'pk': team.pk}),
params,
org_admin
)
assert response.status_code == 201
response = get(
reverse('api:team_credentials_list', kwargs={'version': version, 'pk': team.pk}),
team_member
)
assert response.status_code == 200
assert response.data['count'] == 1
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_team_credential_by_unrelated_user_xfail(post, team, organization, alice, team_member, version, params):
params['team'] = team.id
params['organization'] = organization.id
params['name'] = 'Some name'
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
alice
)
assert response.status_code == 403
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_team_credential_by_team_member_xfail(post, team, organization, alice, team_member, version, params):
# Members can't add credentials, only org admins.. for now?
params['team'] = team.id
params['organization'] = organization.id
params['name'] = 'Some name'
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
team_member
)
assert response.status_code == 403
#
# Permission granting
#
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_org_credential_to_org_user_through_role_users(post, credential, organization, org_admin, org_member, version):
credential.organization = organization
credential.save()
response = post(reverse('api:role_users_list', kwargs={'version': version, 'pk': credential.use_role.id}), {
'id': org_member.id
}, org_admin)
assert response.status_code == 204
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_org_credential_to_org_user_through_user_roles(post, credential, organization, org_admin, org_member, version):
credential.organization = organization
credential.save()
response = post(reverse('api:user_roles_list', kwargs={'version': version, 'pk': org_member.id}), {
'id': credential.use_role.id
}, org_admin)
assert response.status_code == 204
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_org_credential_to_non_org_user_through_role_users(post, credential, organization, org_admin, alice, version):
credential.organization = organization
credential.save()
response = post(reverse('api:role_users_list', kwargs={'version': version, 'pk': credential.use_role.id}), {
'id': alice.id
}, org_admin)
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_org_credential_to_non_org_user_through_user_roles(post, credential, organization, org_admin, alice, version):
credential.organization = organization
credential.save()
response = post(reverse('api:user_roles_list', kwargs={'version': version, 'pk': alice.id}), {
'id': credential.use_role.id
}, org_admin)
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_private_credential_to_user_through_role_users(post, credential, alice, bob, version):
# normal users can't do this
credential.admin_role.members.add(alice)
response = post(reverse('api:role_users_list', kwargs={'version': version, 'pk': credential.use_role.id}), {
'id': bob.id
}, alice)
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_private_credential_to_org_user_through_role_users(post, credential, org_admin, org_member, version):
# org admins can't either
credential.admin_role.members.add(org_admin)
response = post(reverse('api:role_users_list', kwargs={'version': version, 'pk': credential.use_role.id}), {
'id': org_member.id
}, org_admin)
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_sa_grant_private_credential_to_user_through_role_users(post, credential, admin, bob, version):
# but system admins can
response = post(reverse('api:role_users_list', kwargs={'version': version, 'pk': credential.use_role.id}), {
'id': bob.id
}, admin)
assert response.status_code == 204
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_private_credential_to_user_through_user_roles(post, credential, alice, bob, version):
# normal users can't do this
credential.admin_role.members.add(alice)
response = post(reverse('api:user_roles_list', kwargs={'version': version, 'pk': bob.id}), {
'id': credential.use_role.id
}, alice)
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_private_credential_to_org_user_through_user_roles(post, credential, org_admin, org_member, version):
# org admins can't either
credential.admin_role.members.add(org_admin)
response = post(reverse('api:user_roles_list', kwargs={'version': version, 'pk': org_member.id}), {
'id': credential.use_role.id
}, org_admin)
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_sa_grant_private_credential_to_user_through_user_roles(post, credential, admin, bob, version):
# but system admins can
response = post(reverse('api:user_roles_list', kwargs={'version': version, 'pk': bob.id}), {
'id': credential.use_role.id
}, admin)
assert response.status_code == 204
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_org_credential_to_team_through_role_teams(post, credential, organization, org_admin, org_auditor, team, version):
assert org_auditor not in credential.read_role
credential.organization = organization
credential.save()
response = post(reverse('api:role_teams_list', kwargs={'version': version, 'pk': credential.use_role.id}), {
'id': team.id
}, org_admin)
assert response.status_code == 204
assert org_auditor in credential.read_role
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_grant_org_credential_to_team_through_team_roles(post, credential, organization, org_admin, org_auditor, team, version):
assert org_auditor not in credential.read_role
credential.organization = organization
credential.save()
response = post(reverse('api:team_roles_list', kwargs={'version': version, 'pk': team.id}), {
'id': credential.use_role.id
}, org_admin)
assert response.status_code == 204
assert org_auditor in credential.read_role
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_sa_grant_private_credential_to_team_through_role_teams(post, credential, admin, team, version):
# not even a system admin can grant a private cred to a team though
response = post(reverse('api:role_teams_list', kwargs={'version': version, 'pk': credential.use_role.id}), {
'id': team.id
}, admin)
assert response.status_code == 400
@pytest.mark.django_db
@pytest.mark.parametrize('version', ['v1', 'v2'])
def test_sa_grant_private_credential_to_team_through_team_roles(post, credential, admin, team, version):
# not even a system admin can grant a private cred to a team though
response = post(reverse('api:role_teams_list', kwargs={'version': version, 'pk': team.id}), {
'id': credential.use_role.id
}, admin)
assert response.status_code == 400
#
# organization credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_org_credential_as_not_admin(post, organization, org_member, credentialtype_ssh, version, params):
params['name'] = 'Some name'
params['organization'] = organization.id
response = post(
reverse('api:credential_list'),
params,
org_member
)
assert response.status_code == 403
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_create_org_credential_as_admin(post, organization, org_admin, credentialtype_ssh, version, params):
params['name'] = 'Some name'
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
org_admin
)
assert response.status_code == 201
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_credential_detail(post, get, organization, org_admin, credentialtype_ssh, version, params):
params['name'] = 'Some name'
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
org_admin
)
assert response.status_code == 201
response = get(
reverse('api:credential_detail', kwargs={'version': version, 'pk': response.data['id']}),
org_admin
)
assert response.status_code == 200
summary_fields = response.data['summary_fields']
assert 'organization' in summary_fields
related_fields = response.data['related']
assert 'organization' in related_fields
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'username': 'someusername'}],
['v2', {'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
def test_list_created_org_credentials(post, get, organization, org_admin, org_member, credentialtype_ssh, version, params):
params['name'] = 'Some name'
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
org_admin
)
assert response.status_code == 201
response = get(
reverse('api:credential_list', kwargs={'version': version}),
org_admin
)
assert response.status_code == 200
assert response.data['count'] == 1
response = get(
reverse('api:credential_list', kwargs={'version': version}),
org_member
)
assert response.status_code == 200
assert response.data['count'] == 0
response = get(
reverse('api:organization_credential_list', kwargs={'version': version, 'pk': organization.pk}),
org_admin
)
assert response.status_code == 200
assert response.data['count'] == 1
response = get(
reverse('api:organization_credential_list', kwargs={'version': version, 'pk': organization.pk}),
org_member
)
assert response.status_code == 200
assert response.data['count'] == 0
@pytest.mark.parametrize('order_by', ('password', '-password', 'password,pk', '-password,pk'))
@pytest.mark.parametrize('version', ('v1', 'v2'))
@pytest.mark.django_db
def test_list_cannot_order_by_encrypted_field(post, get, organization, org_admin, credentialtype_ssh, order_by, version):
for i, password in enumerate(('abc', 'def', 'xyz')):
response = post(
reverse('api:credential_list', kwargs={'version': version}),
{
'organization': organization.id,
'name': 'C%d' % i,
'password': password
},
org_admin
)
response = get(
reverse('api:credential_list', kwargs={'version': version}),
org_admin,
QUERY_STRING='order_by=%s' % order_by,
status=400
)
assert response.status_code == 400
@pytest.mark.django_db
def test_v1_credential_kind_validity(get, post, organization, admin, credentialtype_ssh):
params = {
'name': 'Best credential ever',
'organization': organization.id,
'kind': 'nonsense'
}
response = post(
reverse('api:credential_list', kwargs={'version': 'v1'}),
params,
admin
)
assert response.status_code == 400
assert response.data['kind'] == ['"nonsense" is not a valid choice']
@pytest.mark.django_db
def test_inputs_cannot_contain_extra_fields(get, post, organization, admin, credentialtype_ssh):
params = {
'name': 'Best credential ever',
'organization': organization.id,
'credential_type': credentialtype_ssh.pk,
'inputs': {
'invalid_field': 'foo'
},
}
response = post(
reverse('api:credential_list', kwargs={'version': 'v2'}),
params,
admin
)
assert response.status_code == 400
assert "'invalid_field' was unexpected" in response.data['inputs'][0]
@pytest.mark.django_db
@pytest.mark.parametrize('field_name, field_value', itertools.product(
['username', 'password', 'ssh_key_data', 'become_method', 'become_username', 'become_password'], # noqa
['', None]
))
def test_nullish_field_data(get, post, organization, admin, field_name, field_value):
ssh = CredentialType.defaults['ssh']()
ssh.save()
params = {
'name': 'Best credential ever',
'credential_type': ssh.pk,
'organization': organization.id,
'inputs': {
field_name: field_value
}
}
response = post(
reverse('api:credential_list', kwargs={'version': 'v2'}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert getattr(cred, field_name) == ''
@pytest.mark.django_db
@pytest.mark.parametrize('field_value', ['', None, False])
def test_falsey_field_data(get, post, organization, admin, field_value):
net = CredentialType.defaults['net']()
net.save()
params = {
'name': 'Best credential ever',
'credential_type': net.pk,
'organization': organization.id,
'inputs': {
'username': 'joe-user', # username is required
'authorize': field_value
}
}
response = post(
reverse('api:credential_list', kwargs={'version': 'v2'}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.authorize is False
@pytest.mark.django_db
@pytest.mark.parametrize('kind, extraneous', [
['ssh', 'ssh_key_unlock'],
['scm', 'ssh_key_unlock'],
['net', 'ssh_key_unlock'],
['net', 'authorize_password'],
])
def test_field_dependencies(get, post, organization, admin, kind, extraneous):
_type = CredentialType.defaults[kind]()
_type.save()
params = {
'name': 'Best credential ever',
'credential_type': _type.pk,
'organization': organization.id,
'inputs': {extraneous: 'not needed'}
}
response = post(
reverse('api:credential_list', kwargs={'version': 'v2'}),
params,
admin
)
assert response.status_code == 400
assert re.search('cannot be set unless .+ is set.', response.content)
assert Credential.objects.count() == 0
#
# SCM Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'scm',
'name': 'Best credential ever',
'username': 'some_username',
'password': 'some_password',
'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
'ssh_key_unlock': 'some_key_unlock',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'username': 'some_username',
'password': 'some_password',
'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
'ssh_key_unlock': 'some_key_unlock',
}
}]
])
def test_scm_create_ok(post, organization, admin, version, params):
scm = CredentialType.defaults['scm']()
scm.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
assert decrypt_field(cred, 'ssh_key_data') == EXAMPLE_ENCRYPTED_PRIVATE_KEY
assert decrypt_field(cred, 'ssh_key_unlock') == 'some_key_unlock'
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'ssh',
'name': 'Best credential ever',
'password': 'secret',
'vault_password': '',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'password': 'secret',
}
}]
])
def test_ssh_create_ok(post, organization, admin, version, params):
ssh = CredentialType.defaults['ssh']()
ssh.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.credential_type == ssh
assert decrypt_field(cred, 'password') == 'secret'
@pytest.mark.django_db
def test_v1_ssh_vault_ambiguity(post, organization, admin):
vault = CredentialType.defaults['vault']()
vault.save()
params = {
'organization': organization.id,
'kind': 'ssh',
'name': 'Best credential ever',
'username': 'joe',
'password': 'secret',
'ssh_key_data': 'some_key_data',
'ssh_key_unlock': 'some_key_unlock',
'vault_password': 'vault_password',
}
response = post(
reverse('api:credential_list', kwargs={'version': 'v1'}),
params,
admin
)
assert response.status_code == 400
#
# Vault Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'ssh',
'name': 'Best credential ever',
'vault_password': 'some_password',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'vault_password': 'some_password',
}
}]
])
def test_vault_create_ok(post, organization, admin, version, params):
vault = CredentialType.defaults['vault']()
vault.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert decrypt_field(cred, 'vault_password') == 'some_password'
@pytest.mark.django_db
def test_vault_password_required(post, organization, admin):
vault = CredentialType.defaults['vault']()
vault.save()
response = post(
reverse('api:credential_list', kwargs={'version': 'v2'}),
{
'credential_type': vault.pk,
'organization': organization.id,
'name': 'Best credential ever',
'inputs': {}
},
admin
)
assert response.status_code == 400
assert response.data['inputs'] == {'vault_password': ['required for Vault']}
assert Credential.objects.count() == 0
#
# Net Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'net',
'name': 'Best credential ever',
'username': 'some_username',
'password': 'some_password',
'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
'ssh_key_unlock': 'some_key_unlock',
'authorize': True,
'authorize_password': 'some_authorize_password',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'username': 'some_username',
'password': 'some_password',
'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
'ssh_key_unlock': 'some_key_unlock',
'authorize': True,
'authorize_password': 'some_authorize_password',
}
}]
])
def test_net_create_ok(post, organization, admin, version, params):
net = CredentialType.defaults['net']()
net.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
assert decrypt_field(cred, 'ssh_key_data') == EXAMPLE_ENCRYPTED_PRIVATE_KEY
assert decrypt_field(cred, 'ssh_key_unlock') == 'some_key_unlock'
assert decrypt_field(cred, 'authorize_password') == 'some_authorize_password'
assert cred.inputs['authorize'] is True
#
# Cloudforms Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'cloudforms',
'name': 'Best credential ever',
'host': 'some_host',
'username': 'some_username',
'password': 'some_password',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'host': 'some_host',
'username': 'some_username',
'password': 'some_password',
}
}]
])
def test_cloudforms_create_ok(post, organization, admin, version, params):
cloudforms = CredentialType.defaults['cloudforms']()
cloudforms.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['host'] == 'some_host'
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
#
# GCE Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'gce',
'name': 'Best credential ever',
'username': 'some_username',
'project': 'some_project',
'ssh_key_data': EXAMPLE_PRIVATE_KEY,
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'username': 'some_username',
'project': 'some_project',
'ssh_key_data': EXAMPLE_PRIVATE_KEY,
}
}]
])
def test_gce_create_ok(post, organization, admin, version, params):
gce = CredentialType.defaults['gce']()
gce.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['username'] == 'some_username'
assert cred.inputs['project'] == 'some_project'
assert decrypt_field(cred, 'ssh_key_data') == EXAMPLE_PRIVATE_KEY
#
# Azure Resource Manager
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'azure_rm',
'name': 'Best credential ever',
'subscription': 'some_subscription',
'username': 'some_username',
'password': 'some_password',
'client': 'some_client',
'secret': 'some_secret',
'tenant': 'some_tenant'
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'subscription': 'some_subscription',
'username': 'some_username',
'password': 'some_password',
'client': 'some_client',
'secret': 'some_secret',
'tenant': 'some_tenant'
}
}]
])
def test_azure_rm_create_ok(post, organization, admin, version, params):
azure_rm = CredentialType.defaults['azure_rm']()
azure_rm.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['subscription'] == 'some_subscription'
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
assert cred.inputs['client'] == 'some_client'
assert decrypt_field(cred, 'secret') == 'some_secret'
assert cred.inputs['tenant'] == 'some_tenant'
#
# RH Satellite6 Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'satellite6',
'name': 'Best credential ever',
'host': 'some_host',
'username': 'some_username',
'password': 'some_password',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'host': 'some_host',
'username': 'some_username',
'password': 'some_password',
}
}]
])
def test_satellite6_create_ok(post, organization, admin, version, params):
sat6 = CredentialType.defaults['satellite6']()
sat6.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['host'] == 'some_host'
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
#
# AWS Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'aws',
'name': 'Best credential ever',
'username': 'some_username',
'password': 'some_password',
'security_token': 'abc123'
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'username': 'some_username',
'password': 'some_password',
'security_token': 'abc123'
}
}]
])
def test_aws_create_ok(post, organization, admin, version, params):
aws = CredentialType.defaults['aws']()
aws.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
assert decrypt_field(cred, 'security_token') == 'abc123'
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'aws',
'name': 'Best credential ever',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {}
}]
])
def test_aws_create_fail_required_fields(post, organization, admin, version, params):
aws = CredentialType.defaults['aws']()
aws.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 400
assert Credential.objects.count() == 0
errors = response.data
if version == 'v2':
errors = response.data['inputs']
assert errors['username'] == ['required for %s' % aws.name]
assert errors['password'] == ['required for %s' % aws.name]
#
# VMware vCenter Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'vmware',
'host': 'some_host',
'name': 'Best credential ever',
'username': 'some_username',
'password': 'some_password'
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {
'host': 'some_host',
'username': 'some_username',
'password': 'some_password'
}
}]
])
def test_vmware_create_ok(post, organization, admin, version, params):
vmware = CredentialType.defaults['vmware']()
vmware.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.inputs['host'] == 'some_host'
assert cred.inputs['username'] == 'some_username'
assert decrypt_field(cred, 'password') == 'some_password'
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'vmware',
'name': 'Best credential ever',
}],
['v2', {
'credential_type': 1,
'name': 'Best credential ever',
'inputs': {}
}]
])
def test_vmware_create_fail_required_fields(post, organization, admin, version, params):
vmware = CredentialType.defaults['vmware']()
vmware.save()
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 400
assert Credential.objects.count() == 0
errors = response.data
if version == 'v2':
errors = response.data['inputs']
assert errors['username'] == ['required for %s' % vmware.name]
assert errors['password'] == ['required for %s' % vmware.name]
assert errors['host'] == ['required for %s' % vmware.name]
#
# Openstack Credentials
#
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'username': 'some_user',
'password': 'some_password',
'project': 'some_project',
'host': 'some_host',
}],
['v2', {
'credential_type': 1,
'inputs': {
'username': 'some_user',
'password': 'some_password',
'project': 'some_project',
'host': 'some_host',
}
}]
])
def test_openstack_create_ok(post, organization, admin, version, params):
openstack = CredentialType.defaults['openstack']()
openstack.save()
params['kind'] = 'openstack'
params['name'] = 'Best credential ever'
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {}],
['v2', {
'credential_type': 1,
'inputs': {}
}]
])
def test_openstack_create_fail_required_fields(post, organization, admin, version, params):
openstack = CredentialType.defaults['openstack']()
openstack.save()
params['kind'] = 'openstack'
params['name'] = 'Best credential ever'
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 400
errors = response.data
if version == 'v2':
errors = response.data['inputs']
assert errors['username'] == ['required for %s' % openstack.name]
assert errors['password'] == ['required for %s' % openstack.name]
assert errors['host'] == ['required for %s' % openstack.name]
assert errors['project'] == ['required for %s' % openstack.name]
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'name': 'Best credential ever',
'kind': 'ssh',
'username': 'joe',
'password': '',
}],
['v2', {
'name': 'Best credential ever',
'credential_type': 1,
'inputs': {
'username': 'joe',
'password': '',
}
}]
])
def test_field_removal(put, organization, admin, credentialtype_ssh, version, params):
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
inputs={
'username': u'jim',
'password': u'secret'
}
)
cred.save()
params['organization'] = organization.id
response = put(
reverse('api:credential_detail', kwargs={'version': version, 'pk': cred.pk}),
params,
admin
)
assert response.status_code == 200
cred = Credential.objects.all()[:1].get()
assert cred.inputs['username'] == 'joe'
assert 'password' not in cred.inputs
@pytest.mark.django_db
@pytest.mark.parametrize('relation, related_obj', [
['ad_hoc_commands', AdHocCommand()],
['insights_inventories', Inventory()],
['unifiedjobs', Job()],
['unifiedjobtemplates', JobTemplate()],
['unifiedjobtemplates', InventorySource()],
['projects', Project()],
['workflowjobnodes', WorkflowJobNode()],
])
def test_credential_type_mutability(patch, organization, admin, credentialtype_ssh,
credentialtype_aws, relation, related_obj):
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
inputs={
'username': u'jim',
'password': u'pass'
}
)
cred.save()
related_obj.save()
getattr(cred, relation).add(related_obj)
def _change_credential_type():
return patch(
reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
{
'credential_type': credentialtype_aws.pk,
'inputs': {
'username': u'jim',
'password': u'pass'
}
},
admin
)
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, '
'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(
reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
{'name': 'Worst credential ever'},
admin
)
assert response.status_code == 200
assert Credential.objects.get(pk=cred.pk).name == 'Worst credential ever'
related_obj.delete()
response = _change_credential_type()
assert response.status_code == 200
@pytest.mark.django_db
def test_vault_credential_type_mutability(patch, organization, admin, credentialtype_ssh,
credentialtype_vault):
cred = Credential(
credential_type=credentialtype_vault,
name='Best credential ever',
organization=organization,
inputs={
'vault_password': u'some-vault',
}
)
cred.save()
jt = JobTemplate()
jt.save()
jt.credentials.add(cred)
def _change_credential_type():
return patch(
reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
{
'credential_type': credentialtype_ssh.pk,
'inputs': {
'username': u'jim',
'password': u'pass'
}
},
admin
)
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, '
'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(
reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
{'name': 'Worst credential ever'},
admin
)
assert response.status_code == 200
assert Credential.objects.get(pk=cred.pk).name == 'Worst credential ever'
jt.delete()
response = _change_credential_type()
assert response.status_code == 200
@pytest.mark.django_db
def test_cloud_credential_type_mutability(patch, organization, admin, credentialtype_ssh,
credentialtype_aws):
cred = Credential(
credential_type=credentialtype_aws,
name='Best credential ever',
organization=organization,
inputs={
'username': u'jim',
'password': u'pass'
}
)
cred.save()
jt = JobTemplate()
jt.save()
jt.credentials.add(cred)
def _change_credential_type():
return patch(
reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
{
'credential_type': credentialtype_ssh.pk,
'inputs': {
'username': u'jim',
'password': u'pass'
}
},
admin
)
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, '
'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(
reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
{'name': 'Worst credential ever'},
admin
)
assert response.status_code == 200
assert Credential.objects.get(pk=cred.pk).name == 'Worst credential ever'
jt.delete()
response = _change_credential_type()
assert response.status_code == 200
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'name': 'Best credential ever',
'kind': 'ssh',
'username': 'joe',
'ssh_key_data': '$encrypted$',
}],
['v2', {
'name': 'Best credential ever',
'credential_type': 1,
'inputs': {
'username': 'joe',
'ssh_key_data': '$encrypted$',
}
}]
])
def test_ssh_unlock_needed(put, organization, admin, credentialtype_ssh, version, params):
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
inputs={
'username': u'joe',
'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
'ssh_key_unlock': 'unlock'
}
)
cred.save()
params['organization'] = organization.id
response = put(
reverse('api:credential_detail', kwargs={'version': version, 'pk': cred.pk}),
params,
admin
)
assert response.status_code == 400
assert response.data['inputs']['ssh_key_unlock'] == ['must be set when SSH key is encrypted.']
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'name': 'Best credential ever',
'kind': 'ssh',
'username': 'joe',
'ssh_key_data': '$encrypted$',
'ssh_key_unlock': 'superfluous-key-unlock',
}],
['v2', {
'name': 'Best credential ever',
'credential_type': 1,
'inputs': {
'username': 'joe',
'ssh_key_data': '$encrypted$',
'ssh_key_unlock': 'superfluous-key-unlock',
}
}]
])
def test_ssh_unlock_not_needed(put, organization, admin, credentialtype_ssh, version, params):
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
inputs={
'username': u'joe',
'ssh_key_data': EXAMPLE_PRIVATE_KEY,
}
)
cred.save()
params['organization'] = organization.id
response = put(
reverse('api:credential_detail', kwargs={'version': version, 'pk': cred.pk}),
params,
admin
)
assert response.status_code == 400
assert response.data['inputs']['ssh_key_unlock'] == ['should not be set when SSH key is not encrypted.']
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'name': 'Best credential ever',
'kind': 'ssh',
'username': 'joe',
'ssh_key_data': '$encrypted$',
'ssh_key_unlock': 'new-unlock',
}],
['v2', {
'name': 'Best credential ever',
'credential_type': 1,
'inputs': {
'username': 'joe',
'ssh_key_data': '$encrypted$',
'ssh_key_unlock': 'new-unlock',
}
}]
])
def test_ssh_unlock_with_prior_value(put, organization, admin, credentialtype_ssh, version, params):
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
inputs={
'username': u'joe',
'ssh_key_data': EXAMPLE_ENCRYPTED_PRIVATE_KEY,
'ssh_key_unlock': 'old-unlock'
}
)
cred.save()
params['organization'] = organization.id
response = put(
reverse('api:credential_detail', kwargs={'version': version, 'pk': cred.pk}),
params,
admin
)
assert response.status_code == 200
cred = Credential.objects.all()[:1].get()
assert decrypt_field(cred, 'ssh_key_unlock') == 'new-unlock'
#
# test secret encryption/decryption
#
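# Orientation for the tests below (inferred from the assertions themselves,
# not from separate documentation): plaintext secrets posted to the API are
# stored encrypted (the raw DB value starts with '$encrypted$UTF8$AES'); GET
# responses echo the literal placeholder '$encrypted$' instead of the secret;
# decrypt_field() recovers the plaintext; and sending '$encrypted$' back on
# update means "keep the previously stored value".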
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'kind': 'ssh',
'username': 'joe',
'password': 'secret',
}],
['v2', {
'credential_type': 1,
'inputs': {
'username': 'joe',
'password': 'secret',
}
}]
])
def test_secret_encryption_on_create(get, post, organization, admin, credentialtype_ssh, version, params):
params['name'] = 'Best credential ever'
params['organization'] = organization.id
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 201
response = get(
reverse('api:credential_list', kwargs={'version': version}),
admin
)
assert response.status_code == 200
assert response.data['count'] == 1
cred = response.data['results'][0]
if version == 'v1':
assert cred['username'] == 'joe'
assert cred['password'] == '$encrypted$'
elif version == 'v2':
assert cred['inputs']['username'] == 'joe'
assert cred['inputs']['password'] == '$encrypted$'
cred = Credential.objects.all()[:1].get()
assert cred.inputs['password'].startswith('$encrypted$UTF8$AES')
assert decrypt_field(cred, 'password') == 'secret'
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {'password': 'secret'}],
['v2', {'inputs': {'username': 'joe', 'password': 'secret'}}]
])
def test_secret_encryption_on_update(get, post, patch, organization, admin, credentialtype_ssh, version, params):
response = post(
reverse('api:credential_list', kwargs={'version': 'v2'}),
{
'name': 'Best credential ever',
'organization': organization.id,
'credential_type': 1,
'inputs': {
'username': 'joe',
}
},
admin
)
assert response.status_code == 201
response = patch(
reverse('api:credential_detail', kwargs={'pk': 1, 'version': version}),
params,
admin
)
assert response.status_code == 200
response = get(
reverse('api:credential_list', kwargs={'version': version}),
admin
)
assert response.status_code == 200
assert response.data['count'] == 1
cred = response.data['results'][0]
if version == 'v1':
assert cred['username'] == 'joe'
assert cred['password'] == '$encrypted$'
elif version == 'v2':
assert cred['inputs']['username'] == 'joe'
assert cred['inputs']['password'] == '$encrypted$'
cred = Credential.objects.all()[:1].get()
assert cred.inputs['password'].startswith('$encrypted$UTF8$AES')
assert decrypt_field(cred, 'password') == 'secret'
@pytest.mark.django_db
@pytest.mark.parametrize('version, params', [
['v1', {
'username': 'joe',
'password': '$encrypted$',
}],
['v2', {
'inputs': {
'username': 'joe',
'password': '$encrypted$',
}
}]
])
def test_secret_encryption_previous_value(patch, organization, admin, credentialtype_ssh, version, params):
cred = Credential(
credential_type=credentialtype_ssh,
name='Best credential ever',
organization=organization,
inputs={
'username': u'jim',
'password': u'secret'
}
)
cred.save()
assert decrypt_field(cred, 'password') == 'secret'
response = patch(
reverse('api:credential_detail', kwargs={'pk': cred.pk, 'version': version}),
params,
admin
)
assert response.status_code == 200
cred = Credential.objects.all()[:1].get()
assert cred.inputs['username'] == 'joe'
assert cred.inputs['password'].startswith('$encrypted$UTF8$AES')
assert decrypt_field(cred, 'password') == 'secret'
@pytest.mark.django_db
def test_custom_credential_type_create(get, post, organization, admin):
credential_type = CredentialType(
kind='cloud',
name='MyCloud',
		inputs={
'fields': [{
'id': 'api_token',
'label': 'API Token',
'type': 'string',
'secret': True
}]
}
)
credential_type.save()
params = {
'name': 'Best credential ever',
'organization': organization.pk,
'credential_type': credential_type.pk,
'inputs': {
'api_token': 'secret'
}
}
response = post(
reverse('api:credential_list', kwargs={'version': 'v2'}),
params,
admin
)
assert response.status_code == 201
response = get(
reverse('api:credential_list', kwargs={'version': 'v2'}),
admin
)
assert response.status_code == 200
assert response.data['count'] == 1
cred = response.data['results'][0]
assert cred['inputs']['api_token'] == '$encrypted$'
cred = Credential.objects.all()[:1].get()
assert cred.inputs['api_token'].startswith('$encrypted$UTF8$AES')
assert decrypt_field(cred, 'api_token') == 'secret'
#
# misc xfail conditions
#
@pytest.mark.parametrize('version, params', [
['v1', {'name': 'Some name', 'username': 'someusername'}],
['v2', {'name': 'Some name', 'credential_type': 1, 'inputs': {'username': 'someusername'}}]
])
@pytest.mark.django_db
def test_create_credential_missing_user_team_org_xfail(post, admin, credentialtype_ssh, version, params):
# Must specify one of user, team, or organization
response = post(
reverse('api:credential_list', kwargs={'version': version}),
params,
admin
)
assert response.status_code == 400
|
wwitzel3/awx
|
awx/main/tests/functional/api/test_credential.py
|
Python
|
apache-2.0
| 59,080
|
__author__ = "UShareSoft"
from texttable import Texttable
from ussclicore.argumentParser import ArgumentParser, ArgumentParserError
from ussclicore.cmd import Cmd, CoreGlobal
from uforgecli.utils import org_utils
from ussclicore.utils import printer
from ussclicore.utils import generics_utils
from uforgecli.utils.uforgecli_utils import *
from uforge.objects import uforge
from subscription_admin import Subscription_Admins
from subscription_role import Subscription_Roles
from subscription_format import Subscription_Format
from subscription_os import Subscription_Os
from subscription_quota import Subscription_Quota
from uforgecli.utils import uforgecli_utils
import pyxb
import shlex
import sys
class Subscription_Cmd(Cmd, CoreGlobal):
"""Manage subscription profiles : list profile, create profiles, update profiles"""
cmd_name = "subscription"
def __init__(self):
self.subCmds = {}
self.generate_sub_commands()
super(Subscription_Cmd, self).__init__()
def generate_sub_commands(self):
subscriptionRoles = Subscription_Roles()
self.subCmds[subscriptionRoles.cmd_name] = subscriptionRoles
subscriptionAdmins = Subscription_Admins()
self.subCmds[subscriptionAdmins.cmd_name] = subscriptionAdmins
subscriptionFormat = Subscription_Format()
self.subCmds[subscriptionFormat.cmd_name] = subscriptionFormat
subscriptionOs = Subscription_Os()
self.subCmds[subscriptionOs.cmd_name] = subscriptionOs
subscriptionQuota = Subscription_Quota()
self.subCmds[subscriptionQuota.cmd_name] = subscriptionQuota
def arg_list(self):
doParser = ArgumentParser(prog=self.cmd_name + " list", add_help=True, description="List all the subscription profiles for a given organization. If no organization is provided the default organization is used.")
optional = doParser.add_argument_group("optional arguments")
optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.")
return doParser
def do_list(self, args):
try:
doParser = self.arg_list()
doArgs = doParser.parse_args(shlex.split(args))
org = org_utils.org_get(self.api, doArgs.org)
# call UForge API
printer.out("Getting all the subscription profiles for organization ...")
subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None)
subscriptions = generics_utils.order_list_object_by(subscriptions.subscriptionProfiles.subscriptionProfile, "name")
if subscriptions is None or len(subscriptions) == 0:
printer.out("There is no subscriptions in [" + org.name + "] ")
return 0
printer.out("List of subscription profiles in [" + org.name + "] :")
table = Texttable(200)
table.set_cols_align(["c", "c", "c", "c"])
table.header(["Name", "Code", "Active", "description"])
for subscription in subscriptions:
if subscription.active:
active = "X"
else:
active = ""
table.add_row([subscription.name, subscription.code, active, subscription.description])
print table.draw() + "\n"
printer.out("Foumd " + str(len(subscriptions)) + " subscription profile(s).")
return 0
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
self.help_list()
except Exception as e:
return handle_uforge_exception(e)
def help_list(self):
doParser = self.arg_list()
doParser.print_help()
def arg_info(self):
doParser = ArgumentParser(prog=self.cmd_name + " info", add_help=True, description="Get detailed information on a subscription profile within an organization.")
mandatory = doParser.add_argument_group("mandatory arguments")
optional = doParser.add_argument_group("optional arguments")
mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile")
optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.")
return doParser
def do_info(self, args):
try:
# add arguments
doParser = self.arg_info()
doArgs = doParser.parse_args(shlex.split(args))
# call UForge API
printer.out("Getting subscription profile with name [" + doArgs.name + "]...")
org = org_utils.org_get(self.api, doArgs.org)
subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None)
printer.out("Subscription profile for [" + doArgs.name + "] :")
subscription = subscriptions.subscriptionProfiles.subscriptionProfile
exist = False
for item in subscription:
if item.name == doArgs.name:
exist = True
subscription = item
if not exist:
printer.out("Subscription profile requested don't exist in [" + org.name + "]")
return 0
table = Texttable(200)
table.set_cols_align(["l", "l"])
table.header(["Info", "Value"])
table.add_row(["Name", subscription.name])
table.add_row(["Code", subscription.code])
if subscription.active:
active = "X"
else:
active = ""
table.add_row(["Active", active])
if subscription.roles.role:
nb = len(subscription.roles.role)
table.add_row(["Roles", str(nb)])
else:
table.add_row(["Roles", "None"])
if subscription.admins.admin:
nbAdmin = len(subscription.admins.admin)
table.add_row(["Administrators", str(nbAdmin)])
else:
table.add_row(["Administrators", "None"])
if subscription.distributions.distribution:
nbDist = len(subscription.distributions.distribution)
table.add_row(["Operating Systems", str(nbDist)])
else:
table.add_row(["Operating Systems", "None"])
if subscription.formats.format:
nbFormat = len(subscription.formats.format)
table.add_row(["Image Formats", str(nbFormat)])
else:
table.add_row(["Image Formats", "None"])
print table.draw() + "\n"
			if subscription.description is not None and subscription.description != "":
printer.out("Description : " + subscription.description + "\n")
if subscription.admins.admin:
nb = subscription.admins.admin
nb = len(nb)
printer.out("Administrator Details :")
table = Texttable(200)
table.set_cols_align(["l"])
table.header(["Name"])
for item in subscription.admins.admin:
table.add_row([item.name])
print table.draw() + "\n"
printer.out("Found " + str(nb) + " administrator(s).\n")
else:
printer.out("Subscription profile doesn't have any administrator.\n")
if subscription.roles.role:
printer.out("Role Details :")
table = Texttable(200)
table.set_cols_align(["l"])
table.header(["Name"])
for item in subscription.roles.role:
table.add_row([item.name])
print table.draw() + "\n"
else:
printer.out("Subscription profile doesn't have any roles.\n")
if subscription.distributions.distribution:
nb = subscription.distributions.distribution
nb = len(nb)
printer.out("Operating system Details :")
table = Texttable(200)
table.set_cols_align(["l", "l", "l", "l", "l", "l"])
table.header(["Distribution", "Version", "Architecture", "Access", "Visible", "Release Date"])
for item in subscription.distributions.distribution:
if item.active:
active = "X"
else:
active = ""
if item.visible:
visible = "X"
else:
visible = ""
if item.releaseDate is None:
releaseDate = "Unknown"
else:
releaseDate = item.releaseDate
table.add_row([item.name, item.version, item.arch, active, visible, releaseDate])
print table.draw() + "\n"
printer.out("Found " + str(nb) + " distribution(s).\n")
else:
printer.out("Subscription profile doesn't have any distribution.\n")
if subscription.formats.format:
printer.out("Formats Details :")
table = Texttable(200)
table.set_cols_align(["l", "l"])
table.header(["Format", "Access"])
for item in subscription.formats.format:
if item.access:
access = "X"
else:
access = ""
table.add_row([item.name, access])
print table.draw() + "\n"
printer.out("Found " + str(nbFormat) + " format(s).\n")
else:
printer.out("Subscription profile doesn't have any formats.\n")
return 0
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
self.help_info()
except Exception as e:
return handle_uforge_exception(e)
def help_info(self):
doParser = self.arg_info()
doParser.print_help()
def arg_create(self):
doParser = ArgumentParser(prog=self.cmd_name + " create", add_help=True, description="Create a new subscription profile within an organization.")
mandatory = doParser.add_argument_group("mandatory arguments")
optional = doParser.add_argument_group("optional arguments")
mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to create")
mandatory.add_argument('--code', dest='code', required=True, help="The code of the subscription profile to create")
optional.add_argument('--description', dest='description', type=str, required=False, help="The description of the subscription profile to create")
optional.add_argument('--active', dest='active', action='store_true', required=False, help="Flag to make the subscription profile active.")
optional.add_argument('--admins', dest='admins', nargs='+', required=False, help="Admin users to be added to the subscription profile that can use the subscription profile to create a user (users separated by spaces)")
optional.add_argument('--roles', dest='roles', nargs='+', required=False, help="Roles to be added to the subscription profile")
optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.")
return doParser
def do_create(self, args):
try:
# add arguments
doParser = self.arg_create()
doArgs = doParser.parse_args(shlex.split(args))
org = org_utils.org_get(self.api, doArgs.org)
# call UForge API
printer.out("Creating subscription profile [" + doArgs.name + "] ...")
			# build the new subscription profile object
new_subscription_profile = subscriptionProfile()
new_subscription_profile.name = doArgs.name
new_subscription_profile.code = doArgs.code
if doArgs.description:
new_subscription_profile.description = doArgs.description
if doArgs.active:
new_subscription_profile.active = doArgs.active
new_subscription_profile.admins = pyxb.BIND()
if doArgs.admins:
for a in doArgs.admins:
new_admin = user()
new_admin.loginName = a
new_subscription_profile.admins.append(new_admin)
new_subscription_profile.roles = pyxb.BIND()
if doArgs.roles:
for a in doArgs.roles:
new_role = role()
new_role.name = a
new_subscription_profile.roles.append(new_role)
			# Send the create subscription profile request to the server
new_subscription_profile = self.api.Orgs(org.dbId).Subscriptions().Add(new_subscription_profile)
if new_subscription_profile is None:
printer.out("No information about the new subscription profile available", printer.ERROR)
else:
printer.out("New subscription profile [" + new_subscription_profile.name + "] created.", printer.OK)
table = Texttable(200)
table.set_cols_align(["c", "c", "c"])
table.header(
["Name", "Code", "Active"])
table.add_row([new_subscription_profile.name, new_subscription_profile.code,
"X" if new_subscription_profile.active else ""])
print table.draw() + "\n"
return 0
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
self.help_create()
except Exception as e:
return handle_uforge_exception(e)
def help_create(self):
doParser = self.arg_create()
doParser.print_help()
def arg_delete(self):
doParser = ArgumentParser(prog=self.cmd_name + " delete", add_help=True, description="Delete a subscription profile from an organization.")
mandatory = doParser.add_argument_group("mandatory arguments")
optional = doParser.add_argument_group("optional arguments")
mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to delete")
optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.")
return doParser
def do_delete(self, args):
try:
# add arguments
doParser = self.arg_delete()
doArgs = doParser.parse_args(shlex.split(args))
printer.out("Deleting subscription profile [" + doArgs.name + "] ...")
org = org_utils.org_get(self.api, doArgs.org)
# call UForge API
subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None)
exist = False
for item in subscriptions.subscriptionProfiles.subscriptionProfile:
if item.name == doArgs.name:
exist = True
					subscription = item
self.api.Orgs(org.dbId).Subscriptions(subscription.dbId).Remove(None)
printer.out("Subscription profile [" + doArgs.name + "] deleted", printer.OK)
if not exist:
printer.out("Subscription profile requested don't exist in [" + org.name + "]")
return 0
return 0
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
self.help_delete()
except Exception as e:
return handle_uforge_exception(e)
def help_delete(self):
doParser = self.arg_delete()
doParser.print_help()
def arg_update(self):
doParser = ArgumentParser(prog=self.cmd_name + " update", add_help=True, description="Updates an existing subscription profile.")
mandatory = doParser.add_argument_group("mandatory arguments")
optional = doParser.add_argument_group("optional arguments")
mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to update.")
optional.add_argument('--description', dest='description', type=str, required=False, help="The description of the subscription profile to update.")
optional.add_argument('--active', dest='active', action='store_true', required=False, help="Flag to make the subscription profile active.")
optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.")
return doParser
def do_update(self, args):
try:
doParser = self.arg_update()
doArgs = doParser.parse_args(shlex.split(args))
printer.out("Getting subscription profile with name [" + doArgs.name + "]...")
org = org_utils.org_get(self.api, doArgs.org)
subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None)
exist = False
for item in subscriptions.subscriptionProfiles.subscriptionProfile:
if item.name == doArgs.name:
exist = True
updated_subscription = subscriptionProfile()
updated_subscription.name = item.name
updated_subscription.code = item.code
if doArgs.description:
updated_subscription.description = doArgs.description
if doArgs.active:
updated_subscription.active = True
else:
updated_subscription.active = False
printer.out("Updating subscription profile with name [" + doArgs.name + "] ...")
# call UForge API
self.api.Orgs(org.dbId).Subscriptions(item.dbId).Update(updated_subscription)
printer.out("Subscription profile [" + doArgs.name + "] updated.", printer.OK)
if not exist:
printer.out("Subscription profile requested don't exist in [" + org.name + "]")
return 0
return 0
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
self.help_update()
except Exception as e:
return handle_uforge_exception(e)
def help_update(self):
doParser = self.arg_update()
doParser.print_help()
def arg_enable(self):
doParser = ArgumentParser(prog=self.cmd_name + " enable", add_help=True, description="Activates or enables a subscription profile within an organization.")
mandatory = doParser.add_argument_group("mandatory arguments")
optional = doParser.add_argument_group("optional arguments")
mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to enable.")
optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.")
return doParser
def do_enable(self, args):
try:
# add arguments
doParser = self.arg_enable()
doArgs = doParser.parse_args(shlex.split(args))
printer.out("Getting subscription profile with name [" + doArgs.name + "]...")
org = org_utils.org_get(self.api, doArgs.org)
# call UForge API
subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=None)
exist = False
for item in subscriptions.subscriptionProfiles.subscriptionProfile:
if item.name == doArgs.name:
exist = True
updated_subscription = subscriptionProfile()
updated_subscription.name = item.name
updated_subscription.code = item.code
if not item.active:
updated_subscription.active = True
printer.out("Enabling subscription profile with name [" + doArgs.name + "] ...")
self.api.Orgs(org.dbId).Subscriptions(item.dbId).Update(updated_subscription)
printer.out("Subscription [" + doArgs.name + "] is enabled.", printer.OK)
else:
printer.out("Subscription [" + doArgs.name + "] is already enabled", printer.WARNING)
if not exist:
printer.out("Subscription profile requested don't exist in [" + org.name + "]")
return 0
return 0
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
self.help_enable()
except Exception as e:
return handle_uforge_exception(e)
def help_enable(self):
doParser = self.arg_enable()
doParser.print_help()
def arg_disable(self):
doParser = ArgumentParser(prog=self.cmd_name + " disable", add_help=True, description="Disables a subscription profile within an organization (cannot be used to reate users).")
mandatory = doParser.add_argument_group("mandatory arguments")
optional = doParser.add_argument_group("optional arguments")
mandatory.add_argument('--name', dest='name', required=True, help="The name of the subscription profile to update")
optional.add_argument('--org', dest='org', required=False, help="The organization name. If no organization is provided, then the default organization is used.")
return doParser
def do_disable(self, args):
try:
			doParser = self.arg_disable()
doArgs = doParser.parse_args(shlex.split(args))
printer.out("Getting subscription profile with name [" + doArgs.name + "]...")
org = org_utils.org_get(self.api, doArgs.org)
subscriptions = self.api.Orgs(org.dbId).Subscriptions().Getall(Search=doArgs.name)
exist = False
for item in subscriptions.subscriptionProfiles.subscriptionProfile:
if item.name == doArgs.name:
exist = True
updated_subscription = subscriptionProfile()
updated_subscription.name = item.name
updated_subscription.code = item.code
if item.active:
updated_subscription.active = False
printer.out("Disabling subscription profile with name [" + doArgs.name + "] ...")
# call UForge API
self.api.Orgs(org.dbId).Subscriptions(item.dbId).Update(updated_subscription)
printer.out("Subscription [" + doArgs.name + "] is disabled.", printer.OK)
else:
printer.out("Subscription [" + doArgs.name + "] is already disabled", printer.WARNING)
if not exist:
printer.out("Subscription profile requested don't exist in [" + org.name + "]")
return 0
return 0
except ArgumentParserError as e:
printer.out("ERROR: In Arguments: " + str(e), printer.ERROR)
			self.help_disable()
except Exception as e:
return handle_uforge_exception(e)
def help_disable(self):
		doParser = self.arg_disable()
doParser.print_help()
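# Illustrative command-line usage only. The top-level program name ("uforge")
# is an assumption; the sub-commands and flags below are taken directly from
# the ArgumentParser definitions in this class.
#
#   uforge subscription list --org MyOrg
#   uforge subscription info --name dev-profile --org MyOrg
#   uforge subscription create --name dev-profile --code DEV01 \
#       --description "Developer accounts" --active --admins alice bob --roles packager
#   uforge subscription enable --name dev-profile
#   uforge subscription disable --name dev-profile
#   uforge subscription delete --name dev-profile --org MyOrg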
|
pedrolegold/uforge-cli
|
src/uforgecli/commands/subscription/subscription.py
|
Python
|
apache-2.0
| 29,418
|
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect, state):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
if state:
query['state'] = state
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant(
'/oauth/access_token', {
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
        if code != 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
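# Sketch of the full authorization-code flow using the two helpers above.
# Hedged: the concrete client object that mixes this class in, and the
# redirect URL / state values, are placeholders for illustration; only
# auth_url() and exchange_code() come from this module.
#
#   url = client.auth_url(['public', 'private'], 'https://example.com/callback', 'some-state')
#   # 1. Send the user's browser to `url` and let them approve access.
#   # 2. The provider redirects back to the callback with ?code=...&state=...;
#   #    verify that `state` matches what was sent before trusting the code.
#   token, user, scope = client.exchange_code(code, 'https://example.com/callback')
#   # exchange_code() stores the access token on the client and also returns it.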
|
gabrielgisoldo/vimeo.py
|
vimeo/auth/authorization_code.py
|
Python
|
apache-2.0
| 1,613
|
# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from wavefront_api_client.configuration import Configuration
class ResponseStatus(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'code': 'int',
'message': 'str',
'result': 'str'
}
attribute_map = {
'code': 'code',
'message': 'message',
'result': 'result'
}
def __init__(self, code=None, message=None, result=None, _configuration=None): # noqa: E501
"""ResponseStatus - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._code = None
self._message = None
self._result = None
self.discriminator = None
self.code = code
if message is not None:
self.message = message
self.result = result
@property
def code(self):
"""Gets the code of this ResponseStatus. # noqa: E501
HTTP Response code corresponding to this response # noqa: E501
:return: The code of this ResponseStatus. # noqa: E501
:rtype: int
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this ResponseStatus.
HTTP Response code corresponding to this response # noqa: E501
:param code: The code of this ResponseStatus. # noqa: E501
:type: int
"""
if self._configuration.client_side_validation and code is None:
raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501
self._code = code
@property
def message(self):
"""Gets the message of this ResponseStatus. # noqa: E501
Descriptive message of the status of this response # noqa: E501
:return: The message of this ResponseStatus. # noqa: E501
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this ResponseStatus.
Descriptive message of the status of this response # noqa: E501
:param message: The message of this ResponseStatus. # noqa: E501
:type: str
"""
self._message = message
@property
def result(self):
"""Gets the result of this ResponseStatus. # noqa: E501
:return: The result of this ResponseStatus. # noqa: E501
:rtype: str
"""
return self._result
@result.setter
def result(self, result):
"""Sets the result of this ResponseStatus.
:param result: The result of this ResponseStatus. # noqa: E501
:type: str
"""
if self._configuration.client_side_validation and result is None:
raise ValueError("Invalid value for `result`, must not be `None`") # noqa: E501
allowed_values = ["OK", "ERROR"] # noqa: E501
if (self._configuration.client_side_validation and
result not in allowed_values):
raise ValueError(
"Invalid value for `result` ({0}), must be one of {1}" # noqa: E501
.format(result, allowed_values)
)
self._result = result
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResponseStatus, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResponseStatus):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ResponseStatus):
return True
return self.to_dict() != other.to_dict()
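# Minimal usage sketch (illustrative only; assumes the default Configuration
# with client-side validation enabled, as set up in __init__ above):
#
#   status = ResponseStatus(code=200, message="operation completed", result="OK")
#   status.to_dict()  # -> {'code': 200, 'message': 'operation completed', 'result': 'OK'}
#   ResponseStatus(code=200, result="BAD")  # raises ValueError: result must be "OK" or "ERROR"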
|
wavefrontHQ/python-client
|
wavefront_api_client/models/response_status.py
|
Python
|
apache-2.0
| 5,837
|
#!/usr/bin/env python
#
# Copyright 2016 Major Hayden
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Searches Intel's ARK site and returns data about various processors.
TOTALLY UNOFFICIAL. ;)
"""
from bs4 import BeautifulSoup
import click
import requests
from terminaltables import AsciiTable
USER_AGENT = ("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like"
"Gecko) Chrome/47.0.2526.111 Safari/537.36")
def get_full_ark_url(quickurl):
full_url = "http://ark.intel.com{0}".format(quickurl)
return full_url
def get_cpu_html(quickurl):
"""Connect to Intel's ark website and retrieve HTML."""
full_url = get_full_ark_url(quickurl)
headers = {
'User-Agent': USER_AGENT,
}
r = requests.get(full_url, headers=headers)
return r.text
def generate_table_data(html_output):
"""Generate an ASCII table based on the HTML provided."""
soup = BeautifulSoup(html_output, 'html.parser')
table_data = [
['Parameter', 'Value']
]
for table in soup.select('table.specs'):
rows = table.find_all("tr")
for row in rows[1:]:
cells = [cell.get_text("\n", strip=True)
for cell in row.find_all('td')]
if cells[0] == 'T\nCASE':
cells[0] = 'T(CASE)'
if "\n" in cells[0]:
cells[0] = cells[0][:cells[0].index("\n")]
table_data.append(cells)
return table_data
def quick_search(search_term):
    """Query Intel ARK's autocomplete endpoint and return the matching JSON."""
url = "http://ark.intel.com/search/AutoComplete?term={0}"
headers = {
'User-Agent': USER_AGENT,
}
    r = requests.get(url.format(search_term), headers=headers)
return r.json()
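# The autocomplete endpoint returns a JSON list of matches. The only fields
# relied on below are 'value' (display name) and 'quickUrl' (path fragment
# later passed to get_cpu_html). Illustrative shape, not a real response:
#   [{"value": "Intel Xeon Processor E3-1230 v3 (8M Cache, 3.30 GHz)",
#     "quickUrl": "/products/75054/..."}]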
@click.command()
@click.argument('search_term')
@click.pass_context
def search(ctx, search_term):
"""Main function of the script."""
ark_json = quick_search(search_term)
if len(ark_json) < 1:
click.echo("Couldn't find any processors matching "
"{0}".format(search_term))
ctx.exit(0)
click.echo(u"Processors found: {0}".format(len(ark_json)))
choice_dict = {}
counter = 0
for cpu in ark_json:
choice_dict[counter] = cpu['quickUrl']
click.echo(u"[{0}] {1}".format(counter, cpu['value']))
counter += 1
if len(ark_json) > 1:
choice = click.prompt(u"Which processor", prompt_suffix='? ', type=int)
else:
choice = 0
cpu_data = get_cpu_html(choice_dict[int(choice)])
table_data = generate_table_data(cpu_data)
table = AsciiTable(table_data)
click.echo(table.table)
ctx.exit(0)
if __name__ == '__main__':
search()
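# Example invocation (illustrative; the search term is arbitrary and the
# output values are made up, but the shape follows the click.echo calls above):
#
#   $ python arksearch.py "E3-1230"
#   Processors found: N
#   [0] <first matching processor name>
#   Which processor? 0
#   +------------------+-------+
#   | Parameter        | Value |
#   +------------------+-------+
#   | ...              | ...   |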
|
major/arksearch
|
arksearch/arksearch.py
|
Python
|
apache-2.0
| 3,170
|
# Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from tests.shape.shape_base import *
import analysis.shape.constraints
class FirstExampleBase(TestConstraintBase):
def shapeSetUp(self):
x, self.xSlot, self.xExpr = self.makeLocalObjs('x')
y, self.ySlot, self.yExpr = self.makeLocalObjs('y')
z, self.zSlot, self.zExpr = self.makeLocalObjs('z')
t, self.tSlot, self.tExpr = self.makeLocalObjs('t')
q, self.qSlot, self.qExpr = self.makeLocalObjs('q')
self.nSlot = self.sys.canonical.fieldSlot(None, ('LowLevel', 'n'))
self.xRef = self.refs(self.xSlot)
self.yRef = self.refs(self.ySlot)
self.zRef = self.refs(self.zSlot)
self.tRef = self.refs(self.tSlot)
self.qRef = self.refs(self.qSlot)
self.nRef = self.refs(self.nSlot)
self.nnRef = self.refs(self.nSlot, self.nSlot)
self.ynnRef = self.refs(self.ySlot, self.nSlot, self.nSlot)
self.xyRef = self.refs(self.xSlot, self.ySlot)
self.xtRef = self.refs(self.xSlot, self.tSlot)
self.yzRef = self.refs(self.ySlot, self.zSlot)
self.xnRef = self.refs(self.xSlot, self.nSlot)
self.ynRef = self.refs(self.ySlot, self.nSlot)
self.tnRef = self.refs(self.tSlot, self.nSlot)
self.xynRef = self.refs(self.xSlot, self.ySlot, self.nSlot)
self.ytnRef = self.refs(self.ySlot, self.tSlot, self.nSlot)
self.ynExpr = self.expr(self.yExpr, self.nSlot)
self.tnExpr = self.expr(self.tExpr, self.nSlot)
def assign(self, rhs, lhs):
self.setConstraint(analysis.shape.constraints.AssignmentConstraint(self.sys, self.inputPoint, self.outputPoint, rhs, lhs))
class TestLocalAssignConstraint(FirstExampleBase):
def testIndex1(self):
self.assign(self.xExpr, self.tExpr)
# yz -> yz
argument = (self.yzRef, None, None)
results = [
(self.yzRef, (self.yExpr, self.zExpr), None),
]
self.checkTransfer(argument, results)
def testIndex2(self):
self.assign(self.xExpr, self.tExpr)
# z -> z
argument = (self.zRef, None, None)
results = [
(self.zRef, (self.zExpr,), None),
]
self.checkTransfer(argument, results)
def testIndex3(self):
self.assign(self.xExpr, self.tExpr)
# x -> xt
argument = (self.xRef, None, None)
results = [
(self.xtRef, (self.xExpr, self.tExpr,), None),
]
self.checkTransfer(argument, results)
def testIndex4(self):
self.assign(self.xExpr, self.tExpr)
# tn -> n
argument = (self.tnRef, None, None)
results = [
(self.nRef, None, None),
]
self.checkTransfer(argument, results)
def testIndex5(self):
self.assign(self.xExpr, self.tExpr)
# n -> n
argument = (self.nRef, None, None)
results = [
(self.nRef, None, None),
]
self.checkTransfer(argument, results)
def testIndex6(self):
self.assign(self.xExpr, self.tExpr)
# yn -> yn
argument = (self.ynRef, None, None)
results = [
(self.ynRef, (self.yExpr,), None),
]
self.checkTransfer(argument, results)
def testIndex7(self):
self.assign(self.xExpr, self.tExpr)
# ytn -> yn
argument = (self.ytnRef, None, None)
results = [
(self.ynRef, (self.yExpr,), None),
]
self.checkTransfer(argument, results)
def testTNX1(self):
self.assign(self.tnExpr, self.xExpr)
# yz -> yz
argument = (self.yzRef, None, None)
results = [
(self.yzRef, (self.yExpr,self.zExpr,), None),
]
self.checkTransfer(argument, results)
def testTNX2(self):
self.assign(self.tnExpr, self.xExpr)
# z -> z
argument = (self.zRef, None, None)
results = [
(self.zRef, (self.zExpr,), None),
]
self.checkTransfer(argument, results)
def testTNX3(self):
self.assign(self.tnExpr, self.xExpr)
# xt -> t
argument = (self.xtRef, None, None)
results = [
(self.tRef, (self.tExpr,), None),
]
self.checkTransfer(argument, results)
def testTNX4(self):
self.assign(self.tnExpr, self.xExpr)
# n -> n, xn
argument = (self.nRef, None, None)
results = [
(self.nRef, None, (self.tnExpr,)),
(self.xnRef, (self.tnExpr,), None),
]
self.checkTransfer(argument, results)
def testTNX5(self):
self.assign(self.tnExpr, self.xExpr)
# yn -> yn, xyn
argument = (self.ynRef, None, None)
results = [
(self.ynRef, None, (self.tnExpr,)),
(self.xynRef, (self.tnExpr,), None),
]
self.checkTransfer(argument, results)
def testYNTN1(self):
self.assign(self.ynExpr, self.tnExpr)
# yz -> yz
argument = (self.yzRef, None, None)
results = [
(self.yzRef, None, None),
]
self.checkTransfer(argument, results)
def testYNTN2(self):
self.assign(self.ynExpr, self.tnExpr)
		# x -> x
argument = (self.xRef, None, None)
results = [
(self.xRef, None, None),
]
self.checkTransfer(argument, results)
def testYNTN3(self):
self.assign(self.ynExpr, self.tnExpr)
# t -> t
argument = (self.tRef, None, None)
results = [
(self.tRef, None, None),
]
self.checkTransfer(argument, results)
def testYNTN4(self):
self.assign(self.ynExpr, self.tnExpr)
# n -> n, nn
argument = (self.nRef, None, (self.tnExpr,))
results = [
(self.nRef, None, (self.ynExpr, self.tnExpr,)),
(self.nnRef, (self.ynExpr, self.tnExpr,), None),
]
self.checkTransfer(argument, results)
def testYNTN5(self):
self.assign(self.ynExpr, self.tnExpr)
# yn -> yn, ynn
argument = (self.ynRef, None, (self.tnExpr,))
results = [
(self.ynRef, None, (self.ynExpr, self.tnExpr,)),
(self.ynnRef, (self.ynExpr, self.tnExpr,), None),
]
self.checkTransfer(argument, results)
def testYNTN6(self):
self.assign(self.ynExpr, self.tnExpr)
# xn -> x, xn
argument = (self.xnRef, (self.tnExpr,), None)
results = [
(self.xRef, None, None),
(self.xnRef, (self.ynExpr, self.tnExpr,), None),
]
self.checkTransfer(argument, results)
def testYNTN7(self):
self.assign(self.ynExpr, self.tnExpr)
# xyn -> xy, xyn
argument = (self.xynRef, (self.tnExpr,), None)
results = [
(self.xyRef, None, None),
(self.xynRef, (self.ynExpr, self.tnExpr,), None),
]
self.checkTransfer(argument, results)
def testTYN1(self):
self.assign(self.tExpr, self.ynExpr)
# t -> tn
argument = (self.tRef, None, None)
results = [
(self.tnRef, (self.ynExpr,), None),
]
self.checkTransfer(argument, results)
def testTYN2(self):
self.assign(self.tExpr, self.ynExpr)
# nn -> n
argument = (self.nnRef, (self.tnExpr, self.ynExpr), None)
results = [
(self.nRef, None, (self.ynExpr,)),
]
self.checkTransfer(argument, results)
def testTYN3(self):
self.assign(self.tExpr, self.ynExpr)
# n -> n
argument = (self.nRef, None, (self.tnExpr, self.ynExpr))
results = [
(self.nRef, None, (self.ynExpr,)),
]
self.checkTransfer(argument, results)
def testTYN4(self):
self.assign(self.tExpr, self.ynExpr)
# yn -> yn
argument = (self.ynRef, None, (self.tnExpr, self.ynExpr))
results = [
(self.ynRef, None, (self.ynExpr,)),
]
self.checkTransfer(argument, results)
def testTYN5(self):
self.assign(self.tExpr, self.ynExpr)
# ynn -> yn
argument = (self.ynnRef, (self.tnExpr, self.ynExpr), None)
results = [
(self.ynRef, None, (self.ynExpr,)),
]
self.checkTransfer(argument, results)
def testTYN6(self):
self.assign(self.tExpr, self.ynExpr)
# xn -> x
argument = (self.xnRef, (self.tnExpr, self.ynExpr), None)
results = [
(self.xRef, None, None),
]
self.checkTransfer(argument, results)
def testTYN7(self):
self.assign(self.tExpr, self.ynExpr)
# x -> x
argument = (self.xRef, None, None)
results = [
(self.xRef, None, None),
]
self.checkTransfer(argument, results)
def testTYN8(self):
self.assign(self.tExpr, self.ynExpr)
# xyn -> xy
argument = (self.xynRef, (self.tnExpr, self.ynExpr), None)
results = [
(self.xyRef, None, None),
]
self.checkTransfer(argument, results)
def testTYN9(self):
self.assign(self.tExpr, self.ynExpr)
# xy -> xy
argument = (self.xyRef, None, None)
results = [
(self.xyRef, None, None),
]
self.checkTransfer(argument, results)
# There's more, but the tricky part seems to work?
|
ncbray/pystream
|
bin/tests/shape/test_shape_examples.py
|
Python
|
apache-2.0
| 8,509
|
"""
Generic interchange document structures.
"""
STYLE_TYPE_INT = 'int'
STYLE_TYPE_FLOAT = 'float'
STYLE_TYPE_CDATA = 'cdata'
STYLE_TYPE_BOOLEAN = 'boolean'
STYLE_TYPES = (
STYLE_TYPE_INT,
STYLE_TYPE_FLOAT,
STYLE_TYPE_CDATA,
STYLE_TYPE_BOOLEAN
)
class Style(object):
def __init__(self, types):
object.__init__(self)
self.name = None
self.source = None
self.__settings = {}
self.__types = types
for key, val in types.items():
assert (isinstance(key, str)
), "key '{0}' not string".format(key)
if isinstance(val, str):
assert (val in STYLE_TYPES
), "key '{0}' type '{1}' not valid".format(key, val)
else:
assert (hasattr(val, '__iter__') and callable(getattr(val, '__iter__'))
), "key '{0}' type '{1}' not list".format(key, val)
# FIXME Assert entries in type are strings
def get_setting(self, name):
if name in self.__settings:
return self.__settings[name]
if name in self.__types:
return None
raise Exception("bad setting name: {0}".format(str(name)))
def set_setting(self, name, value):
if name in self.__types:
t = self.__types[name]
if t == STYLE_TYPE_INT:
value = int(value)
elif t == STYLE_TYPE_FLOAT:
value = float(value)
elif t == STYLE_TYPE_CDATA:
value = str(value)
elif t == STYLE_TYPE_BOOLEAN:
value = bool(value)
else:
value = str(value).lower()
if value not in t:
raise Exception("Bad setting value for name {0}: {1}".format(
str(name), value))
            self.__settings[name] = value
            return
        raise Exception("bad setting name: {0}".format(str(name)))
def defaults(self, default_settings):
for key, val in default_settings.iteritems():
self.set_setting(key, val)
def keys(self):
return self.__types.keys()
def __getitem__(self, key):
return self.get_setting(key)
def __setitem__(self, name, value):
self.set_setting(name, value)
def __str__(self):
return "Style({0}: {1})".format(self.name, self.__settings)
class BlockStyle(Style):
def __init__(self):
# All size measurements in mm
Style.__init__(self, {
'margin-left': STYLE_TYPE_FLOAT,
'margin-right': STYLE_TYPE_FLOAT,
'margin-top': STYLE_TYPE_FLOAT,
'margin-bottom': STYLE_TYPE_FLOAT,
'page-break': STYLE_TYPE_BOOLEAN,
'h-align': ["center", "left", "right", "justify"],
'border-left-width': STYLE_TYPE_INT,
'border-right-width': STYLE_TYPE_INT,
'border-top-width': STYLE_TYPE_INT,
'border-bottom-width': STYLE_TYPE_INT
})
class TextStyle(Style):
def __init__(self):
Style.__init__(self, {
'italic': STYLE_TYPE_BOOLEAN,
'bold': STYLE_TYPE_BOOLEAN,
'underline': STYLE_TYPE_BOOLEAN,
'strikethrough': STYLE_TYPE_BOOLEAN,
'all-caps': STYLE_TYPE_BOOLEAN,
'small-caps': STYLE_TYPE_BOOLEAN,
'v-align': ['sup', 'sub', 'normal'],
'size': STYLE_TYPE_INT,
'font': ['sans', 'serif', 'mono', 'normal'],
'color': STYLE_TYPE_CDATA,
'background-color': STYLE_TYPE_CDATA
})
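

# Hedged illustration added here (not present in the original module): the
# Style subclasses coerce assigned values to the declared type, and
# enumerated settings must match one of the allowed strings.
def _example_style_usage():
    style = TextStyle()
    style['size'] = '12'      # coerced to int 12
    style['bold'] = 1         # coerced to bool True
    style['v-align'] = 'sup'  # must be one of 'sup', 'sub', 'normal'
    return style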
class ContentObj(object):
def __init__(self):
object.__init__(self)
self.source = None
def get_text(self):
raise NotImplementedError()
class Div(ContentObj):
"""A block spacing object."""
def __init__(self):
ContentObj.__init__(self)
self.style = BlockStyle()
self.is_section = False
def get_children(self):
raise NotImplementedError()
def get_text(self):
ret = ""
for ch in self.get_children():
ret += ch.get_text()
return ret
class SideBar(Div):
"""A side section of content."""
def __init__(self):
Div.__init__(self)
self.divs = []
def get_children(self):
return self.divs
class SeparatorLine(Div):
"""A single line separating parts of a chapter."""
def __init__(self):
Div.__init__(self)
def get_children(self):
return []
class Para(Div):
def __init__(self):
Div.__init__(self)
self.spans = []
def add_span(self, span):
assert isinstance(span, Span), "not a span: {0}".format(span)
self.spans.append(span)
def get_children(self):
return self.spans
def __str__(self):
spanstxt = u"["
visited = False
for spn in self.spans:
if visited:
spanstxt += u", "
else:
visited = True
spanstxt += repr(spn)
spanstxt += u"]"
return u"Para(Style: {0}; spans: {1})".format(
self.style, spanstxt)
class TableRow(Div):
def __init__(self):
Div.__init__(self)
self.cells = []
def add_cell(self, cell):
assert isinstance(cell, Div)
self.cells.append(cell)
def get_children(self):
return self.cells
class Table(Div):
def __init__(self):
Div.__init__(self)
self.header = None
self.rows = []
def set_header(self, header):
assert header is None or isinstance(header, TableRow)
self.header = header
def add_row(self, row):
assert isinstance(row, TableRow)
self.rows.append(row)
def get_children(self):
ret = [self.header]
ret.extend(self.rows)
return ret
class Span(ContentObj):
"""A inline object. Contained in a Div"""
def __init__(self):
ContentObj.__init__(self)
self.style = TextStyle()
def get_text(self):
return ""
class Text(Span):
def __init__(self):
Span.__init__(self)
self.text = ""
def get_text(self):
return self.text
def __str__(self):
return u"Text(Style: {0}, text: '{1}')".format(
self.style, self.text)
class SpecialCharacter(Text):
def __init__(self):
Text.__init__(self)
self.html = ""
self.is_whitespace = False
class Correction(Span):
def __init__(self, original):
Span.__init__(self)
self.original = original
self.text = ""
class Media(Span):
def __init__(self, filename):
Span.__init__(self)
assert filename.find('.') >= 0
self.filename = filename
self.ext = filename[filename.rindex('.')+1:]
def get_mimetype(self):
raise NotImplementedError()
def save_as(self, dest_stream):
raise NotImplementedError()
class Image(Media):
def __init__(self, filename):
Media.__init__(self, filename)
assert filename.find('.') >= 0
self.filename = filename
self.ext = filename[filename.rindex('.')+1:]
def get_mimetype(self):
return "image/{0}".format(self.ext)
def save_as(self, dest_stream):
raise NotImplementedError()
class Section(Div):
def __init__(self, index):
Div.__init__(self)
self.is_section = True
self.index = index
self.is_toc = False
self.is_book = False
def get_children(self):
raise NotImplementedError()
class Chapter(Section):
def __init__(self, name, index):
Section.__init__(self, index)
self.name = name
self.divs = []
def add_div(self, div):
assert isinstance(div, Div)
self.divs.append(div)
def get_children(self):
return self.divs
class TOC(Section):
def __init__(self, index, depth_index_func):
Section.__init__(self, index)
self.is_toc = True
self.title_div = None
if depth_index_func is not None:
assert callable(depth_index_func)
self.depth_index_func = depth_index_func
# Should be a list of block styles,
# one per depth of the TOC
self.line_div_styles = []
# A list of nodes
self.section_tree = []
def get_children(self):
ret = []
if self.title_div is not None:
ret.append(self.title_div)
ret.extend(self.section_tree)
return ret
def set_chapters(self, chapters):
self.section_tree = self.__chapter_builder(chapters, 0)
def __chapter_builder(self, sections, depth):
order = []
index = 0
for ch in sections:
if isinstance(ch, Chapter):
index += 1
order.append(self.__create_entry(ch, depth, index))
kids = self.__chapter_builder(ch.get_children(), depth + 1)
if len(kids) > 0:
order.extend(kids)
return order
def __create_entry(self, ch, depth, index):
assert isinstance(ch, Chapter)
prefix = None
if self.depth_index_func is not None:
prefix = self.depth_index_func(depth, index)
d = TocRow(ch, depth, index, prefix)
        if depth >= len(self.line_div_styles):
d.style = self.line_div_styles[-1]
else:
d.style = self.line_div_styles[depth]
return d
class TocRow(Div):
def __init__(self, chapter, depth, index, prefix):
Div.__init__(self)
self.name = chapter.name
self.prefix = prefix or ""
self.depth = depth
self.index = index
self.text = Text()
self.text.text = self.prefix + self.name
def get_children(self):
return [self.text]
class MetaData(object):
def __init__(self):
object.__init__(self)
self.author_first = "" # (includes middle name / initial)
self.author_last = ""
self.year = ""
self.cover = None # Image
self.title = ""
self.description = ""
self.isbn_10 = ""
self.isbn_13 = ""
self.language = "en"
self.subtitles = []
def set_cover(self, cover):
assert isinstance(cover, Image)
self.cover = cover
def as_dict(self):
return {
"author_first": self.author_first,
"author_last": self.author_last,
"year": self.year,
"title": self.title,
"description": self.description,
"isbn_10": self.isbn_10,
"isbn_13": self.isbn_13,
"language": self.language,
"subtitles": self.subtitles
}
|
groboclown/py-book-selfpub
|
src/selfpub/text.py
|
Python
|
apache-2.0
| 10,783
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import port
from neutron_lib.tests.unit.api.definitions import base
class PortDefinitionTestCase(base.DefinitionBaseTestCase):
extension_module = port
extension_attributes = ()
|
openstack/neutron-lib
|
neutron_lib/tests/unit/api/definitions/test_port.py
|
Python
|
apache-2.0
| 793
|
#!/usr/bin/python
##############################################################################################
# Copyright (C) 2014 Pier Luigi Ventre - (Consortium GARR and University of Rome "Tor Vergata")
# Copyright (C) 2014 Giuseppe Siracusano, Stefano Salsano - (CNIT and University of Rome "Tor Vergata")
# www.garr.it - www.uniroma2.it/netgroup - www.cnit.it
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Deployer Configuration Utils.
#
# @author Pier Luigi Ventre <pl.ventre@gmail.com>
# @author Giuseppe Siracusano <a_siracusano@tin.it>
# @author Stefano Salsano <stefano.salsano@uniroma2.it>
#
#
from mininet.node import Node
def convert_port_name_to_number(oshi, port):
p = oshi.cmd("ovs-ofctl dump-ports-desc br-%s | grep %s |awk -F '(' '{print $1}'| cut -d ' ' -f 2" %(oshi.name, port ))
return str(int(p))
def configure_standalone_sw(switches):
print "*** Configuring L2 Switches"
root = Node( 'root', inNamespace=False )
for sw in switches:
print "*** Configuring", sw.name, "As Learning Switch"
root.cmd("ovs-vsctl set-fail-mode %s standalone" % sw.name)
|
netgroup/Dreamer-Mininet-Deployer
|
deployer_configuration_utils.py
|
Python
|
apache-2.0
| 1,593
|
from .tensor import *
from .math import *
|
cypherai/PySyft
|
syft/__init__.py
|
Python
|
apache-2.0
| 42
|
import sys
import click
from globus_cli.termio import FORMAT_SILENT, formatted_print
from ..services.transfer import CustomTransferClient
def transfer_task_wait_with_io(
transfer_client: CustomTransferClient,
meow,
heartbeat,
polling_interval,
timeout,
task_id,
timeout_exit_code,
) -> None:
"""
Options are the core "task wait" options, including the `--meow` easter
egg.
This does the core "task wait" loop, including all of the IO.
It *does exit* on behalf of the caller. (We can enhance with a
`noabort=True` param or somesuch in the future if necessary.)
"""
def timed_out(waited_time):
if timeout is None:
return False
else:
return waited_time >= timeout
def check_completed():
completed = transfer_client.task_wait(
task_id, timeout=polling_interval, polling_interval=polling_interval
)
if completed:
if heartbeat:
click.echo("", err=True)
# meowing tasks wake up!
if meow:
click.echo(
r"""
_..
/}_{\ /.-'
( a a )-.___...-'/
==._.== ;
\ i _..._ /,
{_;/ {_//""",
err=True,
)
# TODO: possibly update TransferClient.task_wait so that we don't
# need to do an extra fetch to get the task status after completion
res = transfer_client.get_task(task_id)
formatted_print(res, text_format=FORMAT_SILENT)
status = res["status"]
if status == "SUCCEEDED":
click.get_current_context().exit(0)
else:
click.get_current_context().exit(1)
return completed
# Tasks start out sleepy
if meow:
click.echo(
r"""
|\ _,,,---,,_
/,`.-'`' -. ;-;;,_
|,4- ) )-,_..;\ ( `'-'
'---''(_/--' `-'\_)""",
err=True,
)
waited_time = 0
while not timed_out(waited_time) and not check_completed():
if heartbeat:
click.echo(".", err=True, nl=False)
sys.stderr.flush()
waited_time += polling_interval
# add a trailing newline to heartbeats if we fail
if heartbeat:
click.echo("", err=True)
exit_code = 1
if timed_out(waited_time):
click.echo(f"Task has yet to complete after {timeout} seconds", err=True)
exit_code = timeout_exit_code
# output json if requested, but nothing for text mode
res = transfer_client.get_task(task_id)
formatted_print(res, text_format=FORMAT_SILENT)
click.get_current_context().exit(exit_code)
|
globus/globus-cli
|
src/globus_cli/commands/_common.py
|
Python
|
apache-2.0
| 2,720
|
"""
:author: yoram@ignissoft.com
"""
import getpass
from random import randint
from stcrestclient import stchttp
class StcRestWrapper(object):
def __init__(self, logger, server, port=80, user_name=getpass.getuser(), session_name=None):
""" Init STC REST client.
:param server: STC REST API server address.
:param port: STC REST API HTTP port.
:param user_name: user name, part of session ID.
:param session_name: session, name part of session ID.
        Add logger to log STC REST commands only.
        This creates a clean REST script that can be used later for debugging.
"""
super(self.__class__, self).__init__()
debug_print = True if logger.level == 10 else False
self.ls = stchttp.StcHttp(server, port, debug_print=debug_print)
if session_name:
self.session_id = self.ls.join_session(session_name)
else:
session_name = 'session' + str(randint(0, 99))
self.session_id = self.ls.new_session(user_name, session_name, kill_existing=True)
def disconnect(self, terminate):
self.ls.end_session(terminate)
def create(self, obj_type, parent, **attributes):
""" Creates one or more Spirent TestCenter Automation objects.
:param obj_type: object type.
:param parent: object parent - object will be created under this parent.
:param attributes: additional attributes.
:return: STC object reference.
"""
return self.ls.create(obj_type, under=parent.obj_ref(), **attributes)
def perform(self, command, **arguments):
""" Execute a command.
:param command: requested command.
:param arguments: additional arguments.
"""
if (command in ['CSTestSessionConnect', 'CSTestSessionDisconnect']):
return
self.command_rc = self.ls.perform(command, **arguments)
return self.command_rc
def get(self, obj_ref, attribute=''):
""" Returns the value(s) of one or more object attributes or a set of object handles.
:param obj_ref: requested object reference.
:param attribute: requested attribute. If empty - return values of all object attributes.
:return: requested value(s) as returned by get command.
"""
return self.ls.get(obj_ref, attribute)
def getList(self, obj_ref, attribute):
""" Returns the value of the object attributes or a python list.
:param obj_ref: requested object reference.
:param attribute: requested attribute.
:return: requested value as returned by get command.
"""
return self.ls.get(obj_ref, attribute).split()
def config(self, obj_ref, **attributes):
""" Set or modifies one or more object attributes, or a relation.
:param obj_ref: requested object reference.
:param attributes: dictionary of {attributes: values} to configure.
"""
self.ls.config(obj_ref, **attributes)
def subscribe(self, **arguments):
""" Subscribe to statistics view.
:param arguments: subscribe command arguments.
            mandatory arguments: parent, resultParent, configType, resultType
+ additional arguments.
:return: ResultDataSet handler
"""
return self.perform('ResultsSubscribe', **arguments)['ReturnedDataSet']
def unsubscribe(self, result_data_set):
""" Unsubscribe from statistics view.
:param result_data_set: ResultDataSet handler
"""
self.perform('ResultDataSetUnsubscribe', ResultDataSet=result_data_set)
def apply(self):
""" Sends a test configuration to the Spirent TestCenter chassis. """
self.ls.apply()
def wait(self):
""" Wait until sequencer is finished. """
self.ls.wait_until_complete()
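

# Hedged usage sketch (illustration only; not part of the original module).
# The server address and the performed command/arguments are invented
# placeholders -- consult the Spirent TestCenter documentation for real ones.
def _example_session(logger):
    stc = StcRestWrapper(logger, server='stc-lab.example.com', port=80)
    stc.perform('ResetConfig', config='system1')  # command name is illustrative
    stc.apply()
    stc.disconnect(terminate=True)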
|
shmir/PyTestCenter
|
testcenter/api/stc_rest.py
|
Python
|
apache-2.0
| 3,868
|
# -*- coding: utf-8 -*-
#
# Copyright 2014-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Predicates structure for the BigML local AnomalyTree
This module defines an auxiliary Predicates structure that is used in the
AnomalyTree to save the node's predicates info.
"""
from bigml.predicate import Predicate
class Predicates():
"""A list of predicates to be evaluated in an anomaly tree's node.
"""
def __init__(self, predicates_list):
self.predicates = []
for predicate in predicates_list:
if predicate is True:
self.predicates.append(True)
else:
self.predicates.append(
Predicate(predicate.get('op'),
predicate.get('field'),
predicate.get('value'),
predicate.get('term')))
def to_rule(self, fields, label='name'):
""" Builds rule string from a predicates list
"""
return " and ".join([predicate.to_rule(fields, label=label) for
predicate in self.predicates
if not isinstance(predicate, bool)])
def apply(self, input_data, fields):
""" Applies the operators defined in each of the predicates to
the provided input data
"""
return all([predicate.apply(input_data, fields) for
predicate in self.predicates
if isinstance(predicate, Predicate)])
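

# Hedged illustration added here (not in the original module): the shape of a
# node's predicate list as consumed by Predicates(). The field id and operator
# below are placeholders in the generic BigML predicate format.
def _example_predicates():
    node_predicates = [
        True,  # the tree root carries a trivially true predicate
        {'op': '>', 'field': '000001', 'value': 5, 'term': None},
    ]
    return Predicates(node_predicates)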
|
jaor/python
|
bigml/predicates.py
|
Python
|
apache-2.0
| 2,025
|
"""Implementation of the WebSocket protocol.
`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
communication between the browser and server.
WebSockets are supported in the current versions of all major browsers,
although older versions that do not support WebSockets are still in use
(refer to http://caniuse.com/websockets for details).
This module implements the final version of the WebSocket protocol as
defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_. Certain
browser versions (notably Safari 5.x) implemented an earlier draft of
the protocol (known as "draft 76") and are not compatible with this module.
.. versionchanged:: 4.0
Removed support for the draft 76 protocol version.
"""
from __future__ import absolute_import, division, print_function, with_statement
# Author: Jacob Kristhammar, 2010
import base64
import collections
import hashlib
import os
import struct
import tornado.escape
import tornado.web
import zlib
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str, to_unicode
from tornado import httpclient, httputil
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado import simple_httpclient
from tornado.tcpclient import TCPClient
from tornado.util import _websocket_mask, PY3
if PY3:
    from urllib.parse import urlparse  # py3
    xrange = range
else:
    from urlparse import urlparse  # py2
class WebSocketError(Exception):
pass
class WebSocketClosedError(WebSocketError):
"""Raised by operations on a closed connection.
.. versionadded:: 3.2
"""
pass
class WebSocketHandler(tornado.web.RequestHandler):
"""Subclass this class to create a basic WebSocket handler.
Override `on_message` to handle incoming messages, and use
`write_message` to send messages to the client. You can also
override `open` and `on_close` to handle opened and closed
connections.
See http://dev.w3.org/html5/websockets/ for details on the
JavaScript interface. The protocol is specified at
http://tools.ietf.org/html/rfc6455.
    Here is an example WebSocket handler that echoes all received messages
    back to the client:
.. testcode::
class EchoWebSocket(tornado.websocket.WebSocketHandler):
def open(self):
print("WebSocket opened")
def on_message(self, message):
self.write_message(u"You said: " + message)
def on_close(self):
print("WebSocket closed")
.. testoutput::
:hide:
WebSockets are not standard HTTP connections. The "handshake" is
HTTP, but after the handshake, the protocol is
message-based. Consequently, most of the Tornado HTTP facilities
are not available in handlers of this type. The only communication
methods available to you are `write_message()`, `ping()`, and
    `close()`. Likewise, your request handler class should implement the
    `open()` method rather than ``get()`` or ``post()``.
If you map the handler above to ``/websocket`` in your application, you can
invoke it in JavaScript with::
var ws = new WebSocket("ws://localhost:8888/websocket");
ws.onopen = function() {
ws.send("Hello, world");
};
ws.onmessage = function (evt) {
alert(evt.data);
};
This script pops up an alert box that says "You said: Hello, world".
Web browsers allow any site to open a websocket connection to any other,
instead of using the same-origin policy that governs other network
access from javascript. This can be surprising and is a potential
security hole, so since Tornado 4.0 `WebSocketHandler` requires
applications that wish to receive cross-origin websockets to opt in
by overriding the `~WebSocketHandler.check_origin` method (see that
method's docs for details). Failure to do so is the most likely
cause of 403 errors when making a websocket connection.
When using a secure websocket connection (``wss://``) with a self-signed
certificate, the connection from a browser may fail because it wants
to show the "accept this certificate" dialog but has nowhere to show it.
You must first visit a regular HTML page using the same certificate
to accept it before the websocket connection will succeed.
"""
def __init__(self, application, request, **kwargs):
super(WebSocketHandler, self).__init__(application, request, **kwargs)
self.ws_connection = None
self.close_code = None
self.close_reason = None
self.stream = None
self._on_close_called = False
@tornado.web.asynchronous
def get(self, *args, **kwargs):
self.open_args = args
self.open_kwargs = kwargs
# Upgrade header should be present and should be equal to WebSocket
if self.request.headers.get("Upgrade", "").lower() != 'websocket':
self.clear()
self.set_status(400)
log_msg = "Can \"Upgrade\" only to \"WebSocket\"."
self.finish(log_msg)
gen_log.debug(log_msg)
return
# Connection header should be upgrade.
# Some proxy servers/load balancers
# might mess with it.
headers = self.request.headers
connection = map(lambda s: s.strip().lower(),
headers.get("Connection", "").split(","))
if 'upgrade' not in connection:
self.clear()
self.set_status(400)
log_msg = "\"Connection\" must be \"Upgrade\"."
self.finish(log_msg)
gen_log.debug(log_msg)
return
# Handle WebSocket Origin naming convention differences
# The difference between version 8 and 13 is that in 8 the
# client sends a "Sec-Websocket-Origin" header and in 13 it's
# simply "Origin".
if "Origin" in self.request.headers:
origin = self.request.headers.get("Origin")
else:
origin = self.request.headers.get("Sec-Websocket-Origin", None)
# If there was an origin header, check to make sure it matches
# according to check_origin. When the origin is None, we assume it
# did not come from a browser and that it can be passed on.
if origin is not None and not self.check_origin(origin):
self.clear()
self.set_status(403)
log_msg = "Cross origin websockets not allowed"
self.finish(log_msg)
gen_log.debug(log_msg)
return
self.stream = self.request.connection.detach()
self.stream.set_close_callback(self.on_connection_close)
self.ws_connection = self.get_websocket_protocol()
if self.ws_connection:
self.clear_header('Content-Type')
self.ws_connection.accept_connection()
else:
if not self.stream.closed():
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 426 Upgrade Required\r\n"
"Sec-WebSocket-Version: 7, 8, 13\r\n\r\n"))
self.stream.close()
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket.
The message may be either a string or a dict (which will be
encoded as json). If the ``binary`` argument is false, the
message will be sent as utf8; in binary mode any byte string
is allowed.
If the connection is already closed, raises `WebSocketClosedError`.
.. versionchanged:: 3.2
`WebSocketClosedError` was added (previously a closed connection
would raise an `AttributeError`)
.. versionchanged:: 4.3
Returns a `.Future` which can be used for flow control.
"""
if self.ws_connection is None:
raise WebSocketClosedError()
if isinstance(message, dict):
message = tornado.escape.json_encode(message)
return self.ws_connection.write_message(message, binary=binary)
def select_subprotocol(self, subprotocols):
"""Invoked when a new WebSocket requests specific subprotocols.
``subprotocols`` is a list of strings identifying the
subprotocols proposed by the client. This method may be
overridden to return one of those strings to select it, or
``None`` to not select a subprotocol. Failure to select a
subprotocol does not automatically abort the connection,
although clients may close the connection if none of their
proposed subprotocols was selected.
"""
return None
def get_compression_options(self):
"""Override to return compression options for the connection.
If this method returns None (the default), compression will
be disabled. If it returns a dict (even an empty one), it
will be enabled. The contents of the dict may be used to
control the memory and CPU usage of the compression,
but no such options are currently implemented.
.. versionadded:: 4.1
"""
return None
def open(self, *args, **kwargs):
"""Invoked when a new WebSocket is opened.
The arguments to `open` are extracted from the `tornado.web.URLSpec`
regular expression, just like the arguments to
`tornado.web.RequestHandler.get`.
"""
pass
def on_message(self, message):
"""Handle incoming messages on the WebSocket
This method must be overridden.
"""
raise NotImplementedError
def ping(self, data):
"""Send ping frame to the remote end."""
if self.ws_connection is None:
raise WebSocketClosedError()
self.ws_connection.write_ping(data)
def on_pong(self, data):
"""Invoked when the response to a ping frame is received."""
pass
def on_close(self):
"""Invoked when the WebSocket is closed.
If the connection was closed cleanly and a status code or reason
phrase was supplied, these values will be available as the attributes
``self.close_code`` and ``self.close_reason``.
.. versionchanged:: 4.0
Added ``close_code`` and ``close_reason`` attributes.
"""
pass
def close(self, code=None, reason=None):
"""Closes this Web Socket.
Once the close handshake is successful the socket will be closed.
``code`` may be a numeric status code, taken from the values
defined in `RFC 6455 section 7.4.1
<https://tools.ietf.org/html/rfc6455#section-7.4.1>`_.
``reason`` may be a textual message about why the connection is
closing. These values are made available to the client, but are
not otherwise interpreted by the websocket protocol.
.. versionchanged:: 4.0
Added the ``code`` and ``reason`` arguments.
"""
if self.ws_connection:
self.ws_connection.close(code, reason)
self.ws_connection = None
def check_origin(self, origin):
"""Override to enable support for allowing alternate origins.
The ``origin`` argument is the value of the ``Origin`` HTTP
header, the url responsible for initiating this request. This
method is not called for clients that do not send this header;
such requests are always allowed (because all browsers that
implement WebSockets support this header, and non-browser
clients do not have the same cross-site security concerns).
Should return True to accept the request or False to reject it.
By default, rejects all requests with an origin on a host other
than this one.
This is a security protection against cross site scripting attacks on
browsers, since WebSockets are allowed to bypass the usual same-origin
policies and don't use CORS headers.
To accept all cross-origin traffic (which was the default prior to
Tornado 4.0), simply override this method to always return true::
def check_origin(self, origin):
return True
To allow connections from any subdomain of your site, you might
do something like::
def check_origin(self, origin):
parsed_origin = urllib.parse.urlparse(origin)
return parsed_origin.netloc.endswith(".mydomain.com")
.. versionadded:: 4.0
"""
parsed_origin = urlparse(origin)
origin = parsed_origin.netloc
origin = origin.lower()
host = self.request.headers.get("Host")
# Check to see that origin matches host directly, including ports
return origin == host
def set_nodelay(self, value):
"""Set the no-delay flag for this stream.
By default, small messages may be delayed and/or combined to minimize
the number of packets sent. This can sometimes cause 200-500ms delays
due to the interaction between Nagle's algorithm and TCP delayed
ACKs. To reduce this delay (at the expense of possibly increasing
bandwidth usage), call ``self.set_nodelay(True)`` once the websocket
connection is established.
See `.BaseIOStream.set_nodelay` for additional details.
.. versionadded:: 3.1
"""
self.stream.set_nodelay(value)
def on_connection_close(self):
if self.ws_connection:
self.ws_connection.on_connection_close()
self.ws_connection = None
if not self._on_close_called:
self._on_close_called = True
self.on_close()
def send_error(self, *args, **kwargs):
if self.stream is None:
super(WebSocketHandler, self).send_error(*args, **kwargs)
else:
# If we get an uncaught exception during the handshake,
# we have no choice but to abruptly close the connection.
# TODO: for uncaught exceptions after the handshake,
# we can close the connection more gracefully.
self.stream.close()
def get_websocket_protocol(self):
websocket_version = self.request.headers.get("Sec-WebSocket-Version")
if websocket_version in ("7", "8", "13"):
return WebSocketProtocol13(
self, compression_options=self.get_compression_options(),
response_headers=self._headers)
def _wrap_method(method):
def _disallow_for_websocket(self, *args, **kwargs):
if self.stream is None:
method(self, *args, **kwargs)
else:
raise RuntimeError("Method not supported for Web Sockets")
return _disallow_for_websocket
for method in ["write", "redirect", "set_header", "set_cookie",
"set_status", "flush", "finish"]:
setattr(WebSocketHandler, method,
_wrap_method(getattr(WebSocketHandler, method)))
class WebSocketProtocol(object):
"""Base class for WebSocket protocol versions.
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.stream = handler.stream
self.client_terminated = False
self.server_terminated = False
def _run_callback(self, callback, *args, **kwargs):
"""Runs the given callback with exception handling.
On error, aborts the websocket connection and returns False.
"""
try:
callback(*args, **kwargs)
except Exception:
app_log.error("Uncaught exception in %s",
self.request.path, exc_info=True)
self._abort()
def on_connection_close(self):
self._abort()
def _abort(self):
"""Instantly aborts the WebSocket connection by closing the socket"""
self.client_terminated = True
self.server_terminated = True
self.stream.close() # forcibly tear down the connection
self.close() # let the subclass cleanup
class _PerMessageDeflateCompressor(object):
def __init__(self, persistent, max_wbits):
if max_wbits is None:
max_wbits = zlib.MAX_WBITS
# There is no symbolic constant for the minimum wbits value.
if not (8 <= max_wbits <= zlib.MAX_WBITS):
raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
max_wbits, zlib.MAX_WBITS)
self._max_wbits = max_wbits
if persistent:
self._compressor = self._create_compressor()
else:
self._compressor = None
def _create_compressor(self):
return zlib.compressobj(tornado.web.GZipContentEncoding.GZIP_LEVEL,
zlib.DEFLATED, -self._max_wbits)
def compress(self, data):
compressor = self._compressor or self._create_compressor()
data = (compressor.compress(data) +
compressor.flush(zlib.Z_SYNC_FLUSH))
assert data.endswith(b'\x00\x00\xff\xff')
return data[:-4]
class _PerMessageDeflateDecompressor(object):
def __init__(self, persistent, max_wbits):
if max_wbits is None:
max_wbits = zlib.MAX_WBITS
if not (8 <= max_wbits <= zlib.MAX_WBITS):
raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
max_wbits, zlib.MAX_WBITS)
self._max_wbits = max_wbits
if persistent:
self._decompressor = self._create_decompressor()
else:
self._decompressor = None
def _create_decompressor(self):
return zlib.decompressobj(-self._max_wbits)
def decompress(self, data):
decompressor = self._decompressor or self._create_decompressor()
return decompressor.decompress(data + b'\x00\x00\xff\xff')
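

# Hedged round-trip illustration (added here; not part of tornado itself).
# It shows the RFC 7692 convention these helpers implement: the compressor
# strips the trailing 0x00 0x00 0xff 0xff sync-flush marker and the
# decompressor re-appends it before inflating.
def _example_permessage_deflate_roundtrip():
    compressor = _PerMessageDeflateCompressor(persistent=False, max_wbits=None)
    decompressor = _PerMessageDeflateDecompressor(persistent=False, max_wbits=None)
    payload = b"hello websocket"
    return decompressor.decompress(compressor.compress(payload)) == payload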
class WebSocketProtocol13(WebSocketProtocol):
"""Implementation of the WebSocket protocol from RFC 6455.
This class supports versions 7 and 8 of the protocol in addition to the
final version 13.
"""
# Bit masks for the first byte of a frame.
FIN = 0x80
RSV1 = 0x40
RSV2 = 0x20
RSV3 = 0x10
RSV_MASK = RSV1 | RSV2 | RSV3
OPCODE_MASK = 0x0f
def __init__(self, handler, mask_outgoing=False,
compression_options=None, response_headers=None):
WebSocketProtocol.__init__(self, handler)
self._response_headers = response_headers
self.mask_outgoing = mask_outgoing
self._final_frame = False
self._frame_opcode = None
self._masked_frame = None
self._frame_mask = None
self._frame_length = None
self._fragmented_message_buffer = None
self._fragmented_message_opcode = None
self._waiting = None
self._compression_options = compression_options
self._decompressor = None
self._compressor = None
self._frame_compressed = None
# The total uncompressed size of all messages received or sent.
# Unicode messages are encoded to utf8.
# Only for testing; subject to change.
self._message_bytes_in = 0
self._message_bytes_out = 0
# The total size of all packets received or sent. Includes
# the effect of compression, frame overhead, and control frames.
self._wire_bytes_in = 0
self._wire_bytes_out = 0
def accept_connection(self):
try:
self._handle_websocket_headers()
self._accept_connection()
except ValueError:
gen_log.debug("Malformed WebSocket request received",
exc_info=True)
self._abort()
return
def _handle_websocket_headers(self):
"""Verifies all invariant- and required headers
If a header is missing or have an incorrect value ValueError will be
raised
"""
fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
if not all(map(lambda f: self.request.headers.get(f), fields)):
raise ValueError("Missing/Invalid WebSocket headers")
@staticmethod
def compute_accept_value(key):
"""Computes the value for the Sec-WebSocket-Accept header,
given the value for Sec-WebSocket-Key.
"""
sha1 = hashlib.sha1()
sha1.update(utf8(key))
sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11") # Magic value
return native_str(base64.b64encode(sha1.digest()))
def _challenge_response(self):
return WebSocketProtocol13.compute_accept_value(
self.request.headers.get("Sec-Websocket-Key"))
def _accept_connection(self):
subprotocol_header = ''
subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '')
subprotocols = [s.strip() for s in subprotocols.split(',')]
if subprotocols:
selected = self.handler.select_subprotocol(subprotocols)
if selected:
assert selected in subprotocols
subprotocol_header = ("Sec-WebSocket-Protocol: %s\r\n"
% selected)
extension_header = ''
extensions = self._parse_extensions_header(self.request.headers)
for ext in extensions:
if (ext[0] == 'permessage-deflate' and
self._compression_options is not None):
# TODO: negotiate parameters if compression_options
# specifies limits.
self._create_compressors('server', ext[1])
if ('client_max_window_bits' in ext[1] and
ext[1]['client_max_window_bits'] is None):
# Don't echo an offered client_max_window_bits
# parameter with no value.
del ext[1]['client_max_window_bits']
extension_header = ('Sec-WebSocket-Extensions: %s\r\n' %
httputil._encode_header(
'permessage-deflate', ext[1]))
break
response_headers = ''
if self._response_headers is not None:
for header_name, header_value in self._response_headers.get_all():
response_headers += '%s: %s\r\n' % (header_name, header_value)
if self.stream.closed():
self._abort()
return
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 101 Switching Protocols\r\n"
"Upgrade: websocket\r\n"
"Connection: Upgrade\r\n"
"Sec-WebSocket-Accept: %s\r\n"
"%s%s%s"
"\r\n" % (self._challenge_response(), subprotocol_header,
extension_header, response_headers)))
self._run_callback(self.handler.open, *self.handler.open_args,
**self.handler.open_kwargs)
self._receive_frame()
def _parse_extensions_header(self, headers):
extensions = headers.get("Sec-WebSocket-Extensions", '')
if extensions:
return [httputil._parse_header(e.strip())
for e in extensions.split(',')]
return []
def _process_server_headers(self, key, headers):
"""Process the headers sent by the server to this client connection.
'key' is the websocket handshake challenge/response key.
"""
assert headers['Upgrade'].lower() == 'websocket'
assert headers['Connection'].lower() == 'upgrade'
accept = self.compute_accept_value(key)
assert headers['Sec-Websocket-Accept'] == accept
extensions = self._parse_extensions_header(headers)
for ext in extensions:
if (ext[0] == 'permessage-deflate' and
self._compression_options is not None):
self._create_compressors('client', ext[1])
else:
raise ValueError("unsupported extension %r", ext)
def _get_compressor_options(self, side, agreed_parameters):
"""Converts a websocket agreed_parameters set to keyword arguments
for our compressor objects.
"""
options = dict(
persistent=(side + '_no_context_takeover') not in agreed_parameters)
wbits_header = agreed_parameters.get(side + '_max_window_bits', None)
if wbits_header is None:
options['max_wbits'] = zlib.MAX_WBITS
else:
options['max_wbits'] = int(wbits_header)
return options
def _create_compressors(self, side, agreed_parameters):
# TODO: handle invalid parameters gracefully
allowed_keys = set(['server_no_context_takeover',
'client_no_context_takeover',
'server_max_window_bits',
'client_max_window_bits'])
for key in agreed_parameters:
if key not in allowed_keys:
raise ValueError("unsupported compression parameter %r" % key)
other_side = 'client' if (side == 'server') else 'server'
self._compressor = _PerMessageDeflateCompressor(
**self._get_compressor_options(side, agreed_parameters))
self._decompressor = _PerMessageDeflateDecompressor(
**self._get_compressor_options(other_side, agreed_parameters))
def _write_frame(self, fin, opcode, data, flags=0):
if fin:
finbit = self.FIN
else:
finbit = 0
frame = struct.pack("B", finbit | opcode | flags)
l = len(data)
if self.mask_outgoing:
mask_bit = 0x80
else:
mask_bit = 0
if l < 126:
frame += struct.pack("B", l | mask_bit)
elif l <= 0xFFFF:
frame += struct.pack("!BH", 126 | mask_bit, l)
else:
frame += struct.pack("!BQ", 127 | mask_bit, l)
if self.mask_outgoing:
mask = os.urandom(4)
data = mask + _websocket_mask(mask, data)
frame += data
self._wire_bytes_out += len(frame)
try:
return self.stream.write(frame)
except StreamClosedError:
self._abort()
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket."""
if binary:
opcode = 0x2
else:
opcode = 0x1
message = tornado.escape.utf8(message)
assert isinstance(message, bytes)
self._message_bytes_out += len(message)
flags = 0
if self._compressor:
message = self._compressor.compress(message)
flags |= self.RSV1
return self._write_frame(True, opcode, message, flags=flags)
def write_ping(self, data):
"""Send ping frame."""
assert isinstance(data, bytes)
self._write_frame(True, 0x9, data)
def _receive_frame(self):
try:
self.stream.read_bytes(2, self._on_frame_start)
except StreamClosedError:
self._abort()
def _on_frame_start(self, data):
self._wire_bytes_in += len(data)
header, payloadlen = struct.unpack("BB", data)
self._final_frame = header & self.FIN
reserved_bits = header & self.RSV_MASK
self._frame_opcode = header & self.OPCODE_MASK
self._frame_opcode_is_control = self._frame_opcode & 0x8
if self._decompressor is not None and self._frame_opcode != 0:
self._frame_compressed = bool(reserved_bits & self.RSV1)
reserved_bits &= ~self.RSV1
if reserved_bits:
# client is using as-yet-undefined extensions; abort
self._abort()
return
self._masked_frame = bool(payloadlen & 0x80)
payloadlen = payloadlen & 0x7f
if self._frame_opcode_is_control and payloadlen >= 126:
# control frames must have payload < 126
self._abort()
return
try:
if payloadlen < 126:
self._frame_length = payloadlen
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length,
self._on_frame_data)
elif payloadlen == 126:
self.stream.read_bytes(2, self._on_frame_length_16)
elif payloadlen == 127:
self.stream.read_bytes(8, self._on_frame_length_64)
except StreamClosedError:
self._abort()
def _on_frame_length_16(self, data):
self._wire_bytes_in += len(data)
self._frame_length = struct.unpack("!H", data)[0]
try:
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
except StreamClosedError:
self._abort()
def _on_frame_length_64(self, data):
self._wire_bytes_in += len(data)
self._frame_length = struct.unpack("!Q", data)[0]
try:
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
except StreamClosedError:
self._abort()
def _on_masking_key(self, data):
self._wire_bytes_in += len(data)
self._frame_mask = data
try:
self.stream.read_bytes(self._frame_length,
self._on_masked_frame_data)
except StreamClosedError:
self._abort()
def _on_masked_frame_data(self, data):
# Don't touch _wire_bytes_in; we'll do it in _on_frame_data.
self._on_frame_data(_websocket_mask(self._frame_mask, data))
def _on_frame_data(self, data):
self._wire_bytes_in += len(data)
if self._frame_opcode_is_control:
# control frames may be interleaved with a series of fragmented
# data frames, so control frames must not interact with
# self._fragmented_*
if not self._final_frame:
# control frames must not be fragmented
self._abort()
return
opcode = self._frame_opcode
elif self._frame_opcode == 0: # continuation frame
if self._fragmented_message_buffer is None:
# nothing to continue
self._abort()
return
self._fragmented_message_buffer += data
if self._final_frame:
opcode = self._fragmented_message_opcode
data = self._fragmented_message_buffer
self._fragmented_message_buffer = None
else: # start of new data message
if self._fragmented_message_buffer is not None:
# can't start new message until the old one is finished
self._abort()
return
if self._final_frame:
opcode = self._frame_opcode
else:
self._fragmented_message_opcode = self._frame_opcode
self._fragmented_message_buffer = data
if self._final_frame:
self._handle_message(opcode, data)
if not self.client_terminated:
self._receive_frame()
def _handle_message(self, opcode, data):
if self.client_terminated:
return
if self._frame_compressed:
data = self._decompressor.decompress(data)
if opcode == 0x1:
# UTF-8 data
self._message_bytes_in += len(data)
try:
decoded = data.decode("utf-8")
except UnicodeDecodeError:
self._abort()
return
self._run_callback(self.handler.on_message, decoded)
elif opcode == 0x2:
# Binary data
self._message_bytes_in += len(data)
self._run_callback(self.handler.on_message, data)
elif opcode == 0x8:
# Close
self.client_terminated = True
if len(data) >= 2:
self.handler.close_code = struct.unpack('>H', data[:2])[0]
if len(data) > 2:
self.handler.close_reason = to_unicode(data[2:])
# Echo the received close code, if any (RFC 6455 section 5.5.1).
self.close(self.handler.close_code)
elif opcode == 0x9:
# Ping
self._write_frame(True, 0xA, data)
elif opcode == 0xA:
# Pong
self._run_callback(self.handler.on_pong, data)
else:
self._abort()
def close(self, code=None, reason=None):
"""Closes the WebSocket connection."""
if not self.server_terminated:
if not self.stream.closed():
if code is None and reason is not None:
code = 1000 # "normal closure" status code
if code is None:
close_data = b''
else:
close_data = struct.pack('>H', code)
if reason is not None:
close_data += utf8(reason)
self._write_frame(True, 0x8, close_data)
self.server_terminated = True
if self.client_terminated:
if self._waiting is not None:
self.stream.io_loop.remove_timeout(self._waiting)
self._waiting = None
self.stream.close()
elif self._waiting is None:
# Give the client a few seconds to complete a clean shutdown,
# otherwise just close the connection.
self._waiting = self.stream.io_loop.add_timeout(
self.stream.io_loop.time() + 5, self._abort)
class WebSocketClientConnection(simple_httpclient._HTTPConnection):
"""WebSocket client connection.
This class should not be instantiated directly; use the
`websocket_connect` function instead.
"""
def __init__(self, io_loop, request, on_message_callback=None,
compression_options=None):
self.compression_options = compression_options
self.connect_future = TracebackFuture()
self.protocol = None
self.read_future = None
self.read_queue = collections.deque()
self.key = base64.b64encode(os.urandom(16))
self._on_message_callback = on_message_callback
self.close_code = self.close_reason = None
scheme, sep, rest = request.url.partition(':')
scheme = {'ws': 'http', 'wss': 'https'}[scheme]
request.url = scheme + sep + rest
request.headers.update({
'Upgrade': 'websocket',
'Connection': 'Upgrade',
'Sec-WebSocket-Key': self.key,
'Sec-WebSocket-Version': '13',
})
if self.compression_options is not None:
# Always offer to let the server set our max_wbits (and even though
# we don't offer it, we will accept a client_no_context_takeover
# from the server).
# TODO: set server parameters for deflate extension
# if requested in self.compression_options.
request.headers['Sec-WebSocket-Extensions'] = (
'permessage-deflate; client_max_window_bits')
self.tcp_client = TCPClient(io_loop=io_loop)
super(WebSocketClientConnection, self).__init__(
io_loop, None, request, lambda: None, self._on_http_response,
104857600, self.tcp_client, 65536, 104857600)
def close(self, code=None, reason=None):
"""Closes the websocket connection.
``code`` and ``reason`` are documented under
`WebSocketHandler.close`.
.. versionadded:: 3.2
.. versionchanged:: 4.0
Added the ``code`` and ``reason`` arguments.
"""
if self.protocol is not None:
self.protocol.close(code, reason)
self.protocol = None
def on_connection_close(self):
if not self.connect_future.done():
self.connect_future.set_exception(StreamClosedError())
self.on_message(None)
self.tcp_client.close()
super(WebSocketClientConnection, self).on_connection_close()
def _on_http_response(self, response):
if not self.connect_future.done():
if response.error:
self.connect_future.set_exception(response.error)
else:
self.connect_future.set_exception(WebSocketError(
"Non-websocket response"))
def headers_received(self, start_line, headers):
if start_line.code != 101:
return super(WebSocketClientConnection, self).headers_received(
start_line, headers)
self.headers = headers
self.protocol = self.get_websocket_protocol()
self.protocol._process_server_headers(self.key, self.headers)
self.protocol._receive_frame()
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
self.stream = self.connection.detach()
self.stream.set_close_callback(self.on_connection_close)
# Once we've taken over the connection, clear the final callback
# we set on the http request. This deactivates the error handling
# in simple_httpclient that would otherwise interfere with our
# ability to see exceptions.
self.final_callback = None
self.connect_future.set_result(self)
def write_message(self, message, binary=False):
"""Sends a message to the WebSocket server."""
return self.protocol.write_message(message, binary)
def read_message(self, callback=None):
"""Reads a message from the WebSocket server.
If on_message_callback was specified at WebSocket
        initialization, this function will never return messages.
Returns a future whose result is the message, or None
if the connection is closed. If a callback argument
is given it will be called with the future when it is
ready.
"""
assert self.read_future is None
future = TracebackFuture()
if self.read_queue:
future.set_result(self.read_queue.popleft())
else:
self.read_future = future
if callback is not None:
self.io_loop.add_future(future, callback)
return future
def on_message(self, message):
if self._on_message_callback:
self._on_message_callback(message)
elif self.read_future is not None:
self.read_future.set_result(message)
self.read_future = None
else:
self.read_queue.append(message)
def on_pong(self, data):
pass
def get_websocket_protocol(self):
return WebSocketProtocol13(self, mask_outgoing=True,
compression_options=self.compression_options)
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
on_message_callback=None, compression_options=None):
"""Client-side websocket support.
Takes a url and returns a Future whose result is a
`WebSocketClientConnection`.
``compression_options`` is interpreted in the same way as the
return value of `.WebSocketHandler.get_compression_options`.
The connection supports two styles of operation. In the coroutine
style, the application typically calls
`~.WebSocketClientConnection.read_message` in a loop::
conn = yield websocket_connect(url)
while True:
msg = yield conn.read_message()
if msg is None: break
# Do something with msg
In the callback style, pass an ``on_message_callback`` to
``websocket_connect``. In both styles, a message of ``None``
indicates that the connection has been closed.
.. versionchanged:: 3.2
Also accepts ``HTTPRequest`` objects in place of urls.
.. versionchanged:: 4.1
Added ``compression_options`` and ``on_message_callback``.
The ``io_loop`` argument is deprecated.
"""
if io_loop is None:
io_loop = IOLoop.current()
if isinstance(url, httpclient.HTTPRequest):
assert connect_timeout is None
request = url
# Copy and convert the headers dict/object (see comments in
# AsyncHTTPClient.fetch)
request.headers = httputil.HTTPHeaders(request.headers)
else:
request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
request = httpclient._RequestProxy(
request, httpclient.HTTPRequest._DEFAULTS)
conn = WebSocketClientConnection(io_loop, request,
on_message_callback=on_message_callback,
compression_options=compression_options)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
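# A minimal, hedged usage sketch (not part of Tornado itself) of the callback
# style described in the docstring above. The URL and handler are illustrative
# placeholders; a reachable websocket server is assumed.
def _example_callback_style_usage(url="ws://example.com/socket"):
    """Connect and print messages until the server closes the connection."""
    def handle_message(msg):
        # A message of None means the connection has been closed.
        if msg is None:
            IOLoop.current().stop()
        else:
            print(msg)

    websocket_connect(url, on_message_callback=handle_message)
    IOLoop.current().start()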
|
obsh/tornado
|
tornado/websocket.py
|
Python
|
apache-2.0
| 41,216
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from kafka.tools.protocol.responses import BaseResponse
class DescribeAclsV0Response(BaseResponse):
schema = [
{'name': 'throttle_time_ms', 'type': 'int32'},
{'name': 'error_code', 'type': 'int16'},
{'name': 'error_message', 'type': 'string'},
{'name': 'resources',
'type': 'array',
'item_type': [
{'name': 'resource_type', 'type': 'int8'},
{'name': 'resource_name', 'type': 'string'},
{'name': 'acls',
'type': 'array',
'item_type': [
{'name': 'principal', 'type': 'string'},
{'name': 'host', 'type': 'string'},
{'name': 'operation', 'type': 'int8'},
{'name': 'permission_type', 'type': 'int8'},
]},
]},
]
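    # Illustrative only (not part of kafka-tools): a decoded v0 response that
    # follows the schema above would have roughly this nested shape; every
    # value below is made up.
    #
    # {'throttle_time_ms': 0,
    #  'error_code': 0,
    #  'error_message': None,
    #  'resources': [
    #      {'resource_type': 2,               # hypothetical resource type id
    #       'resource_name': 'example-topic',
    #       'acls': [
    #           {'principal': 'User:alice',
    #            'host': '*',
    #            'operation': 3,
    #            'permission_type': 3}]}]}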
|
toddpalino/kafka-tools
|
kafka/tools/protocol/responses/describe_acls_v0.py
|
Python
|
apache-2.0
| 1,614
|
from collections import namedtuple
from cytomine.models._utilities.pattern_matching import resolve_pattern
class TestPatternMatching:
def get_fake_type(self):
return namedtuple("fakeobj", ["lst", "atomstr", "atomfloat"])
def test_no_iterable_pattern(self):
fake = self.get_fake_type()(lst=1, atomstr="aa", atomfloat=1.5)
resolved = sorted(resolve_pattern("{lst}/{atomstr}_{atomfloat}.png", fake))
assert(len(resolved) == 1)
assert(resolved[0] == "1/aa_1.5.png")
def test_single_iterable_pattern(self):
fake = self.get_fake_type()(lst=[1, 2, 3], atomstr="aa", atomfloat=1.5)
resolved = sorted(resolve_pattern("{lst}/{atomstr}_{atomfloat}.png", fake))
assert(len(resolved) == 3)
assert(resolved[0] == "1/aa_1.5.png")
assert(resolved[1] == "2/aa_1.5.png")
assert(resolved[2] == "3/aa_1.5.png")
def test_no_placeholder(self):
fake = self.get_fake_type()(lst=[1, 2, 3], atomstr="aa", atomfloat=1.5)
resolved = resolve_pattern("no_placeholder", fake)
assert(len(resolved) == 1)
|
cytomine/Cytomine-python-client
|
cytomine/tests/test_pattern_matching.py
|
Python
|
apache-2.0
| 1,106
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import hashlib
import re
from datetime import datetime
from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING
import pandas as pd
from sqlalchemy import literal_column
from sqlalchemy.sql.expression import ColumnClause
from superset.db_engine_specs.base import BaseEngineSpec
from superset.utils import core as utils
if TYPE_CHECKING:
from superset.models.core import Database # pragma: no cover
class BigQueryEngineSpec(BaseEngineSpec):
"""Engine spec for Google's BigQuery
As contributed by @mxmzdlv on issue #945"""
engine = "bigquery"
engine_name = "Google BigQuery"
max_column_name_length = 128
"""
https://www.python.org/dev/peps/pep-0249/#arraysize
    raw_connections bypass the pybigquery query execution context and deal with
    the raw DB-API connection directly.
    If this value is not set, it defaults to 1, as described here:
    https://googlecloudplatform.github.io/google-cloud-python/latest/_modules/google/cloud/bigquery/dbapi/cursor.html#Cursor
    The default value of 5000 is taken from pybigquery:
    https://github.com/mxmzdlv/pybigquery/blob/d214bb089ca0807ca9aaa6ce4d5a01172d40264e/pybigquery/sqlalchemy_bigquery.py#L102
"""
arraysize = 5000
_date_trunc_functions = {
"DATE": "DATE_TRUNC",
"DATETIME": "DATETIME_TRUNC",
"TIME": "TIME_TRUNC",
"TIMESTAMP": "TIMESTAMP_TRUNC",
}
_time_grain_expressions = {
None: "{col}",
"PT1S": "{func}({col}, SECOND)",
"PT1M": "{func}({col}, MINUTE)",
"PT1H": "{func}({col}, HOUR)",
"P1D": "{func}({col}, DAY)",
"P1W": "{func}({col}, WEEK)",
"P1M": "{func}({col}, MONTH)",
"P0.25Y": "{func}({col}, QUARTER)",
"P1Y": "{func}({col}, YEAR)",
}
@classmethod
def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
tt = target_type.upper()
if tt == utils.TemporalType.DATE:
return f"CAST('{dttm.date().isoformat()}' AS DATE)"
if tt == utils.TemporalType.DATETIME:
return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS DATETIME)"""
if tt == utils.TemporalType.TIME:
return f"""CAST('{dttm.strftime("%H:%M:%S.%f")}' AS TIME)"""
if tt == utils.TemporalType.TIMESTAMP:
return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS TIMESTAMP)"""
return None
@classmethod
def fetch_data(
cls, cursor: Any, limit: Optional[int] = None
) -> List[Tuple[Any, ...]]:
data = super().fetch_data(cursor, limit)
# Support type BigQuery Row, introduced here PR #4071
# google.cloud.bigquery.table.Row
if data and type(data[0]).__name__ == "Row":
data = [r.values() for r in data] # type: ignore
return data
@staticmethod
def _mutate_label(label: str) -> str:
"""
BigQuery field_name should start with a letter or underscore and contain only
alphanumeric characters. Labels that start with a number are prefixed with an
underscore. Any unsupported characters are replaced with underscores and an
md5 hash is added to the end of the label to avoid possible collisions.
:param label: Expected expression label
:return: Conditionally mutated label
"""
label_hashed = "_" + hashlib.md5(label.encode("utf-8")).hexdigest()
# if label starts with number, add underscore as first character
label_mutated = "_" + label if re.match(r"^\d", label) else label
# replace non-alphanumeric characters with underscores
label_mutated = re.sub(r"[^\w]+", "_", label_mutated)
if label_mutated != label:
# add first 5 chars from md5 hash to label to avoid possible collisions
label_mutated += label_hashed[:6]
return label_mutated
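    # Illustrative examples (not from the Superset sources; '<hash5>' stands in
    # for the first five hex chars of the label's md5 digest):
    #   _mutate_label("SUM(value)") -> "SUM_value__<hash5>"   # '(' and ')' replaced
    #   _mutate_label("3rd_col")    -> "_3rd_col_<hash5>"     # leading digit prefixed
    #   _mutate_label("plain_col")  -> "plain_col"            # unchanged, no hash added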
@classmethod
def _truncate_label(cls, label: str) -> str:
"""BigQuery requires column names start with either a letter or
underscore. To make sure this is always the case, an underscore is prefixed
to the md5 hash of the original label.
:param label: expected expression label
:return: truncated label
"""
return "_" + hashlib.md5(label.encode("utf-8")).hexdigest()
@classmethod
def extra_table_metadata(
cls, database: "Database", table_name: str, schema_name: str
) -> Dict[str, Any]:
indexes = database.get_indexes(table_name, schema_name)
if not indexes:
return {}
partitions_columns = [
index.get("column_names", [])
for index in indexes
if index.get("name") == "partition"
]
cluster_columns = [
index.get("column_names", [])
for index in indexes
if index.get("name") == "clustering"
]
return {
"partitions": {"cols": partitions_columns},
"clustering": {"cols": cluster_columns},
}
@classmethod
def _get_fields(cls, cols: List[Dict[str, Any]]) -> List[ColumnClause]:
"""
        The BigQuery dialect requires that we not use backticks around field names
        that are nested.
        Using literal_column handles that issue.
        https://docs.sqlalchemy.org/en/latest/core/tutorial.html#using-more-specific-text-with-table-literal-column-and-column
        We also explicitly specify column names so that we don't end up with
        duplicate column names in the result.
"""
return [
literal_column(c["name"]).label(c["name"].replace(".", "__")) for c in cols
]
@classmethod
def epoch_to_dttm(cls) -> str:
return "TIMESTAMP_SECONDS({col})"
@classmethod
def epoch_ms_to_dttm(cls) -> str:
return "TIMESTAMP_MILLIS({col})"
@classmethod
def df_to_sql(cls, df: pd.DataFrame, **kwargs: Any) -> None:
"""
Upload data from a Pandas DataFrame to BigQuery. Calls
`DataFrame.to_gbq()` which requires `pandas_gbq` to be installed.
:param df: Dataframe with data to be uploaded
:param kwargs: kwargs to be passed to to_gbq() method. Requires that `schema`,
`name` and `con` are present in kwargs. `name` and `schema` are combined
and passed to `to_gbq()` as `destination_table`.
"""
try:
import pandas_gbq
from google.oauth2 import service_account
except ImportError:
raise Exception(
"Could not import libraries `pandas_gbq` or `google.oauth2`, which are "
"required to be installed in your environment in order "
"to upload data to BigQuery"
)
if not ("name" in kwargs and "schema" in kwargs and "con" in kwargs):
raise Exception("name, schema and con need to be defined in kwargs")
gbq_kwargs = {}
gbq_kwargs["project_id"] = kwargs["con"].engine.url.host
gbq_kwargs["destination_table"] = f"{kwargs.pop('schema')}.{kwargs.pop('name')}"
# add credentials if they are set on the SQLAlchemy Dialect:
creds = kwargs["con"].dialect.credentials_info
if creds:
credentials = service_account.Credentials.from_service_account_info(creds)
gbq_kwargs["credentials"] = credentials
# Only pass through supported kwargs
supported_kwarg_keys = {"if_exists"}
for key in supported_kwarg_keys:
if key in kwargs:
gbq_kwargs[key] = kwargs[key]
pandas_gbq.to_gbq(df, **gbq_kwargs)
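# A minimal, hedged sketch (not part of Superset) of how ``df_to_sql`` expects
# to be called. The project id, dataset and table names are placeholders, and a
# BigQuery-capable SQLAlchemy engine plus ``pandas_gbq`` are assumed to be
# installed and authenticated.
def _example_df_to_sql_usage():
    from sqlalchemy import create_engine

    engine = create_engine("bigquery://my-gcp-project")  # hypothetical project
    df = pd.DataFrame({"id": [1, 2], "name": ["a", "b"]})
    with engine.connect() as con:
        # ``name`` and ``schema`` are combined into to_gbq()'s destination_table.
        BigQueryEngineSpec.df_to_sql(
            df,
            name="example_table",
            schema="example_dataset",
            con=con,
            if_exists="replace",
        )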
|
airbnb/superset
|
superset/db_engine_specs/bigquery.py
|
Python
|
apache-2.0
| 8,436
|
#!/usr/bin/env python2
import numpy as np
import pdb
from random import sample
from time import time
import heapq
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import sys, os
from eft_calculator import EFT_calculator, Water
import tools
def load_coordinates(name):
lines = open('test.dat/random/'+name).readlines()[-7:-1]
coors = [[float(item) for item in line.split()[2:5]] for line in lines]
return np.array(coors)
class Classical_calculator:
def __init__(self):
self.eps = [0.12, 0.046, 0.046]
self.sigma = [1.7, 0.2245, 0.2245]
self.charge = [-0.834, 0.417, 0.417]
def eval(self, coors):
mol = Water()
coor0 = coors[:3]
coor1 = coors[3:]
e = 0.
f = np.zeros(3)
t = np.zeros(3)
com1 = mol.getCOM(coor1)
eps, sigma, charge = self.eps, self.sigma, self.charge
for i in range(3):
for j in range(3):
ener, force = self.atomicEF(coor0[i], eps[i], sigma[i], charge[i], coor1[j], eps[j], sigma[j], charge[j])
e += ener
f += force
t += np.cross(coor1[j]-com1, force)
#if e>100.0:
# e = 100.0
# f = f/np.linalg.norm(f) * 100.0
# t = t/np.linalg.norm(t) * 100.0
return np.array([e, f[0], f[1], f[2], t[0], t[1], t[2]])
def atomicEF(self, x0, e0, s0, q0, x1, e1, s1, q1):
k = 138.935456
e = np.sqrt(e0 * e1)
s = s0 + s1
r = np.linalg.norm(x0 - x1)
        if r < 0.1: return 100.0, np.array([100., 100., 100.])
sor6 = (s/r) ** 6
evdw = e * (sor6**2 - 2 * sor6)
fvdw = e / r**2 * sor6 * (sor6 - 1) * (x1 - x0)
eelec = k * q0 * q1 / r
felec = k * q0 * q1 / r**3 * (x1 - x0)
ener = evdw + eelec
force = fvdw + felec
return ener, force
def test_random_set():
e0 = []
e1 = []
fce0 = []
fce1 = []
trq0 = []
trq1 = []
all = []
t1 = time()
for i in range(1, 2000):
# load atomic coor
name = 'test.dat/random/test%04d.inp.log' % i
#if i == 1693: pdb.set_trace()
eft, coors = calculator._parseQMlog(name)
# evaluate with analytical function
eft = cc.eval(coors)
e0.append(eft[0])
fce0 += list(eft[1:4])
trq0 += list(eft[4:7])
# convert atomic coor to r, phi, theta...
X0, q0 = calculator.mol.atomic2Xq(coors[:3])
X1, q1 = calculator.mol.atomic2Xq(coors[3:])
# evaluate with calculator
eft = calculator.eval(X0, q0, X1, q1)
e1.append(eft[0])
#if eft[0] > 15:
# print(coors, name)
# print(np.dtype(q1[0]))
fce1 += list(eft[1:4])
trq1 += list(eft[4:7])
#all.append((-np.abs(e0[-1]-e1[-1]), name))
all.append((-np.linalg.norm(np.array(fce0) - np.array(fce1)), name))
t2 = time()
print('took %.1f s to evaluate the random set' % (t2 - t1))
heapq.heapify(all)
#for i in range(3):
# de, name = heapq.heappop(all)
# print -de, name
"""
for i in range(len(e0)):
if e1[i]> 100.0:
e0[i] = e1[i] = 0.0
for j in range(3):
fce0[i*3 +j ] = fce1[i*3+j] = trq0[i*3+j] = trq1[i*3+j] = 0.0
"""
# make a plot
_, axarr = plt.subplots(1, 3)
p = np.corrcoef(e0, e1)[0, 1]
print("Energy: p =", p)
axarr[0].scatter(e0, e1)
axarr[0].text(0, 0, 'p=%.4f'%p)
p = np.corrcoef(fce0, fce1)[0, 1]
print("Force: p =", p)
axarr[1].scatter(fce0, fce1)
axarr[1].text(0, 0, 'p=%.4f'%p)
p = np.corrcoef(trq0, trq1)[0, 1]
print("Torque: p =", p)
axarr[2].scatter(trq0, trq1)
axarr[2].text(0, 0, 'p=%.4f'%p)
plt.savefig(figname)
def randomSample():
root = 'golden.dat'
if not os.path.exists(root):os.mkdir(root)
def mol2mol_init(ele):
mol = [[i,0.0,0.0,0.0] for i in ele]
return mol
size = 200
folder_id = 0
file_count = 0
confs = calculator.grid._iter_conf()
confs = list(confs)
if len(confs) > 2000:
confs = sample(list(confs), 2000)
for idx, coors in calculator.gen_PDB(confs):
#for id, coors in calculator.gen_atomic_coors(0,10):
#print(idx, coors)
if file_count%size == 0:
folder = os.path.join(root,"EFT_%04d"%(folder_id))
if not os.path.exists(folder):os.mkdir(folder)
folder_id += 1
pdb = open("%s/eft.%s.pdb"%(folder,idx),"w")
pdb.write(coors)
pdb.close()
file_count += 1
def grids_conf():
root = 'grids.dat'
if not os.path.exists(root):os.mkdir(root)
def mol2mol_init(ele):
mol = [[i,0.0,0.0,0.0] for i in ele]
return mol
size = 200
folder_id = 0
file_count = 0
confs = calculator.grid._grid_conf()
for idx, coors in calculator.gen_PDB(confs):
#for id, coors in calculator.gen_atomic_coors(0,10):
#print(idx, coors)
if file_count%size == 0:
folder = os.path.join(root,"EFT_%04d"%(folder_id))
if not os.path.exists(folder):os.mkdir(folder)
folder_id += 1
pdb = open("%s/eft.%s.pdb"%(folder,idx),"w")
pdb.write(coors)
pdb.close()
file_count += 1
if __name__ == '__main__':
    if len(sys.argv) < 3:
        print("\n Usage: %s figname.png datfilename.dat [err_cutoff]\n" % sys.argv[0])
        sys.exit()
    figname = sys.argv[1]  # output figure name
    databaseName = sys.argv[2]
t0 = time()
cc = Classical_calculator()
if os.path.exists(databaseName):
print("loaded a old database")
calculator = EFT_calculator(databaseName)
else:
print("created a new mesh")
calculator = EFT_calculator()
if len(sys.argv) == 4:
error_cutoff = float(sys.argv[3])
print("set cutoff as %f"%(error_cutoff))
calculator.fill_grid(cc, databaseName, error_cutoff)
t1 = time()
print('took %.1f s to fill the grid' % (t1 - t0))
test_random_set()
#randomSample()
grids_conf()
|
yangjincai/Xq2EFT
|
testAndOutputGrids.py
|
Python
|
apache-2.0
| 6,139
|
from optparse import OptionParser
import sys
from os.path import expanduser, abspath
import sunburnt
import dist_job_mgr.client as djm
QUERY_TERM="datablox"
def run_query(addresses):
master = addresses[0]
si = sunburnt.SolrInterface("http://%s:8983/solr" % master)
resp = si.query(QUERY_TERM).execute()
assert resp.status==0
objs = resp.result.numFound
time_ms = resp.QTime
if time_ms>0:
rate = "%.2f obj/sec" % (1000.0*(float(objs)/float(time_ms)))
else:
rate = "Rate too fast to measure"
print "%d results in %d ms (%s)" % (objs, time_ms, rate)
return 0
def main(argv=sys.argv[1:]):
usage = "%prog [options] query_host [host2 host3 ...]"
parser = OptionParser(usage=usage)
(options, args) = parser.parse_args(argv)
if len(args)==0:
parser.error("Need to provide at least one host name")
djm_conn = djm.get_local_connection(abspath(expanduser("~/apps/djm")))
addresses = []
for name in args:
host = djm_conn.find_node_by_name(name)
if host==None:
parser.error("No node named '%s' found in djm database" % name)
addresses.append(host["contact_address"])
return run_query(addresses)
if __name__ == "__main__":
sys.exit(main())
|
mpi-sws-rse/datablox
|
scripts/solr_test.py
|
Python
|
apache-2.0
| 1,268
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains code for loading and preprocessing the MNIST data."""
import os
import tensorflow as tf
slim = tf.contrib.slim
dataset_data_provider = slim.dataset_data_provider
dataset = slim.dataset
queues = slim.queues
gfile = tf.gfile
import demosaic_utils
def make_demosaic(image, height, width, BURST_LENGTH, to_shift, upscale, jitter):
y = tf.random_uniform([1], jitter * upscale, tf.shape(image)[0]-height - jitter*upscale, tf.int32)
x = tf.random_uniform([1], jitter * upscale, tf.shape(image)[1]-width - jitter*upscale, tf.int32)
y, x = y[0], x[0]
demosaic = tf.reshape(image[y:y+height, x:x+width, :], (1, height, width, 1, 3))
delta = tf.random_uniform([BURST_LENGTH-1,2], -jitter*upscale, jitter*upscale+1, tf.int32)
# delta_big = tf.random_uniform([BURST_LENGTH-1,2], -20, 20, tf.int32)
shift_mask = tf.random_uniform([BURST_LENGTH-1, 1], 0., BURST_LENGTH-1., tf.float32) * to_shift
shift_mask = tf.where(shift_mask > BURST_LENGTH-2., tf.ones([BURST_LENGTH-1, 1]), tf.zeros([BURST_LENGTH-1, 1]))
delta = delta # + tf.cast(tf.tile(shift_mask, [1, 2]), tf.int32) * delta_big
shift_mask = tf.reshape(shift_mask, [1, BURST_LENGTH-1])
for d in range(BURST_LENGTH-1):
y_ = (y + delta[d,0]) # % (tf.shape(image)[0]-height)
x_ = (x + delta[d,1]) # % (tf.shape(image)[1]-width)
demosaic2 = tf.reshape(image[y_:y_+height, x_:x_+width, :], (1, height, width, 1, 3))
demosaic = tf.concat([demosaic, demosaic2], axis=3)
return demosaic, shift_mask
def make_stack_demosaic(image, height, width, depth, BURST_LENGTH, to_shift, upscale, jitter):
height = height * upscale
width = width * upscale
v_error = tf.maximum(height + 2 * jitter * upscale - tf.shape(image)[0] + 1, 0)
h_error = tf.maximum(width + 2 * jitter * upscale - tf.shape(image)[1] + 1, 0)
image = tf.pad(image, [[0,v_error],[0,h_error],[0,0]])
demosaic_stack, shift_stack = make_demosaic(image, height, width, BURST_LENGTH, to_shift, upscale, jitter)
for i in range(depth-1):
demosaic, shift_mask = make_demosaic(image, height, width, BURST_LENGTH, to_shift, upscale, jitter)
demosaic_stack = tf.concat((demosaic_stack, demosaic), axis=0)
shift_stack = tf.concat((shift_stack, shift_mask) , axis=0)
dt = tf.reshape(tf.transpose(demosaic_stack, [0, 3, 1, 2, 4]), [-1, height, width, 3])
height = height // upscale
width = width // upscale
dt = tf.image.resize_images(dt, [height, width], method=tf.image.ResizeMethod.AREA)
demosaic_stack = tf.transpose(tf.reshape(dt, [depth, BURST_LENGTH, height, width, 3]), [0, 2, 3, 1, 4])
mosaic = tf.stack((demosaic_stack[:,::2,::2,:,0],demosaic_stack[:,::2,1::2,:,1],demosaic_stack[:,1::2,::2,:,1],demosaic_stack[:,1::2,1::2,:,2]), axis=-1)
mosaic = demosaic_utils.tf22reshape2(mosaic, BURST_LENGTH)
mosaic = tf.reshape(mosaic, (depth, height, width, BURST_LENGTH))
return mosaic, demosaic_stack, shift_stack
def load_batch_demosaic(BURST_LENGTH, dataset_dir, batch_size=32, height=64, width=64, degamma=1., to_shift=1., upscale=1, jitter=1):
filenames = [os.path.join(dataset_dir, f) for f in gfile.ListDirectory(dataset_dir)]
filename_queue = tf.train.string_input_producer(filenames)
mosaic = None
    while mosaic is None:
_, image_file = tf.WholeFileReader().read(filename_queue)
image = tf.image.decode_image(image_file)
mosaic, demosaic, shift = make_stack_demosaic((tf.cast(image[0], tf.float32) / 255.)**degamma,
height, width, 128, BURST_LENGTH, to_shift, upscale, jitter)
# Batch it up.
mosaic, demosaic, shift = tf.train.shuffle_batch(
[mosaic, demosaic, shift],
batch_size=batch_size,
num_threads=2,
capacity=500 + 3 * batch_size,
enqueue_many=True,
min_after_dequeue=100)
return mosaic, demosaic, shift
def make_batch_hqjitter(patches, BURST_LENGTH, batch_size, repeats, height, width,
to_shift, upscale, jitter, smalljitter):
# patches is [BURST_LENGTH, h_up, w_up, 3]
j_up = jitter * upscale
h_up = height * upscale # + 2 * j_up
w_up = width * upscale # + 2 * j_up
bigj_patches = patches
delta_up = (jitter - smalljitter) * upscale
smallj_patches = patches[:, delta_up:-delta_up, delta_up:-delta_up, ...]
unique = batch_size//repeats
batch = []
for i in range(unique):
for j in range(repeats):
curr = [patches[i, j_up:-j_up, j_up:-j_up, :]]
prob = tf.minimum(tf.cast(tf.random_poisson(1.5, []), tf.float32)/BURST_LENGTH, 1.)
for k in range(BURST_LENGTH - 1):
flip = tf.random_uniform([])
p2use = tf.cond(flip < prob, lambda : bigj_patches, lambda : smallj_patches)
curr.append(tf.random_crop(p2use[i, ...], [h_up, w_up, 3]))
curr = tf.stack(curr, axis=0)
curr = tf.image.resize_images(curr, [height, width], method=tf.image.ResizeMethod.AREA)
curr = tf.transpose(curr, [1,2,3,0])
batch.append(curr)
batch = tf.stack(batch, axis=0)
return batch
def make_stack_hqjitter(image, height, width, depth, BURST_LENGTH, to_shift, upscale, jitter):
j_up = jitter * upscale
h_up = height * upscale + 2 * j_up
w_up = width * upscale + 2 * j_up
v_error = tf.maximum((h_up - tf.shape(image)[0] + 1) // 2, 0)
h_error = tf.maximum((w_up - tf.shape(image)[1] + 1) // 2, 0)
image = tf.pad(image, [[v_error, v_error],[h_error,h_error],[0,0]])
stack = []
for i in range(depth):
stack.append(tf.random_crop(image, [h_up, w_up, 3]))
stack = tf.stack(stack, axis=0)
return stack
def load_batch_hqjitter(dataset_dir, patches_per_img=32, min_queue=8, BURST_LENGTH=1, batch_size=32,
repeats=1, height=64, width=64, degamma=1.,
to_shift=1., upscale=1, jitter=1, smalljitter=1):
filenames = [os.path.join(dataset_dir, f) for f in gfile.ListDirectory(dataset_dir)]
filename_queue = tf.train.string_input_producer(filenames)
_, image_file = tf.WholeFileReader().read(filename_queue)
image = tf.image.decode_image(image_file)
patches = make_stack_hqjitter((tf.cast(image[0], tf.float32) / 255.)**degamma,
height, width, patches_per_img, BURST_LENGTH, to_shift, upscale, jitter)
unique = batch_size//repeats
# Batch it up.
patches = tf.train.shuffle_batch(
[patches],
batch_size=unique,
num_threads=2,
capacity=min_queue + 3 * batch_size,
enqueue_many=True,
min_after_dequeue=min_queue)
print('PATCHES =================',patches.get_shape().as_list())
patches = make_batch_hqjitter(patches, BURST_LENGTH, batch_size, repeats, height, width, to_shift, upscale, jitter, smalljitter)
return patches
def make_noised(image, height, width, sig_range):
y = tf.random_uniform([1], 0, tf.shape(image)[0]-height, tf.int32)
x = tf.random_uniform([1], 0, tf.shape(image)[1]-width, tf.int32)
y, x = y[0], x[0]
noised = tf.reshape(image[y:y+height, x:x+width, :], (1, height, width, 1, 3))
denoised = noised
sig = tf.random_uniform([1], 0, sig_range, tf.float32)
noised = tf.clip_by_value(noised + tf.random_normal(tf.shape(noised),mean=0.,stddev=sig[0]),0.,1.)
return noised, denoised, tf.reshape(sig, [1,1])
def make_stack_noised(image, height, width, depth, sig_range):
v_error = tf.maximum(height - tf.shape(image)[0] + 1, 0)
h_error = tf.maximum(width - tf.shape(image)[1] + 1, 0)
image = tf.pad(image, [[0,v_error],[0,h_error],[0,0]])
noised_stack, denoised_stack, sig_stack = make_noised(image, height, width, sig_range)
for i in range(depth-1):
noised, denoised, sig = make_noised(image, height, width, sig_range)
noised_stack = tf.concat((noised_stack, noised), axis=0)
denoised_stack = tf.concat((denoised_stack, denoised), axis=0)
sig_stack = tf.concat((sig_stack, sig), axis=0)
return noised_stack, denoised_stack, sig_stack
def load_batch_noised(depth, dataset_dir, batch_size=32, height=64, width=64, degamma=1., sig_range=20.):
filenames = [os.path.join(dataset_dir, f) for f in gfile.ListDirectory(dataset_dir)]
filename_queue = tf.train.string_input_producer(filenames)
noised_stack = None
    while noised_stack is None:
_, image_file = tf.WholeFileReader().read(filename_queue)
image = tf.image.decode_image(image_file)
noised_stack, denoised_stack, sig_stack = make_stack_noised((tf.cast(image[0], tf.float32) / 255.)**degamma, height, width, depth, sig_range)
# Batch it up.
noised, denoised, sig = tf.train.shuffle_batch(
[noised_stack, denoised_stack, sig_stack],
batch_size=batch_size,
num_threads=2,
capacity=1024 + 3 * batch_size,
enqueue_many=True,
min_after_dequeue=500)
return noised, denoised, sig
def decode(tfr_features):
burst = tf.decode_raw(tfr_features['burst_raw'], tf.float32)
merged = tf.decode_raw(tfr_features['merge_raw'], tf.float32)
readvar = tf.decode_raw(tfr_features['readvar'], tf.float32)
shotfactor = tf.decode_raw(tfr_features['shotfactor'], tf.float32)
channelgain = tf.decode_raw(tfr_features['channelgain'], tf.float32)
blacklevels = tf.decode_raw(tfr_features['blacklevels'], tf.float32)
depth = tf.cast(tfr_features['depth'], tf.int32) # 0
height = tf.cast(tfr_features['height'], tf.int32) # 1
width = tf.cast(tfr_features['width'], tf.int32) # 2
# depth = width_
# height = depth_
# width = height_
# WIDTH=4032
# HEIGHT=3024
# payload_raw_c = (payload_raw-bl/16) * ch
burst = tf.reshape(burst, (height,width,depth))
sh = tf.shape(burst)
ch = tf.tile(tf.reshape(channelgain, (2,2,1)), (sh[0]/2, sh[1]/2, sh[2]))
bl = tf.tile(tf.reshape(blacklevels, (2,2,1)), (sh[0]/2, sh[1]/2, sh[2]))
burst = (burst - bl/16.) * ch
merged = tf.reshape(merged, (height,width,3)) / 16.
scale = tf.reduce_max(merged)
burst = tf.clip_by_value(burst, 0., scale)
scale = 1024.
burst = burst / scale
merged = merged / scale
readvar = tf.reshape(readvar * channelgain * channelgain, [4]) / scale / scale
shotfactor = tf.reshape(shotfactor * channelgain, [4]) / scale
return burst, merged, readvar, shotfactor
def decode_patches(tfr_features):
burst = tf.decode_raw(tfr_features['burst_raw'], tf.float32)
merged = tf.decode_raw(tfr_features['merge_raw'], tf.float32)
demosaic = tf.decode_raw(tfr_features['demosaic_raw'], tf.float32)
readvar = tf.decode_raw(tfr_features['readvar'], tf.float32)
shotfactor = tf.decode_raw(tfr_features['shotfactor'], tf.float32)
channelgain = tf.decode_raw(tfr_features['channelgain'], tf.float32)
blacklevels = tf.decode_raw(tfr_features['blacklevels'], tf.float32)
depth = tf.cast(tfr_features['depth'], tf.int32) # 0
height = tf.cast(tfr_features['height'], tf.int32) # 1
width = tf.cast(tfr_features['width'], tf.int32) # 2
patches = tf.cast(tfr_features['patches'], tf.int32)
burst = tf.reshape(burst, (patches, height,width,depth))
sh = tf.shape(burst)
ch = tf.tile(tf.reshape(channelgain, (2,2,1)), (sh[1]/2, sh[2]/2, sh[3]))
bl = tf.tile(tf.reshape(blacklevels, (2,2,1)), (sh[1]/2, sh[2]/2, sh[3]))
burst = (burst - bl/16./2**10) * ch
merged = tf.reshape(merged, (patches,height,width))
demosaic = tf.reshape(demosaic, (patches,height,width,3))
demosaic = demosaic
burst = tf.clip_by_value(burst, -10, 1.)
merged = tf.clip_by_value(merged, -10, 1.)
scale = 2.**10
readvar = tf.reshape(readvar, [4]) / scale / scale
shotfactor = tf.reshape(shotfactor, [4]) / scale
return burst, merged, demosaic, readvar, shotfactor
def read_and_decode_single(filename):
e = tf.python_io.tf_record_iterator(filename).next()
features = tf.parse_single_example(e, features={
'readvar': tf.FixedLenFeature([], tf.string),
'shotfactor': tf.FixedLenFeature([], tf.string),
'blacklevels': tf.FixedLenFeature([], tf.string),
'channelgain': tf.FixedLenFeature([], tf.string),
'burst_raw': tf.FixedLenFeature([], tf.string),
'merge_raw': tf.FixedLenFeature([], tf.string),
'depth': tf.FixedLenFeature([], tf.int64),
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
})
return decode(features)
def read_and_decode(filename_queue):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
features={
'readvar': tf.FixedLenFeature([], tf.string),
'shotfactor': tf.FixedLenFeature([], tf.string),
'blacklevels': tf.FixedLenFeature([], tf.string),
'channelgain': tf.FixedLenFeature([], tf.string),
'burst_raw': tf.FixedLenFeature([], tf.string),
'merge_raw': tf.FixedLenFeature([], tf.string),
'depth': tf.FixedLenFeature([], tf.int64),
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
})
return decode(features)
def read_and_decode_patches(filename_queue):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
features={
'readvar': tf.FixedLenFeature([], tf.string),
'shotfactor': tf.FixedLenFeature([], tf.string),
'blacklevels': tf.FixedLenFeature([], tf.string),
'channelgain': tf.FixedLenFeature([], tf.string),
'burst_raw': tf.FixedLenFeature([], tf.string),
'merge_raw': tf.FixedLenFeature([], tf.string),
'demosaic_raw': tf.FixedLenFeature([], tf.string),
'depth': tf.FixedLenFeature([], tf.int64),
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
'patches': tf.FixedLenFeature([], tf.int64),
})
return decode_patches(features)
def read_and_decode_str(filename_queue):
reader = tf.TFRecordReader()
s, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
features={
'readvar': tf.FixedLenFeature([], tf.string),
'shotfactor': tf.FixedLenFeature([], tf.string),
'blacklevels': tf.FixedLenFeature([], tf.string),
'channelgain': tf.FixedLenFeature([], tf.string),
'burst_raw': tf.FixedLenFeature([], tf.string),
'merge_raw': tf.FixedLenFeature([], tf.string),
'depth': tf.FixedLenFeature([], tf.int64),
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
})
return s, decode(features)
def load_tfrecord(filename):
g = tf.Graph()
with g.as_default():
tf.logging.set_verbosity(tf.logging.INFO)
mosaic, demosaic_truth, readvar, shotfactor = read_and_decode_single(filename)
init_op = tf.group(tf.initialize_all_variables(), tf.initialize_local_variables())
with tf.Session() as sess:
sess.run(init_op)
mosaic, demosaic_truth, readvar, shotfactor = \
sess.run([mosaic, demosaic_truth, readvar, shotfactor])
return mosaic, demosaic_truth, readvar, shotfactor
def sample_patch(burst, merged, height, width, burst_length):
y = tf.random_uniform([1], 0, tf.shape(burst)[0]-height, tf.int32)
x = tf.random_uniform([1], 0, tf.shape(burst)[1]-width, tf.int32)
y, x = (y[0]//2)*2, (x[0]//2)*2
mosaic = burst[y:y+height, x:x+width,:burst_length]
demosaic = merged[y:y+height, x:x+width,:]
return mosaic, demosaic
def stackRGB(burst):
burst = tf.stack((burst[:,::2,::2],(burst[:,1::2,::2]+burst[:,::2,1::2])/2,burst[:,1::2,1::2]), axis=-1)
return burst
def burst2patches(burst, merged, height, width, depth, burst_length):
mosaic, demosaic = sample_patch(burst, merged, height, width, burst_length)
mosaic = tf.expand_dims(mosaic, axis=0)
demosaic = tf.expand_dims(demosaic, axis=0)
for i in range(depth-1):
m, d = sample_patch(burst, merged, height, width, burst_length)
m = tf.expand_dims(m, axis=0)
d = tf.expand_dims(d, axis=0)
mosaic = tf.concat((mosaic, m), axis=0)
demosaic = tf.concat((demosaic, d), axis=0)
return mosaic, demosaic
def inputs(filenames, batch_size, height, width, depth, burst_length):
with tf.name_scope('input'):
filename_queue = tf.train.string_input_producer(filenames)
# Even when reading in multiple threads, share the filename
# queue.
burst, merged, readvar, shotfactor = read_and_decode(filename_queue)
d = tf.shape(burst)[-1]
burst = tf.cond(d > burst_length, lambda: burst[...,:burst_length], lambda: burst)
burst = tf.cond(d < burst_length, lambda: tf.pad(burst, [[0,0],[0,0],[0,burst_length-d]]), lambda: burst)
mosaic, demosaic = burst2patches(burst, merged, height, width, depth, burst_length)
mosaic = tf.reshape(mosaic, [depth, height, width, burst_length])
demosaic = tf.reshape(demosaic, [depth, height, width, 3])
readvar = tf.tile(tf.reshape(readvar, [1, 4]), [depth, 1])
shotfactor = tf.tile(tf.reshape(shotfactor, [1, 4]), [depth, 1])
valid_mask = tf.ones([1,tf.minimum(burst_length,d)])
valid_mask = tf.cond(burst_length > d, lambda : tf.concat([valid_mask,tf.zeros([1,burst_length-d])], axis=-1), lambda : valid_mask)
valid_mask = tf.tile(valid_mask, [depth, 1])
valid_mask = tf.reshape(valid_mask, [depth, burst_length])
mosaic, demosaic, readvar, shotfactor, valid_mask = tf.train.shuffle_batch(
[mosaic, demosaic, readvar, shotfactor, valid_mask],
batch_size=batch_size,
num_threads=2,
capacity=1024 + 3 * batch_size,
enqueue_many=True,
min_after_dequeue=128)
return mosaic, demosaic, readvar, shotfactor, valid_mask
def inputs_patches(filenames, batch_size, burst_length):
with tf.name_scope('input'):
filename_queue = tf.train.string_input_producer(filenames)
# Even when reading in multiple threads, share the filename
# queue.
burst, merged, demosaic, readvar, shotfactor = read_and_decode_patches(filename_queue)
# d = tf.shape(burst)[-1]
# burst = tf.cond(d > burst_length, lambda: burst[...,:burst_length], lambda: burst)
# burst = tf.cond(d < burst_length, lambda: tf.pad(burst, [[0,0],[0,0],[0,0],[0,burst_length-d]]), lambda: burst)
burst = burst[...,:burst_length]
depth = 16 # tf.shape(burst)[0]
readvar = tf.tile(tf.reshape(readvar, [1, 4]), [depth, 1])
shotfactor = tf.tile(tf.reshape(shotfactor, [1, 4]), [depth, 1])
burst = tf.reshape(burst, [depth, 256, 256, burst_length])
merged = tf.reshape(merged, [depth, 256, 256])
demosaic = tf.reshape(demosaic, [depth, 256, 256, 3])
burst, merged, demosaic, readvar, shotfactor = tf.train.shuffle_batch(
[burst, merged, demosaic, readvar, shotfactor],
batch_size=batch_size,
num_threads=2,
capacity=1000 + 3 * batch_size,
enqueue_many=True,
min_after_dequeue=128)
return burst, merged, demosaic, readvar, shotfactor
def load_test_patches(filenames, burst_length):
with tf.Graph().as_default():
with tf.name_scope('input'):
filename_queue = tf.train.string_input_producer(filenames, num_epochs=1, shuffle=False)
# Even when reading in multiple threads, share the filename
# queue.
burst, merged, demosaic, readvar, shotfactor = read_and_decode_patches(filename_queue)
burst = burst[...,:burst_length]
depth = 16 # tf.shape(burst)[0]
readvar = tf.tile(tf.reshape(readvar, [1, 4]), [depth, 1])
shotfactor = tf.tile(tf.reshape(shotfactor, [1, 4]), [depth, 1])
burst = tf.reshape(burst, [depth, 256, 256, burst_length])
merged = tf.reshape(merged, [depth, 256, 256])
demosaic = tf.reshape(demosaic, [depth, 256, 256, 3])
init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
with tf.Session() as sess:
sess.run(init_op)
with queues.QueueRunners(sess):
patches = {}
for i,f in enumerate(filenames):
print 'loading',f,'its', i,'of',len(filenames)
burst_np, merged_np, demosaic_np, readvar_np, shotfactor_np = sess.run([burst, merged, demosaic, readvar, shotfactor])
patches[f] = [burst_np, merged_np, demosaic_np, readvar_np, shotfactor_np]
return patches
|
google/burst-denoising
|
kpn_data_provider.py
|
Python
|
apache-2.0
| 21,051
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
#Used in subsequent imports from params
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from install_params import exclude_packages
from status_params import *
config = Script.get_config()
hadoop_conf_dir = None
hbase_conf_dir = None
hadoop_home = None
try:
hadoop_conf_dir = os.environ["HADOOP_CONF_DIR"]
hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
hadoop_home = os.environ["HADOOP_HOME"]
except:
pass
#directories & files
dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
#decomission
hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
include_file_path = default("/configurations/hdfs-site/dfs.hosts", None)
hdfs_include_file = None
manage_include_files = default("/configurations/hdfs-site/manage.include.files", False)
if include_file_path and manage_include_files:
slave_hosts = default("/clusterHostInfo/slave_hosts", [])
hdfs_include_file = slave_hosts
update_files_only = default("/commandParams/update_files_only",False)
# HDFS High Availability properties
dfs_ha_enabled = False
dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.internal.nameservices", None)
dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
namenode_id = None
namenode_rpc = None
hostname = config["hostname"]
if dfs_ha_namenode_ids:
dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
if dfs_ha_namenode_ids_array_len > 1:
dfs_ha_enabled = True
if dfs_ha_enabled:
for nn_id in dfs_ha_namemodes_ids_list:
nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
if hostname.lower() in nn_host.lower():
namenode_id = nn_id
namenode_rpc = nn_host
hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
hdfs_user = hadoop_user
grep_exe = "findstr"
name_node_params = default("/commandParams/namenode", None)
service_map = {
"datanode" : datanode_win_service_name,
"journalnode" : journalnode_win_service_name,
"namenode" : namenode_win_service_name,
"secondarynamenode" : snamenode_win_service_name,
"zkfc_slave": zkfc_win_service_name
}
|
radicalbit/ambari
|
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_windows.py
|
Python
|
apache-2.0
| 3,411
|
# Copyright 2013 Canonical Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Render Vendordata as stored in configured file."""
import errno
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from jacket.api.compute.metadata import base
from jacket.i18n import _LW
file_opt = cfg.StrOpt('vendordata_jsonfile_path',
help='File to load JSON formatted vendor data from')
CONF = cfg.CONF
CONF.register_opt(file_opt)
LOG = logging.getLogger(__name__)
class JsonFileVendorData(base.VendorDataDriver):
def __init__(self, *args, **kwargs):
super(JsonFileVendorData, self).__init__(*args, **kwargs)
data = {}
fpath = CONF.vendordata_jsonfile_path
logprefix = "%s[%s]:" % (file_opt.name, fpath)
if fpath:
try:
with open(fpath, "r") as fp:
data = jsonutils.load(fp)
except IOError as e:
if e.errno == errno.ENOENT:
LOG.warning(_LW("%(logprefix)s file does not exist"),
{'logprefix': logprefix})
else:
LOG.warning(_LW("%(logprefix)s unexpected IOError when "
"reading"), {'logprefix': logprefix})
raise e
except ValueError:
LOG.warning(_LW("%(logprefix)s failed to load json"),
{'logprefix': logprefix})
raise
self._data = data
def get(self):
return self._data
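# Illustrative only (not part of jacket): with ``vendordata_jsonfile_path``
# pointing at a file containing e.g. ``{"msg": "hello"}``, ``get()`` on a
# JsonFileVendorData instance returns ``{"msg": "hello"}``; a missing file only
# logs a warning and yields an empty dict, while malformed JSON or other IO
# errors are re-raised.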
|
HybridF5/jacket
|
jacket/api/compute/metadata/vendordata_json.py
|
Python
|
apache-2.0
| 2,141
|
import collections
import mui4py.mui4py_mod as mui4py_mod
from mui4py.config import get_default_config
from mui4py.types import map_type, get_float_type_str, get_int_type_str, get_io_type_str
import re
import numpy as np
class CppClass(object):
def __init__(self, config=None, args=(), kwargs={}):
self._cpp_class_name = None
self._cpp_point_class_name = None
self.raw_point = None
self.raw = None
self.io_data_type = None
        # Convert args into Arg() instances
self.args = tuple([Arg(a) if not issubclass(a.__class__, Arg) else a for a in args])
self.namespace = ""
        # Filter out None-valued entries so the C++ default values are used.
self.kwargs = {k: v for k, v in kwargs.items() if v is not None}
self.configured = False
self._ALLOWED_IO_TYPES = None
if config is None:
self.config = get_default_config()
else:
self.config = config
self.signature = self._signature()
def _signature(self):
sig = self._split_class_name()
args_str = [str(a) for a in self.get_plain_args()]
kwargs_str = ["{}={}".format(k,v) for k,v in self.kwargs.items()]
if args_str:
sig += "_ARGS_" + "_".join(args_str)
if kwargs_str:
sig += "_KWARGS_" + "_".join(kwargs_str)
return sig
def _split_class_name(self, title=True):
tokens = re.findall('[A-Z][^A-Z]*', self.__class__.__name__)
tokens = [t.lower() for t in tokens]
if title:
tokens[0] = tokens[0].title()
return "_".join(tokens)
def get_plain_args(self):
return tuple([a.arg for a in self.args])
def get_plain_kwargs(self):
return
def configure(self, config, io_data_type=None, cpp_obj=None, onlycheck=False):
self.config = config
self.point_class_name = get_cpp_name("Point", config.dim,\
config.float_type, config.int_type)
self.raw_point = getattr(mui4py_mod, self.point_class_name)
self.io_data_type = map_type[io_data_type]
if self.io_data_type is not None and self.io_data_type not in self._ALLOWED_IO_TYPES:
raise Exception("Data type not supported by spatial sampler ''. Supported types : {float, np.float32, np.float64, etc.}")
if onlycheck:
self.io_data_type = None
self.raw = cpp_obj
self._cpp_class_name = get_cpp_name(self._split_class_name(), config.dim, config.float_type,
config.int_type, namespace=self.namespace, type_io=self.io_data_type)
if self.raw is None:
# Configure class arguments
for a in self.args:
a.configure(config, self.raw_point)
self.raw = getattr(mui4py_mod, self._cpp_class_name)(*self.get_plain_args(), **self.kwargs)
self.configured = True
class Arg(object):
def __init__(self, arg):
self.arg = arg
def configure(self, config, cpp_point):
pass
class _Point(Arg):
def __init__(self, point_rep):
super(_Point, self).__init__(None)
self.point_rep = point_rep
def configure(self, config, cpp_point):
self.arg = array2Point(self.point_rep, config, cpp_point)
def array2Point(arr, config, cpp_point):
arr_aux = arr
if not isinstance(arr, list) and\
not isinstance(arr, tuple) and\
not isinstance(arr, np.ndarray):
arr_aux = [arr]
# TODO:Maybe check for point type?
if len(arr_aux) == config.dim:
return cpp_point(arr_aux)
else:
raise Exception("Size of point is different than uniface dimensions.")
def get_cpp_name(cname, dim, float_type, int_type, namespace="", type_io=None):
s = ""
if namespace:
s += "_" + namespace
s += "_{}{}d_{}_{}".format(cname, dim, get_float_type_str(float_type),\
get_int_type_str(int_type))
if type_io is not None:
s += "_" + get_io_type_str(type_io)
return s
|
yhtang/MUI
|
wrappers/Python/mui4py/common.py
|
Python
|
apache-2.0
| 4,086
|
# -*- coding: utf-8 -*-
from rest_framework import serializers
from accesslog.models import AccessLog, DaySummary, MonthSummary
class SourceSerializer(serializers.Serializer):
source = serializers.CharField(max_length=200)
total = serializers.IntegerField()
time_min = serializers.DateTimeField()
time_max = serializers.DateTimeField()
class AccessLogSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = AccessLog
fields = ('id', 'time', 'host', 'path', 'query', 'method', 'protocol',
'status', 'size', 'referer', 'ua', 'trailing', 'source')
class DaySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = DaySummary
fields = ('id', 'day', 'host_kind', 'path_kind', 'protocol', 'method',
'status', 'size_min', 'size_max', 'size_avg', 'referer_kind',
'ua_kind', 'total', 'source')
class MonthSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MonthSummary
fields = ('id', 'year', 'month', 'host_kind', 'path_kind', 'protocol',
'method', 'status', 'size_min', 'size_max', 'size_avg',
'referer_kind', 'ua_kind', 'total', 'source')
# vim: set et ts=4 sw=4 cindent fileencoding=utf-8 :
|
skitazaki/django-access-dashboard
|
src/accesslog/serializers.py
|
Python
|
apache-2.0
| 1,315
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import unittest2
from gcloud import _helpers
from gcloud import pubsub
from gcloud.pubsub.subscription import Subscription
from gcloud.pubsub.topic import Topic
_helpers._PROJECT_ENV_VAR_NAME = 'GCLOUD_TESTS_PROJECT_ID'
pubsub.set_defaults()
class TestPubsub(unittest2.TestCase):
def setUp(self):
self.to_delete = []
def tearDown(self):
for doomed in self.to_delete:
doomed.delete()
def test_create_topic(self):
TOPIC_NAME = 'a-new-topic'
topic = Topic(TOPIC_NAME)
self.assertFalse(topic.exists())
topic.create()
self.to_delete.append(topic)
self.assertTrue(topic.exists())
self.assertEqual(topic.name, TOPIC_NAME)
def test_list_topics(self):
topics_to_create = [
'new%d' % (1000 * time.time(),),
'newer%d' % (1000 * time.time(),),
'newest%d' % (1000 * time.time(),),
]
for topic_name in topics_to_create:
topic = Topic(topic_name)
topic.create()
self.to_delete.append(topic)
# Retrieve the topics.
all_topics, _ = pubsub.list_topics()
project = pubsub.get_default_project()
created = [topic for topic in all_topics
if topic.name in topics_to_create and
topic.project == project]
self.assertEqual(len(created), len(topics_to_create))
def test_create_subscription(self):
TOPIC_NAME = 'subscribe-me'
topic = Topic(TOPIC_NAME)
self.assertFalse(topic.exists())
topic.create()
self.to_delete.append(topic)
SUBSCRIPTION_NAME = 'subscribing-now'
subscription = Subscription(SUBSCRIPTION_NAME, topic)
self.assertFalse(subscription.exists())
subscription.create()
self.to_delete.append(subscription)
self.assertTrue(subscription.exists())
self.assertEqual(subscription.name, SUBSCRIPTION_NAME)
self.assertTrue(subscription.topic is topic)
def test_list_subscriptions(self):
TOPIC_NAME = 'subscribe-me'
topic = Topic(TOPIC_NAME)
self.assertFalse(topic.exists())
topic.create()
self.to_delete.append(topic)
subscriptions_to_create = [
'new%d' % (1000 * time.time(),),
'newer%d' % (1000 * time.time(),),
'newest%d' % (1000 * time.time(),),
]
for subscription_name in subscriptions_to_create:
subscription = Subscription(subscription_name, topic)
subscription.create()
self.to_delete.append(subscription)
# Retrieve the subscriptions.
all_subscriptions, _ = pubsub.list_subscriptions()
created = [subscription for subscription in all_subscriptions
if subscription.name in subscriptions_to_create and
subscription.topic.name == TOPIC_NAME]
self.assertEqual(len(created), len(subscriptions_to_create))
def test_message_pull_mode_e2e(self):
TOPIC_NAME = 'subscribe-me'
topic = Topic(TOPIC_NAME, timestamp_messages=True)
self.assertFalse(topic.exists())
topic.create()
self.to_delete.append(topic)
SUBSCRIPTION_NAME = 'subscribing-now'
subscription = Subscription(SUBSCRIPTION_NAME, topic)
self.assertFalse(subscription.exists())
subscription.create()
self.to_delete.append(subscription)
MESSAGE_1 = b'MESSAGE ONE'
MESSAGE_2 = b'MESSAGE ONE'
EXTRA_1 = 'EXTRA 1'
EXTRA_2 = 'EXTRA 2'
topic.publish(MESSAGE_1, extra=EXTRA_1)
topic.publish(MESSAGE_2, extra=EXTRA_2)
received = subscription.pull(max_messages=2)
ack_ids = [recv[0] for recv in received]
subscription.acknowledge(ack_ids)
messages = [recv[1] for recv in received]
def _by_timestamp(message):
return message.timestamp
message1, message2 = sorted(messages, key=_by_timestamp)
self.assertEqual(message1.data, MESSAGE_1)
self.assertEqual(message1.attributes['extra'], EXTRA_1)
self.assertEqual(message2.data, MESSAGE_2)
self.assertEqual(message2.attributes['extra'], EXTRA_2)
|
blowmage/gcloud-python
|
regression/pubsub.py
|
Python
|
apache-2.0
| 4,859
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""In charge of collecting data from drivers and push it to the publisher."""
import os
import msgpack
import nanomsg
from oslo_log import log
from watcher_metering.agent.manager import MetricManager
LOG = log.getLogger(__name__)
class Agent(MetricManager):
def __init__(self, conf, driver_names, use_nanoconfig_service,
publisher_endpoint, nanoconfig_service_endpoint,
nanoconfig_update_endpoint, nanoconfig_profile):
"""
:param conf: Configuration obtained from a configuration file
:type conf: oslo_config.cfg.ConfigOpts instance
:param driver_names: The list of driver names to register
:type driver_names: list of str
:param use_nanoconfig_service: Indicates whether or not it should use a
nanoconfig service
:type use_nanoconfig_service: bool
:param publisher_endpoint: Publisher server URI
:type publisher_endpoint: str
:param nanoconfig_service_endpoint: Nanoconfig service URI
:type nanoconfig_service_endpoint: str
:param nanoconfig_update_endpoint: Nanoconfig update service URI
:type nanoconfig_update_endpoint: str
:param nanoconfig_profile: Nanoconfig profile URI
:type nanoconfig_profile: str
"""
super(Agent, self).__init__(conf, driver_names)
self.socket = nanomsg.Socket(nanomsg.PUSH)
self.use_nanoconfig_service = use_nanoconfig_service
self.publisher_endpoint = publisher_endpoint
self.nanoconfig_service_endpoint = nanoconfig_service_endpoint
self.nanoconfig_update_endpoint = nanoconfig_update_endpoint
self.nanoconfig_profile = nanoconfig_profile
@property
def namespace(self):
return "watcher_metering.drivers"
def start(self):
LOG.info("[Agent] Starting main thread...")
super(Agent, self).start()
def setup_socket(self):
if self.use_nanoconfig_service:
self.set_nanoconfig_endpoints()
self.socket.configure(self.nanoconfig_profile)
LOG.info("[Agent] Agent nanomsg's profile `%s`",
self.nanoconfig_profile)
else:
LOG.debug("[Agent] Agent connected to: `%s`",
self.publisher_endpoint)
self.socket.connect(self.publisher_endpoint)
LOG.info("[Agent] Ready for pushing to Publisher node")
def set_nanoconfig_endpoints(self):
"""This methods sets both the `NN_CONFIG_SERVICE` and
`NN_CONFIG_UPDATES` environment variable as nanoconfig uses it to
access the nanoconfig service
"""
# NN_CONFIG_SERVICE:
nn_config_service = os.environ.get("NN_CONFIG_SERVICE")
if not self.nanoconfig_service_endpoint and not nn_config_service:
raise ValueError(
"Invalid configuration! No NN_CONFIG_SERVICE set. You need to "
"configure your `nanoconfig_service_endpoint`.")
if self.nanoconfig_service_endpoint:
os.environ["NN_CONFIG_SERVICE"] = self.nanoconfig_service_endpoint
else:
self.nanoconfig_service_endpoint = nn_config_service
# NN_CONFIG_UPDATES
nn_config_updates = os.environ.get("NN_CONFIG_UPDATES")
if not self.nanoconfig_update_endpoint and not nn_config_updates:
raise ValueError(
"Invalid configuration! No NN_CONFIG_UPDATES set. You need to "
"configure your `nanoconfig_update_endpoint`.")
if self.nanoconfig_update_endpoint:
os.environ["NN_CONFIG_UPDATES"] = self.nanoconfig_update_endpoint
else:
self.nanoconfig_update_endpoint = nn_config_updates
def run(self):
self.setup_socket()
super(Agent, self).run()
def stop(self):
self.socket.close()
super(Agent, self).stop()
LOG.debug("[Agent] Stopped")
def update(self, notifier, data):
LOG.debug("[Agent] Updated by: %s", notifier)
LOG.debug("[Agent] Preparing to send message %s", msgpack.loads(data))
try:
LOG.debug("[Agent] Sending message...")
# The agent will wait for the publisher server to be listening on
# the related publisher_endpoint before continuing
# In which case, you should start the publisher to make it work!
self.socket.send(data)
LOG.debug("[Agent] Message sent successfully!")
except nanomsg.NanoMsgError as exc:
LOG.error("Exception during sending the message to controller %s",
exc.args[0])
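# A hedged construction sketch (not part of watcher-metering): wiring an Agent
# straight to a publisher endpoint, bypassing the nanoconfig service. The
# driver name, endpoint and ``conf`` object are illustrative placeholders.
def _example_agent_wiring(conf):
    agent = Agent(
        conf=conf,
        driver_names=["example_driver"],   # hypothetical driver entry point
        use_nanoconfig_service=False,
        publisher_endpoint="tcp://127.0.0.1:12345",
        nanoconfig_service_endpoint=None,
        nanoconfig_update_endpoint=None,
        nanoconfig_profile=None,
    )
    agent.start()
    return agent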
|
b-com/watcher-metering
|
watcher_metering/agent/agent.py
|
Python
|
apache-2.0
| 5,245
|
'''
Created on 2016-09-16
@author: Administrator
'''
import calendar
# Return the calendar for the given year
def getYear(year):
return calendar.calendar(year)
# Return the calendar for the given year and month
def getMonth(year, month):
return calendar.month(year, month)
# Return the weekday of the first day of the month (0 = Monday, 6 = Sunday) and the number of days in that month
def getMonthRange(year, month):
return calendar.monthrange(year, month)
# Return the given month as a sequence of weeks (each week is a list of day numbers)
def getMonthYear(year, month):
return calendar.monthcalendar(year, month)
# Return whether the given year is a leap year
def isLeap(year):
return calendar.isleap(year)
print(getYear(2016))
print(getMonth(2016, 10))
print(getMonthYear(2016, 10))
print(getMonthRange(2016, 5))
print(isLeap(2016))
|
egassem/python_study
|
src/com/xiaobei/util/CalenderUtils.py
|
Python
|
apache-2.0
| 763
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from numpy.testing import assert_approx_equal, assert_allclose, assert_array_equal
from UliEngineering.SignalProcessing.Utils import *
from parameterized import parameterized
import concurrent.futures
import numpy as np
import datetime
import unittest
unstairMethods = [
("left",),
("middle",),
("right",),
("reduce",),
]
class TestRemoveMean(unittest.TestCase):
def testRemoveMean(self):
assert_allclose(remove_mean([]), [])
assert_allclose(remove_mean([1.0, 2.0, 3.0]), [-1.0, 0.0, 1.0])
class TestRMS(unittest.TestCase):
def testRMS(self):
assert_allclose(rms([]), [])
assert_allclose(rms([1.0, 2.0, 3.0]), np.sqrt(np.mean([1*1, 2*2, 3*3])))
class TestPeakToPeak(unittest.TestCase):
def testPeakToPeak(self):
assert_allclose(peak_to_peak(None), 0.0)
assert_allclose(peak_to_peak([]), 0.0)
assert_allclose(peak_to_peak([0.0]), 0.0)
assert_allclose(peak_to_peak([1.0]), 0.0)
assert_allclose(peak_to_peak([1.0, 1.0]), 0.0)
assert_allclose(peak_to_peak([1.0, 2.0]), 1.0)
assert_allclose(peak_to_peak([2.0, 1.0]), 1.0)
assert_allclose(peak_to_peak([0, 1, 3, -3, 0, 5, 0.7, 0.9]), 8)
assert_allclose(peak_to_peak(np.asarray([])), 0.0)
assert_allclose(peak_to_peak(np.asarray([0.0])), 0.0)
assert_allclose(peak_to_peak(np.asarray([1.0])), 0.0)
assert_allclose(peak_to_peak(np.asarray([1.0, 1.0])), 0.0)
assert_allclose(peak_to_peak(np.asarray([1.0, 2.0])), 1.0)
assert_allclose(peak_to_peak(np.asarray([2.0, 1.0])), 1.0)
assert_allclose(peak_to_peak(np.asarray([0, 1, 3, -3, 0, 5, 0.7, 0.9])), 8)
class TestUnstair(unittest.TestCase):
@parameterized.expand(unstairMethods)
def testNoReduction(self, method):
# Test if unstair returns the original array for a non-step function
x = np.arange(10)
y = np.square(x)
xres, yres = unstair(x, y, method=method)
assert_array_equal(xres, x)
assert_array_equal(yres, y)
def testSimpleLeft(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 3, 4, 7, 8, 9, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="left")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
def testSimpleRight(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 2, 3, 6, 7, 8, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="right")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
def testSimpleMiddle(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 1, 3, 5, 7, 8, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="middle")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
def testSimpleReduce(self):
y = np.asarray([0, 0, 0, 1, 2, 2, 2, 3, 4, 5, 5])
x = np.arange(y.size)
xexpected = [0, 2, 3, 4, 6, 7, 8, 9, 10]
yexpected = y[xexpected]
xres, yres = unstair(x, y, method="reduce")
assert_array_equal(xres, xexpected)
assert_array_equal(yres, yexpected)
@parameterized.expand(unstairMethods)
def testSine(self, method):
# Test with a rounded sine function. Data should be reduced
sx = np.arange(1000) * .02
rsine = np.round(np.sin(sx) * 10.) / 10.
rx, ry = unstair(sx, rsine, method=method)
self.assertLess(rx.size, sx.size)
self.assertLess(ry.size, rsine.size)
class TestOptimumPolyfit(unittest.TestCase):
def testBasic(self):
x = np.linspace(-100., 100., 10000)
y = np.square(x)
poly, deg, score = optimum_polyfit(x, y)
self.assertLess(score, 1e-10)
self.assertEqual(np.max(np.abs(y - poly(x))), score)
def testRandom(self):
x = np.linspace(-100., 100., 1000)
y = np.random.random_sample(x.size)
poly, deg, score = optimum_polyfit(x, y)
class TestLinSpace(unittest.TestCase):
@parameterized.expand([
(0.0, 100.0, 101, True),
(0.0, 100.0, 202, True),
(0.0, 100.0, 735, True),
(0.0, 200.0, 101, True),
(12.5, 202.3, 101, True),
(0.0, 100.0, 101, False),
(0.0, 100.0, 202, False),
(0.0, 100.0, 735, False),
(0.0, 200.0, 101, False),
(12.5, 202.3, 101, False),
])
def testBasic(self, start, end, n, endpoint):
params = (start, end, n)
spc = LinRange(*params, endpoint=endpoint)
linspc = np.linspace(*params, endpoint=endpoint)
self.assertEqual(len(spc), params[2])
self.assertEqual(len(spc), linspc.size)
self.assertEqual((len(spc),), linspc.shape)
assert_allclose(spc[:], linspc)
# Test samplerate
assert_approx_equal(spc.samplerate(), (n - 1 if endpoint else n) / (end - start))
# Test some slice
istart, iend = len(spc) // 3, len(spc) // 2
assert_allclose(spc[istart:iend], linspc[istart:iend])
# Test negative indices
assert_allclose(spc[-istart], linspc[-istart])
# Test mid
self.assertEqual(spc.mid, (start + end) / 2.)
# Test view
assert_allclose(spc.view(0, None).size, linspc.size)
assert_allclose(spc.view(0, None)[:], linspc)
def test_equal(self):
l1 = LinRange(0., 100., 100, endpoint=False)
l2 = LinRange(0., 100., 100, endpoint=False)
l3 = LinRange(0., 100., 100, endpoint=True)
self.assertTrue(l1 == l2)
self.assertTrue(l2 == l1)
self.assertFalse(l3 == l1)
self.assertFalse(l3 == l2)
def test_repr(self):
l = LinRange(0., 100., 100, endpoint=False)
self.assertEqual("LinRange(0.0, 100.0, 1.0)", str(l))
l = LinRange(0., 100., 100, endpoint=False, dtype=int)
self.assertEqual("LinRange(0.0, 100.0, 1.0, dtype=int)", str(l))
def testDtype(self):
lin1 = LinRange(0.0, 100.0, 101)
self.assertIsInstance(lin1, LinRange)
self.assertIsInstance(lin1.view(0, 5), LinRange)
class TestAggregate(unittest.TestCase):
def test_aggregate(self):
self.assertEqual([("a", 1), ("b", 1), ("c", 1)], list(aggregate("abc")))
self.assertEqual([], list(aggregate("")))
self.assertEqual([("a", 2), ("b", 1), ("c", 2), ("d", 1)],
list(aggregate("aabccd")))
|
ulikoehler/UliEngineering
|
tests/SignalProcessing/TestUtils.py
|
Python
|
apache-2.0
| 6,677
|
"""
Module for managing Windows Updates using the Windows Update Agent.
List updates on the system using the following functions:
- :py:func:`win_wua.available <salt.modules.win_wua.available>`
- :py:func:`win_wua.list <salt.modules.win_wua.list_>`
This is an easy way to find additional information about updates available
to the system, such as the GUID, KB number, or description.
Once you have the GUID or a KB number for the update you can get information
about the update, download, install, or uninstall it using these functions:
- :py:func:`win_wua.get <salt.modules.win_wua.get>`
- :py:func:`win_wua.download <salt.modules.win_wua.download>`
- :py:func:`win_wua.install <salt.modules.win_wua.install>`
- :py:func:`win_wua.uninstall <salt.modules.win_wua.uninstall>`
The get function expects a name in the form of a GUID, KB, or Title and should
return information about a single update. The other functions accept either a
single item or a list of items for downloading/installing/uninstalling a
specific list of items.
The :py:func:`win_wua.list <salt.modules.win_wua.list_>` and
:py:func:`win_wua.get <salt.modules.win_wua.get>` functions are utility
functions. In addition to returning information about updates they can also
download and install updates by setting ``download=True`` or ``install=True``.
So, with :py:func:`win_wua.list <salt.modules.win_wua.list_>` for example, you
could run the function with the filters you want to see what is available. Then
just add ``install=True`` to install everything on that list.
If you want to download, install, or uninstall specific updates, use
:py:func:`win_wua.download <salt.modules.win_wua.download>`,
:py:func:`win_wua.install <salt.modules.win_wua.install>`, or
:py:func:`win_wua.uninstall <salt.modules.win_wua.uninstall>`. To update your
system with the latest updates use :py:func:`win_wua.list
<salt.modules.win_wua.list_>` and set ``install=True``.
You can also adjust the Windows Update settings using the
:py:func:`win_wua.set_wu_settings <salt.modules.win_wua.set_wu_settings>`
function. This function is only supported on the following operating systems:
- Windows Vista / Server 2008
- Windows 7 / Server 2008R2
- Windows 8 / Server 2012
- Windows 8.1 / Server 2012R2
As of Windows 10 and Windows Server 2016, the ability to modify the Windows
Update settings has been restricted. The settings can be modified in the Local
Group Policy using the ``lgpo`` module.
.. versionadded:: 2015.8.0
:depends: salt.utils.win_update
"""
import logging
import salt.utils.platform
import salt.utils.win_service
import salt.utils.win_update
import salt.utils.winapi
from salt.exceptions import CommandExecutionError
try:
import win32com.client
HAS_PYWIN32 = True
except ImportError:
HAS_PYWIN32 = False
log = logging.getLogger(__name__)
__func_alias__ = {
"list_": "list",
}
def __virtual__():
"""
Only works on Windows systems with PyWin32
"""
if not salt.utils.platform.is_windows():
return False, "WUA: Only available on Windows systems"
if not HAS_PYWIN32:
return False, "WUA: Requires PyWin32 libraries"
if not salt.utils.win_update.HAS_PYWIN32:
return False, "WUA: Missing Libraries required by salt.utils.win_update"
if salt.utils.win_service.info("wuauserv")["StartType"] == "Disabled":
return (
False,
"WUA: The Windows Update service (wuauserv) must not be disabled",
)
if salt.utils.win_service.info("msiserver")["StartType"] == "Disabled":
return (
False,
"WUA: The Windows Installer service (msiserver) must not be disabled",
)
if salt.utils.win_service.info("BITS")["StartType"] == "Disabled":
return (
False,
"WUA: The Background Intelligent Transfer service (bits) must not "
"be disabled",
)
if not salt.utils.win_service.info("CryptSvc")["StartType"] == "Auto":
return (
False,
"WUA: The Cryptographic Services service (CryptSvc) must not be disabled",
)
if salt.utils.win_service.info("TrustedInstaller")["StartType"] == "Disabled":
return (
False,
"WUA: The Windows Module Installer service (TrustedInstaller) must "
"not be disabled",
)
return True
def available(
software=True,
drivers=True,
summary=False,
skip_installed=True,
skip_hidden=True,
skip_mandatory=False,
skip_reboot=False,
categories=None,
severities=None,
online=True,
):
"""
.. versionadded:: 2017.7.0
List updates that match the passed criteria. This allows for more filter
options than :func:`list`. Good for finding a specific GUID or KB.
Args:
software (bool):
Include software updates in the results. Default is ``True``
drivers (bool):
Include driver updates in the results. Default is ``True``
summary (bool):
- ``True``: Return a summary of updates available for each category.
- ``False`` (default): Return a detailed list of available updates.
skip_installed (bool):
Skip updates that are already installed. Default is ``True``
skip_hidden (bool):
Skip updates that have been hidden. Default is ``True``
skip_mandatory (bool):
Skip mandatory updates. Default is ``False``
skip_reboot (bool):
Skip updates that require a reboot. Default is ``False``
categories (list):
Specify the categories to list. Must be passed as a list. All
categories returned by default.
Categories include the following:
* Critical Updates
* Definition Updates
* Drivers (make sure you set ``drivers=True``)
* Feature Packs
* Security Updates
* Update Rollups
* Updates
* Windows 7
* Windows 8.1
* Windows 8.1 drivers
* Windows 8.1 and later drivers
* Windows Defender
severities (list):
Specify the severities to include. Must be passed as a list. All
severities returned by default.
Severities include the following:
* Critical
* Important
online (bool):
Tells the Windows Update Agent to go online to update its local update
database. ``True`` will go online. ``False`` will use the local
update database as is. Default is ``True``
.. versionadded:: 3001
Returns:
dict: Returns a dict containing either a summary or a list of updates:
.. code-block:: cfg
Dict of Updates:
{'<GUID>': {
'Title': <title>,
'KB': <KB>,
'GUID': <the globally unique identifier for the update>,
'Description': <description>,
'Downloaded': <has the update been downloaded>,
'Installed': <has the update been installed>,
'Mandatory': <is the update mandatory>,
'UserInput': <is user input required>,
'EULAAccepted': <has the EULA been accepted>,
'Severity': <update severity>,
'NeedsReboot': <is the update installed and awaiting reboot>,
'RebootBehavior': <will the update require a reboot>,
'Categories': [
'<category 1>',
'<category 2>',
... ]
}}
Summary of Updates:
{'Total': <total number of updates returned>,
'Available': <updates that are not downloaded or installed>,
'Downloaded': <updates that are downloaded but not installed>,
'Installed': <updates installed (usually 0 unless installed=True)>,
'Categories': {
<category 1>: <total for that category>,
<category 2>: <total for category 2>,
... }
}
CLI Examples:
.. code-block:: bash
# Normal Usage (list all software updates)
salt '*' win_wua.available
# List all updates with categories of Critical Updates and Drivers
salt '*' win_wua.available categories=["Critical Updates","Drivers"]
# List all Critical Security Updates
salt '*' win_wua.available categories=["Security Updates"] severities=["Critical"]
# List all updates with a severity of Critical
salt '*' win_wua.available severities=["Critical"]
# A summary of all available updates
salt '*' win_wua.available summary=True
# A summary of all Feature Packs and Windows 8.1 Updates
salt '*' win_wua.available categories=["Feature Packs","Windows 8.1"] summary=True
"""
# Create a Windows Update Agent instance
wua = salt.utils.win_update.WindowsUpdateAgent(online=online)
# Look for available
updates = wua.available(
skip_hidden=skip_hidden,
skip_installed=skip_installed,
skip_mandatory=skip_mandatory,
skip_reboot=skip_reboot,
software=software,
drivers=drivers,
categories=categories,
severities=severities,
)
# Return results as Summary or Details
return updates.summary() if summary else updates.list()
def get(name, download=False, install=False, online=True):
"""
.. versionadded:: 2017.7.0
Returns details for the named update
Args:
name (str):
The name of the update you're searching for. This can be the GUID, a
KB number, or any part of the name of the update. GUIDs and KBs are
preferred. Run ``list`` to get the GUID for the update you're
looking for.
download (bool):
Download the update returned by this function. Run this function
first to see if the update exists, then set ``download=True`` to
download the update.
install (bool):
Install the update returned by this function. Run this function
first to see if the update exists, then set ``install=True`` to
install the update.
online (bool):
Tells the Windows Update Agent to go online to update its local update
database. ``True`` will go online. ``False`` will use the local
update database as is. Default is ``True``
.. versionadded:: 3001
Returns:
dict:
Returns a dict containing a list of updates that match the name if
download and install are both set to False. Should usually be a
single update, but can return multiple if a partial name is given.
If download or install is set to true it will return the results of the
operation.
.. code-block:: cfg
Dict of Updates:
{'<GUID>': {
'Title': <title>,
'KB': <KB>,
'GUID': <the globally unique identifier for the update>,
'Description': <description>,
'Downloaded': <has the update been downloaded>,
'Installed': <has the update been installed>,
'Mandatory': <is the update mandatory>,
'UserInput': <is user input required>,
'EULAAccepted': <has the EULA been accepted>,
'Severity': <update severity>,
'NeedsReboot': <is the update installed and awaiting reboot>,
'RebootBehavior': <will the update require a reboot>,
'Categories': [
'<category 1>',
'<category 2>',
... ]
}}
CLI Examples:
.. code-block:: bash
# Recommended Usage using GUID without braces
# Use this to find the status of a specific update
salt '*' win_wua.get 12345678-abcd-1234-abcd-1234567890ab
# Use the following if you don't know the GUID:
# Using a KB number
# Not all updates have an associated KB
salt '*' win_wua.get KB3030298
# Using part or all of the name of the update
# Could possibly return multiple results
# Not all updates have an associated KB
salt '*' win_wua.get 'Microsoft Camera Codec Pack'
"""
# Create a Windows Update Agent instance
wua = salt.utils.win_update.WindowsUpdateAgent(online=online)
# Search for Update
updates = wua.search(name)
ret = {}
# Download
if download or install:
ret["Download"] = wua.download(updates)
# Install
if install:
ret["Install"] = wua.install(updates)
return ret if ret else updates.list()
def list(
software=True,
drivers=False,
summary=False,
skip_installed=True,
categories=None,
severities=None,
download=False,
install=False,
online=True,
):
"""
.. versionadded:: 2017.7.0
Returns a detailed list of available updates or a summary. If ``download``
or ``install`` is ``True`` the same list will be downloaded and/or
installed.
Args:
software (bool):
Include software updates in the results. Default is ``True``
drivers (bool):
Include driver updates in the results. Default is ``False``
summary (bool):
- ``True``: Return a summary of updates available for each category.
- ``False`` (default): Return a detailed list of available updates.
skip_installed (bool):
Skip installed updates in the results. Default is ``True``
download (bool):
(Overrides reporting functionality) Download the list of updates
returned by this function. Run this function first with
``download=False`` to see what will be downloaded, then set
``download=True`` to download the updates. Default is ``False``
install (bool):
(Overrides reporting functionality) Install the list of updates
returned by this function. Run this function first with
``install=False`` to see what will be installed, then set
``install=True`` to install the updates. Default is ``False``
categories (list):
Specify the categories to list. Must be passed as a list. All
categories returned by default.
Categories include the following:
* Critical Updates
* Definition Updates
* Drivers (make sure you set ``drivers=True``)
* Feature Packs
* Security Updates
* Update Rollups
* Updates
* Windows 7
* Windows 8.1
* Windows 8.1 drivers
* Windows 8.1 and later drivers
* Windows Defender
severities (list):
Specify the severities to include. Must be passed as a list. All
severities returned by default.
Severities include the following:
* Critical
* Important
online (bool):
Tells the Windows Update Agent to go online to update its local update
database. ``True`` will go online. ``False`` will use the local
update database as is. Default is ``True``
.. versionadded:: 3001
Returns:
dict: Returns a dict containing either a summary or a list of updates:
.. code-block:: cfg
Dict of Updates:
{'<GUID>': {
'Title': <title>,
'KB': <KB>,
'GUID': <the globally unique identifier for the update>,
'Description': <description>,
'Downloaded': <has the update been downloaded>,
'Installed': <has the update been installed>,
'Mandatory': <is the update mandatory>,
'UserInput': <is user input required>,
'EULAAccepted': <has the EULA been accepted>,
'Severity': <update severity>,
'NeedsReboot': <is the update installed and awaiting reboot>,
'RebootBehavior': <will the update require a reboot>,
'Categories': [
'<category 1>',
'<category 2>',
... ]
}}
Summary of Updates:
{'Total': <total number of updates returned>,
'Available': <updates that are not downloaded or installed>,
'Downloaded': <updates that are downloaded but not installed>,
'Installed': <updates installed (usually 0 unless installed=True)>,
'Categories': {
<category 1>: <total for that category>,
<category 2>: <total for category 2>,
... }
}
CLI Examples:
.. code-block:: bash
# Normal Usage (list all software updates)
salt '*' win_wua.list
# List all updates with categories of Critical Updates and Drivers
salt '*' win_wua.list categories=['Critical Updates','Drivers']
# List all Critical Security Updates
salt '*' win_wua.list categories=['Security Updates'] severities=['Critical']
# List all updates with a severity of Critical
salt '*' win_wua.list severities=['Critical']
# A summary of all available updates
salt '*' win_wua.list summary=True
# A summary of all Feature Packs and Windows 8.1 Updates
salt '*' win_wua.list categories=['Feature Packs','Windows 8.1'] summary=True
"""
# Create a Windows Update Agent instance
wua = salt.utils.win_update.WindowsUpdateAgent(online=online)
# Search for Update
updates = wua.available(
skip_installed=skip_installed,
software=software,
drivers=drivers,
categories=categories,
severities=severities,
)
ret = {}
# Download
if download or install:
ret["Download"] = wua.download(updates)
# Install
if install:
ret["Install"] = wua.install(updates)
if not ret:
return updates.summary() if summary else updates.list()
return ret
def installed(summary=False, kbs_only=False):
"""
.. versionadded:: 3001
Get a list of all updates that are currently installed on the system.
.. note::
This list may not necessarily match the Update History on the machine.
This will only show the updates that apply to the current build of
Windows. So, for example, the system may have shipped with Windows 10
Build 1607. That machine received updates to the 1607 build. Later the
machine was upgraded to a newer feature release, 1803 for example. Then
more updates were applied. This will only return the updates applied to
the 1803 build and not those applied when the system was at the 1607
build.
Args:
summary (bool):
Return a summary instead of a detailed list of updates. ``True``
will return a Summary, ``False`` will return a detailed list of
installed updates. Default is ``False``
kbs_only (bool):
Only return a list of KBs installed on the system. If this parameter
is passed, the ``summary`` parameter will be ignored. Default is
``False``
Returns:
dict:
Returns a dictionary of either a Summary or a detailed list of
updates installed on the system when ``kbs_only=False``
list:
Returns a list of KBs installed on the system when ``kbs_only=True``
CLI Examples:
.. code-block:: bash
# Get a detailed list of all applicable updates installed on the system
salt '*' win_wua.installed
# Get a summary of all applicable updates installed on the system
salt '*' win_wua.installed summary=True
# Get a simple list of KBs installed on the system
salt '*' win_wua.installed kbs_only=True
"""
# Create a Windows Update Agent instance. Since we're only listing installed
# updates, there's no need to go online to update the Windows Update db
wua = salt.utils.win_update.WindowsUpdateAgent(online=False)
updates = wua.installed() # Get installed Updates objects
results = updates.list() # Convert to list
if kbs_only:
list_kbs = set()
for item in results:
list_kbs.update(results[item]["KBs"])
return sorted(list_kbs)
return updates.summary() if summary else results
def download(names):
"""
.. versionadded:: 2017.7.0
Downloads updates that match the list of passed identifiers. It may be easier
to use the ``list`` function and set ``download=True``.
Args:
names (str, list):
A single update or a list of updates to download. This can be any
combination of GUIDs, KB numbers, or names. GUIDs or KBs are
preferred.
.. note::
An error will be raised if there are more results than there are
items in the names parameter
Returns:
dict: A dictionary containing the details about the downloaded updates
CLI Example:
.. code-block:: bash
# Normal Usage
salt '*' win_wua.download names=['12345678-abcd-1234-abcd-1234567890ab', 'KB2131233']
"""
# Create a Windows Update Agent instance
wua = salt.utils.win_update.WindowsUpdateAgent()
# Search for Update
updates = wua.search(names)
if updates.count() == 0:
raise CommandExecutionError("No updates found")
# Make sure it's a list so count comparison is correct
if isinstance(names, str):
names = [names]
if isinstance(names, int):
names = [str(names)]
if updates.count() > len(names):
raise CommandExecutionError(
"Multiple updates found, names need to be more specific"
)
return wua.download(updates)
def install(names):
"""
.. versionadded:: 2017.7.0
Installs updates that match the list of identifiers. It may be easier to use
the ``list`` function and set ``install=True``.
Args:
names (str, list):
A single update or a list of updates to install. This can be any
combination of GUIDs, KB numbers, or names. GUIDs or KBs are
preferred.
.. note::
An error will be raised if there are more results than there are items
in the names parameter
Returns:
dict: A dictionary containing the details about the installed updates
CLI Examples:
.. code-block:: bash
# Normal Usage
salt '*' win_wua.install KB12323211
"""
# Create a Windows Update Agent instance
wua = salt.utils.win_update.WindowsUpdateAgent()
# Search for Updates
updates = wua.search(names)
if updates.count() == 0:
raise CommandExecutionError("No updates found")
# Make sure it's a list so count comparison is correct
if isinstance(names, str):
names = [names]
if isinstance(names, int):
names = [str(names)]
if updates.count() > len(names):
raise CommandExecutionError(
"Multiple updates found, names need to be more specific"
)
return wua.install(updates)
def uninstall(names):
"""
.. versionadded:: 2017.7.0
Uninstall updates.
Args:
names (str, list):
A single update or a list of updates to uninstall. This can be any
combination of GUIDs, KB numbers, or names. GUIDs or KBs are
preferred.
Returns:
dict: A dictionary containing the details about the uninstalled updates
CLI Examples:
.. code-block:: bash
# Normal Usage
salt '*' win_wua.uninstall KB3121212
# As a list
salt '*' win_wua.uninstall guid=['12345678-abcd-1234-abcd-1234567890ab', 'KB1231231']
"""
# Create a Windows Update Agent instance
wua = salt.utils.win_update.WindowsUpdateAgent()
# Search for Updates
updates = wua.search(names)
if updates.count() == 0:
raise CommandExecutionError("No updates found")
return wua.uninstall(updates)
def set_wu_settings(
level=None,
recommended=None,
featured=None,
elevated=None,
msupdate=None,
day=None,
time=None,
):
"""
Change Windows Update settings. If no parameters are passed, the current
value will be returned.
Supported:
- Windows Vista / Server 2008
- Windows 7 / Server 2008R2
- Windows 8 / Server 2012
- Windows 8.1 / Server 2012R2
.. note::
Microsoft began using the Unified Update Platform (UUP) starting with
Windows 10 / Server 2016. The Windows Update settings have changed and
the ability to 'Save' Windows Update settings has been removed. Windows
Update settings are read-only. See MSDN documentation:
https://msdn.microsoft.com/en-us/library/aa385829(v=vs.85).aspx
Args:
level (int):
Number from 1 to 4 indicating the update level:
1. Never check for updates
2. Check for updates but let me choose whether to download and
install them
3. Download updates but let me choose whether to install them
4. Install updates automatically
recommended (bool):
Boolean value that indicates whether to include optional or
recommended updates when a search for updates and installation of
updates is performed.
featured (bool):
Boolean value that indicates whether to display notifications for
featured updates.
elevated (bool):
Boolean value that indicates whether non-administrators can perform
some update-related actions without administrator approval.
msupdate (bool):
Boolean value that indicates whether to turn on Microsoft Update for
other Microsoft products
day (str):
Days of the week on which Automatic Updates installs or uninstalls
updates. Accepted values:
- Everyday
- Monday
- Tuesday
- Wednesday
- Thursday
- Friday
- Saturday
time (str):
Time at which Automatic Updates installs or uninstalls updates. Must
be in the ##:## 24-hour format, e.g. 3:00 PM would be 15:00. Must be in
1 hour increments.
Returns:
dict: Returns a dictionary containing the results.
CLI Examples:
.. code-block:: bash
salt '*' win_wua.set_wu_settings level=4 recommended=True featured=False
"""
# The AutomaticUpdateSettings.Save() method used in this function does not
# work on Windows 10 / Server 2016. It is called throughout this function
# like this:
#
# with salt.utils.winapi.Com():
# obj_au = win32com.client.Dispatch('Microsoft.Update.AutoUpdate')
# obj_au_settings = obj_au.Settings
# obj_au_settings.Save()
#
# The `Save()` method reports success but doesn't actually change anything.
# Windows Update settings are read-only in Windows 10 / Server 2016. There's
# a little blurb on MSDN that mentions this, but gives no alternative for
# changing these settings in Windows 10 / Server 2016.
#
# https://msdn.microsoft.com/en-us/library/aa385829(v=vs.85).aspx
#
# Apparently the Windows Update framework in Windows Vista - Windows 8.1 has
# been changed quite a bit in Windows 10 / Server 2016. It is now called the
# Unified Update Platform (UUP). I haven't found an API or a Powershell
# commandlet for working with the UUP. Perhaps there will be something
# forthcoming. The `win_lgpo` module might be an option for changing the
# Windows Update settings using local group policy.
ret = {"Success": True}
# Initialize the PyCom system
with salt.utils.winapi.Com():
# Create an AutoUpdate object
obj_au = win32com.client.Dispatch("Microsoft.Update.AutoUpdate")
# Create an AutoUpdate Settings Object
obj_au_settings = obj_au.Settings
# Only change the setting if it's passed
if level is not None:
obj_au_settings.NotificationLevel = int(level)
result = obj_au_settings.Save()
if result is None:
ret["Level"] = level
else:
ret["Comment"] = "Settings failed to save. Check permissions."
ret["Success"] = False
if recommended is not None:
obj_au_settings.IncludeRecommendedUpdates = recommended
result = obj_au_settings.Save()
if result is None:
ret["Recommended"] = recommended
else:
ret["Comment"] = "Settings failed to save. Check permissions."
ret["Success"] = False
if featured is not None:
obj_au_settings.FeaturedUpdatesEnabled = featured
result = obj_au_settings.Save()
if result is None:
ret["Featured"] = featured
else:
ret["Comment"] = "Settings failed to save. Check permissions."
ret["Success"] = False
if elevated is not None:
obj_au_settings.NonAdministratorsElevated = elevated
result = obj_au_settings.Save()
if result is None:
ret["Elevated"] = elevated
else:
ret["Comment"] = "Settings failed to save. Check permissions."
ret["Success"] = False
if day is not None:
# Check that day is valid
days = {
"Everyday": 0,
"Sunday": 1,
"Monday": 2,
"Tuesday": 3,
"Wednesday": 4,
"Thursday": 5,
"Friday": 6,
"Saturday": 7,
}
if day not in days:
ret["Comment"] = (
"Day needs to be one of the following: Everyday, "
"Monday, Tuesday, Wednesday, Thursday, Friday, "
"Saturday"
)
ret["Success"] = False
else:
# Set the numeric equivalent for the day setting
obj_au_settings.ScheduledInstallationDay = days[day]
result = obj_au_settings.Save()
if result is None:
ret["Day"] = day
else:
ret["Comment"] = "Settings failed to save. Check permissions."
ret["Success"] = False
if time is not None:
# Check for time as a string: if the time is not quoted, yaml will
# treat it as an integer
if not isinstance(time, str):
ret["Comment"] = (
"Time argument needs to be a string; it may need to "
"be quoted. Passed {}. Time not set.".format(time)
)
ret["Success"] = False
# Check for colon in the time
elif ":" not in time:
ret["Comment"] = (
"Time argument needs to be in 00:00 format. "
"Passed {}. Time not set.".format(time)
)
ret["Success"] = False
else:
# Split the time by :
t = time.split(":")
# We only need the hours value
obj_au_settings.ScheduledInstallationTime = int(t[0])
result = obj_au_settings.Save()
if result is None:
ret["Time"] = time
else:
ret["Comment"] = "Settings failed to save. Check permissions."
ret["Success"] = False
if msupdate is not None:
# Microsoft Update requires special handling
# First load the MS Update Service Manager
with salt.utils.winapi.Com():
obj_sm = win32com.client.Dispatch("Microsoft.Update.ServiceManager")
# Give it a bogus name
obj_sm.ClientApplicationID = "My App"
if msupdate:
# msupdate is true, so add it to the services
try:
obj_sm.AddService2("7971f918-a847-4430-9279-4a52d1efe18d", 7, "")
ret["msupdate"] = msupdate
except Exception as error: # pylint: disable=broad-except
# pylint: disable=unpacking-non-sequence,unbalanced-tuple-unpacking
(
hr,
msg,
exc,
arg,
) = error.args
# pylint: enable=unpacking-non-sequence,unbalanced-tuple-unpacking
# Consider checking for -2147024891 (0x80070005) Access Denied
ret["Comment"] = "Failed with failure code: {}".format(exc[5])
ret["Success"] = False
else:
# msupdate is false, so remove it from the services
# check to see if the service is registered or the RemoveService
# function will fail
if _get_msupdate_status():
# Service found, remove the service
try:
obj_sm.RemoveService("7971f918-a847-4430-9279-4a52d1efe18d")
ret["msupdate"] = msupdate
except Exception as error: # pylint: disable=broad-except
# pylint: disable=unpacking-non-sequence,unbalanced-tuple-unpacking
(
hr,
msg,
exc,
arg,
) = error.args
# pylint: enable=unpacking-non-sequence,unbalanced-tuple-unpacking
# Consider checking for the following
# -2147024891 (0x80070005) Access Denied
# -2145091564 (0x80248014) Service Not Found (shouldn't get
# this with the check for _get_msupdate_status above)
ret["Comment"] = "Failed with failure code: {}".format(exc[5])
ret["Success"] = False
else:
ret["msupdate"] = msupdate
ret["Reboot"] = get_needs_reboot()
return ret
def get_wu_settings():
"""
Get current Windows Update settings.
Returns:
dict: A dictionary of Windows Update settings:
Featured Updates:
Boolean value that indicates whether to display notifications for
featured updates.
Group Policy Required (Read-only):
Boolean value that indicates whether Group Policy requires the
Automatic Updates service.
Microsoft Update:
Boolean value that indicates whether to turn on Microsoft Update for
other Microsoft Products
Needs Reboot:
Boolean value that indicates whether the machine is in a reboot
pending state.
Non Admins Elevated:
Boolean value that indicates whether non-administrators can perform
some update-related actions without administrator approval.
Notification Level:
Number 1 to 4 indicating the update level:
1. Never check for updates
2. Check for updates but let me choose whether to download and
install them
3. Download updates but let me choose whether to install them
4. Install updates automatically
Read Only (Read-only):
Boolean value that indicates whether the Automatic Update
settings are read-only.
Recommended Updates:
Boolean value that indicates whether to include optional or
recommended updates when a search for updates and installation of
updates is performed.
Scheduled Day:
Days of the week on which Automatic Updates installs or uninstalls
updates.
Scheduled Time:
Time at which Automatic Updates installs or uninstalls updates.
CLI Examples:
.. code-block:: bash
salt '*' win_wua.get_wu_settings
"""
ret = {}
day = [
"Every Day",
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday",
]
# Initialize the PyCom system
with salt.utils.winapi.Com():
# Create an AutoUpdate object
obj_au = win32com.client.Dispatch("Microsoft.Update.AutoUpdate")
# Create an AutoUpdate Settings Object
obj_au_settings = obj_au.Settings
# Populate the return dictionary
ret["Featured Updates"] = obj_au_settings.FeaturedUpdatesEnabled
ret["Group Policy Required"] = obj_au_settings.Required
ret["Microsoft Update"] = _get_msupdate_status()
ret["Needs Reboot"] = get_needs_reboot()
ret["Non Admins Elevated"] = obj_au_settings.NonAdministratorsElevated
ret["Notification Level"] = obj_au_settings.NotificationLevel
ret["Read Only"] = obj_au_settings.ReadOnly
ret["Recommended Updates"] = obj_au_settings.IncludeRecommendedUpdates
ret["Scheduled Day"] = day[obj_au_settings.ScheduledInstallationDay]
# Scheduled Installation Time requires special handling to return the time
# in the right format
if obj_au_settings.ScheduledInstallationTime < 10:
ret["Scheduled Time"] = "0{}:00".format(
obj_au_settings.ScheduledInstallationTime
)
else:
ret["Scheduled Time"] = "{}:00".format(
obj_au_settings.ScheduledInstallationTime
)
return ret
def _get_msupdate_status():
"""
Check to see if Microsoft Update is Enabled
Return Boolean
"""
# To get the status of Microsoft Update we actually have to check the
# Microsoft Update Service Manager
# Initialize the PyCom system
with salt.utils.winapi.Com():
# Create a ServiceManager Object
obj_sm = win32com.client.Dispatch("Microsoft.Update.ServiceManager")
# Return a collection of loaded Services
col_services = obj_sm.Services
# Loop through the collection to find the Microsoft Update Service
# If it exists return True otherwise False
for service in col_services:
if service.name == "Microsoft Update":
return True
return False
def get_needs_reboot():
"""
Determines if the system needs to be rebooted.
Returns:
bool: ``True`` if the system requires a reboot, otherwise ``False``
CLI Examples:
.. code-block:: bash
salt '*' win_wua.get_needs_reboot
"""
return salt.utils.win_update.needs_reboot()
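# --- Editor's note: hedged usage sketch, not part of the upstream module ---
# The module docstring describes a "list first, then install" workflow. Driven
# from a salt master with LocalClient it could look roughly like the sketch
# below; the minion id 'win-minion-1' is hypothetical.
#
#     import salt.client
#
#     local = salt.client.LocalClient()
#     # Dry run: report the critical updates that would be installed
#     pending = local.cmd('win-minion-1', 'win_wua.list',
#                         kwarg={'severities': ['Critical']})
#     # Install everything on that list
#     result = local.cmd('win-minion-1', 'win_wua.list',
#                        kwarg={'severities': ['Critical'], 'install': True})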
|
saltstack/salt
|
salt/modules/win_wua.py
|
Python
|
apache-2.0
| 39,211
|
"""
Get WHOIS information for a given host.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.whois/
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['python-whois==0.7.1']
_LOGGER = logging.getLogger(__name__)
CONF_DOMAIN = 'domain'
DEFAULT_NAME = 'Whois'
ATTR_EXPIRES = 'expires'
ATTR_NAME_SERVERS = 'name_servers'
ATTR_REGISTRAR = 'registrar'
ATTR_UPDATED = 'updated'
SCAN_INTERVAL = timedelta(hours=24)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_DOMAIN): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the WHOIS sensor."""
import whois
domain = config.get(CONF_DOMAIN)
name = config.get(CONF_NAME)
try:
if 'expiration_date' in whois.whois(domain):
add_entities([WhoisSensor(name, domain)], True)
else:
_LOGGER.error(
"WHOIS lookup for %s didn't contain expiration_date",
domain)
return
except whois.BaseException as ex:
_LOGGER.error(
"Exception %s occurred during WHOIS lookup for %s", ex, domain)
return
class WhoisSensor(Entity):
"""Implementation of a WHOIS sensor."""
def __init__(self, name, domain):
"""Initialize the sensor."""
import whois
self.whois = whois.whois
self._name = name
self._domain = domain
self._state = None
self._attributes = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon to represent this sensor."""
return 'mdi:calendar-clock'
@property
def unit_of_measurement(self):
"""Return the unit of measurement to present the value in."""
return 'days'
@property
def state(self):
"""Return the expiration days for hostname."""
return self._state
@property
def device_state_attributes(self):
"""Get the more info attributes."""
return self._attributes
def _empty_state_and_attributes(self):
"""Empty the state and attributes on an error."""
self._state = None
self._attributes = None
def update(self):
"""Get the current WHOIS data for the domain."""
import whois
try:
response = self.whois(self._domain)
except whois.BaseException as ex:
_LOGGER.error("Exception %s occurred during WHOIS lookup", ex)
self._empty_state_and_attributes()
return
if response:
if 'expiration_date' not in response:
_LOGGER.error(
"Failed to find expiration_date in whois lookup response. "
"Did find: %s", ', '.join(response.keys()))
self._empty_state_and_attributes()
return
if not response['expiration_date']:
_LOGGER.error("Whois response contains empty expiration_date")
self._empty_state_and_attributes()
return
attrs = {}
expiration_date = response['expiration_date']
attrs[ATTR_EXPIRES] = expiration_date.isoformat()
if 'nameservers' in response:
attrs[ATTR_NAME_SERVERS] = ' '.join(response['nameservers'])
if 'updated_date' in response:
update_date = response['updated_date']
if isinstance(update_date, list):
attrs[ATTR_UPDATED] = update_date[0].isoformat()
else:
attrs[ATTR_UPDATED] = update_date.isoformat()
if 'registrar' in response:
attrs[ATTR_REGISTRAR] = response['registrar']
time_delta = (expiration_date - expiration_date.now())
self._attributes = attrs
self._state = time_delta.days
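# --- Editor's note: hedged configuration sketch, not part of the original ---
# Based on PLATFORM_SCHEMA above (CONF_DOMAIN required, CONF_NAME optional),
# a configuration.yaml entry for this sensor would look roughly like:
#
#     sensor:
#       - platform: whois
#         domain: example.com          # hypothetical domain
#         name: Example domain expiry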
|
jamespcole/home-assistant
|
homeassistant/components/whois/sensor.py
|
Python
|
apache-2.0
| 4,299
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from solution import Solution
from solution import TreeNode
def constructOne(s):
if s == '#':
return None
else:
return TreeNode(int(s))
def createTree(tree):
q = []
root = constructOne(tree[0])
q.append(root)
idx = 1
while q:
tn = q.pop(0)
if not tn:
continue
if idx == len(tree):
break
left = constructOne(tree[idx])
tn.left = left
q.append(left)
idx += 1
if idx == len(tree):
break
right = constructOne(tree[idx])
idx += 1
tn.right = right
q.append(right)
return root
# inpt = createTree(['1', '#', '2', '3'])
inpt = createTree(['1', '2', '3', '#' , '#', '4', '#', '#', '5'])
sol = Solution()
res = sol.inorderTraversal(inpt)
print(res)
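# --- Editor's note: hedged reference sketch, not part of the original test ---
# The imported Solution is not shown here. Assuming TreeNode has the usual
# `val`/`left`/`right` attributes, a common iterative inorder traversal that
# this test could exercise looks like the following.
def inorder_traversal_reference(root):
    """Return the inorder sequence of node values using an explicit stack."""
    result, stack, node = [], [], root
    while node or stack:
        # Walk down the left spine, stacking nodes along the way
        while node:
            stack.append(node)
            node = node.left
        node = stack.pop()
        result.append(node.val)
        # Visit the right subtree next
        node = node.right
    return result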
|
zhlinh/leetcode
|
0094.Binary Tree Inorder Traversal/test.py
|
Python
|
apache-2.0
| 874
|
'''
HMMPowerSupplyMap
'''
from Products.DataCollector.plugins.CollectorPlugin import (
SnmpPlugin, GetTableMap, GetMap
)
from DeviceDefine import HMMSTATUS, HMMPRESENCE, HMMPOWERMODE, HMMLOCATION
class HMMPowerSupplyMap(SnmpPlugin):
'''
HMMPowerSupplyMap
'''
relname = 'hmmpowerSupplys'
modname = 'ZenPacks.community.HuaweiServer.HMMPowerSupply'
snmpGetTableMaps = (
GetTableMap(
'hmmPowerSupplyTable', '1.3.6.1.4.1.2011.2.82.1.82.6.2001.1', {
'.1': 'powerIndex',
'.2': 'powerPresence',
'.3': 'powerState',
'.4': 'powerRatingPower',
'.5': 'powerMode',
'.8': 'powerRuntimePower',
}
),
GetTableMap(
'hmmPSUTable', '1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1', {
'.1': 'psuIndex',
'.2': 'psuLocation',
'.3': 'psuHealth',
}
),
)
snmpGetMap = GetMap({
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.1.1': 'psuIndex1',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.2.1': 'psuLocation1',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.3.1': 'psuHealth1',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.1.2': 'psuIndex2',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.2.2': 'psuLocation2',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.3.2': 'psuHealth2',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.1.3': 'psuIndex3',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.2.3': 'psuLocation3',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.3.3': 'psuHealth3',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.1.4': 'psuIndex4',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.2.4': 'psuLocation4',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.3.4': 'psuHealth4',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.1.5': 'psuIndex5',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.2.5': 'psuLocation5',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.3.5': 'psuHealth5',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.1.6': 'psuIndex6',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.2.6': 'psuLocation6',
'.1.3.6.1.4.1.2011.2.82.1.82.100.4.2001.1.3.6': 'psuHealth6',
})
def process(self, device, results, log):
'''
process oid
'''
log = log
device = device
temp_sensors = results[1].get('hmmPowerSupplyTable', {})
getdata = results[0]
psumap = {}
# psu_tables = results[1].get('hmmPSUTable', {})
# for snmpindex, row in psu_tables.items():
# name = str(row.get('psuIndex'))
# if not name:
# log.warn('Skipping hmmPowerSupplyTable with no name')
# continue
#
# psumap[int(name)] = [HMMLOCATION.get(row.get('psuLocation'), ''),
# HMMSTATUS.get(row.get('psuHealth'), 'normal')]
for row in range(1, 7):
rindex = 'psuIndex'+str(row)
rlocation = 'psuLocation'+str(row)
rhealth = 'psuHealth'+str(row)
psumap[row] = [HMMLOCATION.get(getdata.get(rlocation), ''),
HMMSTATUS.get(getdata.get(rhealth), 'normal')]
relmap = self.relMap()
for snmpindex, row in temp_sensors.items():
name = str(row.get('powerIndex'))
if not name:
log.warn('Skipping hmmPowerSupplyTable entry with no name')
continue
if 1 != int(row.get('powerPresence')):
continue
psustatus = ''
psulocation = ''
if (int(name)) in psumap:
psulocation = psumap[int(name)][0]
psustatus = psumap[int(name)][1]
relmap.append(self.objectMap({
'id': self.prepId('PS_'+name),
'title': 'PS_'+name,
'snmpindex': snmpindex.strip('.'),
'hpspresence': HMMPRESENCE.get(row.get('powerPresence'),
'unknown'),
'hpsratingPower': row.get('powerRatingPower'),
'hpsruntimePower': row.get('powerRuntimePower'),
'hpsstatus': psustatus,
'hpslocation': psulocation,
'hpspowerMode': HMMPOWERMODE.get(
row.get('powerMode'), row.get('powerMode')),
}))
return relmap
|
Wuguanping/Server_Manage_Plugin
|
Zenoss_Plugin/ZenPacks/community/HuaweiServer/modeler/plugins/community/snmp/HMMPowerSupplyMap.py
|
Python
|
apache-2.0
| 4,621
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
import google.auth # type: ignore
from google.api_core import gapic_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.ads.googleads.v9.resources.types import geo_target_constant
from google.ads.googleads.v9.services.types import geo_target_constant_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class GeoTargetConstantServiceTransport(metaclass=abc.ABCMeta):
"""Abstract transport class for GeoTargetConstantService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# Save the credentials.
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
self._prep_wrapped_messages(client_info)
def _prep_wrapped_messages(self, client_info):
# Precomputed wrapped methods
self._wrapped_methods = {
self.get_geo_target_constant: gapic_v1.method.wrap_method(
self.get_geo_target_constant,
default_timeout=None,
client_info=client_info,
),
self.suggest_geo_target_constants: gapic_v1.method.wrap_method(
self.suggest_geo_target_constants,
default_timeout=None,
client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def get_geo_target_constant(
self,
) -> typing.Callable[
[geo_target_constant_service.GetGeoTargetConstantRequest],
geo_target_constant.GeoTargetConstant,
]:
raise NotImplementedError
@property
def suggest_geo_target_constants(
self,
) -> typing.Callable[
[geo_target_constant_service.SuggestGeoTargetConstantsRequest],
geo_target_constant_service.SuggestGeoTargetConstantsResponse,
]:
raise NotImplementedError
__all__ = ("GeoTargetConstantServiceTransport",)
|
googleads/google-ads-python
|
google/ads/googleads/v9/services/services/geo_target_constant_service/transports/base.py
|
Python
|
apache-2.0
| 4,445
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for using the Aggregate API."""
import os
import apache_beam as beam
from apache_beam.testing import util
import tensorflow as tf
from tensorflow_model_analysis import constants
from tensorflow_model_analysis.eval_saved_model import testutil
from tensorflow_model_analysis.eval_saved_model.example_trainers import linear_classifier
from tensorflow_model_analysis.evaluators import legacy_aggregate as aggregate
from tensorflow_model_analysis.evaluators import legacy_poisson_bootstrap as poisson_bootstrap
def create_test_input(predict_list, slice_list):
results = []
for entry in predict_list:
for slice_key in slice_list:
results.append((slice_key, {constants.INPUT_KEY: entry}))
return results
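# Editor's note (hedged illustration): create_test_input pairs every serialized
# example with every slice key, e.g.
#
#     create_test_input(['ex1', 'ex2'], [()]) ==
#         [((), {constants.INPUT_KEY: 'ex1'}), ((), {constants.INPUT_KEY: 'ex2'})]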
class AggregateTest(testutil.TensorflowModelAnalysisTest):
def _getEvalExportDir(self):
return os.path.join(self._getTempDir(), 'eval_export_dir')
def testAggregateOverallSlice(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = linear_classifier.simple_linear_classifier(
None, temp_eval_export_dir)
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir)
with beam.Pipeline() as pipeline:
example1 = self._makeExample(age=3.0, language='english', label=1.0)
example2 = self._makeExample(age=3.0, language='chinese', label=0.0)
example3 = self._makeExample(age=4.0, language='english', label=1.0)
example4 = self._makeExample(age=5.0, language='chinese', label=0.0)
predict_result = ([
example1.SerializeToString(),
example2.SerializeToString(),
example3.SerializeToString(),
example4.SerializeToString()
])
metrics = (
pipeline
| 'CreateTestInput' >> beam.Create(
create_test_input(predict_result, [()]))
| 'ComputePerSliceMetrics' >> aggregate.ComputePerSliceMetrics(
eval_shared_model=eval_shared_model, desired_batch_size=3))
def check_result(got):
self.assertEqual(1, len(got), 'got: %s' % got)
slice_key, metrics = got[0]
self.assertEqual(slice_key, ())
self.assertDictElementsAlmostEqual(
metrics, {
'accuracy': 1.0,
'label/mean': 0.5,
'my_mean_age': 3.75,
'my_mean_age_times_label': 1.75,
})
util.assert_that(metrics, check_result)
def testAggregateMultipleSlices(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = linear_classifier.simple_linear_classifier(
None, temp_eval_export_dir)
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir)
with beam.Pipeline() as pipeline:
example1 = self._makeExample(age=3.0, language='english', label=1.0)
example2 = self._makeExample(age=3.0, language='chinese', label=0.0)
example3 = self._makeExample(age=4.0, language='english', label=1.0)
example4 = self._makeExample(age=5.0, language='chinese', label=0.0)
predict_result_english_slice = ([
example1.SerializeToString(),
example3.SerializeToString()
])
predict_result_chinese_slice = ([
example2.SerializeToString(),
example4.SerializeToString()
])
test_input = (
create_test_input(predict_result_english_slice, [(
('language', 'english'))]) +
create_test_input(predict_result_chinese_slice, [(
('language', 'chinese'))]) +
# Overall slice
create_test_input(
predict_result_english_slice + predict_result_chinese_slice,
[()]))
metrics = (
pipeline
| 'CreateTestInput' >> beam.Create(test_input)
| 'ComputePerSliceMetrics' >> aggregate.ComputePerSliceMetrics(
eval_shared_model=eval_shared_model, desired_batch_size=3))
def check_result(got):
self.assertEqual(3, len(got), 'got: %s' % got)
slices = {}
for slice_key, metrics in got:
slices[slice_key] = metrics
overall_slice = ()
english_slice = (('language', 'english'))
chinese_slice = (('language', 'chinese'))
self.assertCountEqual(
list(slices.keys()), [overall_slice, english_slice, chinese_slice])
self.assertDictElementsAlmostEqual(
slices[overall_slice], {
'accuracy': 1.0,
'label/mean': 0.5,
'my_mean_age': 3.75,
'my_mean_age_times_label': 1.75,
})
self.assertDictElementsAlmostEqual(
slices[english_slice], {
'accuracy': 1.0,
'label/mean': 1.0,
'my_mean_age': 3.5,
'my_mean_age_times_label': 3.5,
})
self.assertDictElementsAlmostEqual(
slices[chinese_slice], {
'accuracy': 1.0,
'label/mean': 0.0,
'my_mean_age': 4.0,
'my_mean_age_times_label': 0.0,
})
util.assert_that(metrics, check_result)
def testAggregateMultipleSlicesWithSampling(self):
temp_eval_export_dir = self._getEvalExportDir()
_, eval_export_dir = linear_classifier.simple_linear_classifier(
None, temp_eval_export_dir)
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir)
with beam.Pipeline() as pipeline:
example1 = self._makeExample(age=3.0, language='english', label=1.0)
example2 = self._makeExample(age=3.0, language='chinese', label=0.0)
example3 = self._makeExample(age=4.0, language='english', label=1.0)
example4 = self._makeExample(age=5.0, language='chinese', label=0.0)
predict_result_english_slice = ([
example1.SerializeToString(),
example3.SerializeToString()
])
predict_result_chinese_slice = ([
example2.SerializeToString(),
example4.SerializeToString()
])
test_input = (
create_test_input(predict_result_english_slice, [(
('language', 'english'))]) +
create_test_input(predict_result_chinese_slice, [(
('language', 'chinese'))]) +
# Overall slice
create_test_input(
predict_result_english_slice + predict_result_chinese_slice,
[()]))
metrics = (
pipeline
| 'CreateTestInput' >> beam.Create(test_input)
| 'ComputePerSliceMetrics' >>
poisson_bootstrap.ComputeWithConfidenceIntervals(
aggregate.ComputePerSliceMetrics,
num_bootstrap_samples=10,
eval_shared_model=eval_shared_model,
desired_batch_size=3))
def assert_almost_equal_to_value_with_t_distribution(
target,
unsampled_value,
sample_mean,
sample_standard_deviation,
sample_degrees_of_freedom,
delta=2):
self.assertEqual(target.unsampled_value, unsampled_value)
self.assertAlmostEqual(target.sample_mean, sample_mean, delta=delta)
self.assertAlmostEqual(
target.sample_standard_deviation,
sample_standard_deviation,
delta=delta)
# The Poisson resampling could return [0, 0, ...], which will reduce
# the number of samples.
self.assertLessEqual(target.sample_degrees_of_freedom,
sample_degrees_of_freedom)
def check_overall_slice(slices):
my_dict = slices[()]
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age'], 3.75, 3.64, 0.34, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['accuracy'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['label/mean'], 0.5, 0.59, 0.29, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age_times_label'], 1.75, 2.15, 1.06, 19)
def check_english_slice(slices):
my_dict = slices[(('language', 'english'))]
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age'], 3.5, 3.18, 0.28, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['accuracy'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['label/mean'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age_times_label'], 3.5, 3.18, 0.28, 19)
def check_chinese_slice(slices):
my_dict = slices[(('language', 'chinese'))]
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age'], 4.0, 4.12, 0.83, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['accuracy'], 1.0, 1.0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['label/mean'], 0, 0, 0, 19)
assert_almost_equal_to_value_with_t_distribution(
my_dict['my_mean_age_times_label'], 0, 0, 0, 19)
def check_result(got):
self.assertEqual(3, len(got), 'got: %s' % got)
slices = {}
for slice_key, metrics in got:
slices[slice_key] = metrics
check_overall_slice(slices)
check_english_slice(slices)
check_chinese_slice(slices)
util.assert_that(metrics, check_result)
if __name__ == '__main__':
tf.test.main()
|
tensorflow/model-analysis
|
tensorflow_model_analysis/evaluators/legacy_aggregate_test.py
|
Python
|
apache-2.0
| 10,171
|