| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class: Python) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| ivanamihalek/blender | old/bgjob.py | Python | gpl-2.0 | 3,413 | 0.033402 |
# This script is an example of how you can run blender from the command line (in background mode with no interface)
# to automate tasks, in this example it creates a text object, camera and light, then renders and/or saves it.
# This example also shows how you can parse command line options to python scripts.
#
# Example usage for this test.
# blender -b -P $HOME/background_job.py -- --text="Hello World" --render="/tmp/hello" --save="/tmp/hello.blend"
# [Ivana:] note that /tmp can be replaced by the full path to PWD - ./ does not work
#
# Notice all python args are after the '--' argument.
import Blender
import bpy
def example_function(body_text, save_path, render_path):
sce= bpy.data.scenes.active
txt_data= bpy.data.curves.new('MyText', 'Text3d')
# Text Object
txt_ob = sce.objects.new(txt_data) # add the data to the scene as an object
txt_data.setText(body_text) # set the body text to the command line arg given
txt_data.setAlignment(Blender.Text3d.MIDDLE)# center text
# Camera
cam_data= bpy.data.cameras.new('MyCam') # create new camera data
cam_ob= sce.objects.new(cam_data) # add the camera data to the scene (creating a new object)
sce.objects.camera= cam_ob # set the active camera
cam_ob.loc= 0,0,10
# Lamp
lamp_data= bpy.data.lamps.new('MyLamp')
lamp_ob= sce.objects.new(lamp_data)
lamp_ob.loc= 2,2,5
if save_path:
try:
f= open(save_path, 'w')
f.close()
ok= True
except:
print 'Cannot save to path "%s"' % save_path
            ok= False
        if ok:
            Blender.Save(save_path, 1)
    if render_path:
render= sce.render
render.extensions= True
render.renderPath = render_path
#[Ivana:] don't know how to change the format
#render.setImageType(PNG)
render.sFrame= 1
render.eFrame= 1
render.renderAnim()
import sys # to get command line args
import optparse # to parse options for us and print a nice help message
script_name= 'background_job.py'
def main():
# get the args passed to blender after "--", all of which are ignored by blender specifically
# so python may receive its own arguments
argv= sys.argv
if '--' not in argv:
argv = [] # as if no args are passed
else:
argv = argv[argv.index('--')+1: ] # get all args after "--"
# When --help or no args are given, print this help
usage_text = 'Run blender in background mode with this script:'
usage_text += ' blender -b -P ' + script_name + ' -- [options]'
parser = optparse.OptionParser(usage = usage_text)
    # Example background utility: adds some text, then renders or saves it (with options)
# Possible types are: string, int, long, choice, float and complex.
parser.add_option('-t', '--text', dest='body_text', help='This text will be used to render an image', type='string')
parser.add_option('-s', '--save', dest='save_path', help='Save the generated file to the specified path', metavar='FILE')
parser.add_option('-r', '--render', dest='render_path', help='Render an image to the specified path', metavar='FILE')
options, args = parser.parse_args(argv) # In this example we wont use the args
if not argv:
parser.print_help()
return
if not options.body_text:
print 'Error: --text="some string" argument not given, aborting.'
parser.print_help()
return
# Run the example function
example_function(options.body_text, options.save_path, options.render_path)
print 'batch job finished, exiting'
if __name__ == '__main__':
main()
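
The header comments in this record describe the standard Blender pattern of passing script options after a bare `--`. A minimal standalone sketch of the same pattern with argparse (the option names mirror the record; everything else is an illustrative assumption):

```python
# Minimal sketch of the args-after-"--" pattern from the record above,
# using argparse instead of the deprecated optparse.
import argparse
import sys

def parse_script_args(argv=None):
    argv = sys.argv if argv is None else argv
    # Blender ignores everything after "--", so those tokens belong to the script.
    args = argv[argv.index('--') + 1:] if '--' in argv else []
    parser = argparse.ArgumentParser(description='Example background job options')
    parser.add_argument('--text', dest='body_text', help='Text used to render an image')
    parser.add_argument('--save', dest='save_path', metavar='FILE')
    parser.add_argument('--render', dest='render_path', metavar='FILE')
    return parser.parse_args(args)

if __name__ == '__main__':
    print(parse_script_args(['blender', '-b', '--', '--text=Hello World']))
```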
| Zentyal/openchange | mapiproxy/services/web/rpcproxy/rpcproxy/channels.py | Python | gpl-3.0 | 23,946 | 0.000752 |
# channels.py -- OpenChange RPC-over-HTTP implementation
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2015 Julien Kerihuel <j.kerihuel@openchange.org>
# Wolfgang Sourdeau <wsourdeau@inverse.ca>
# Enrique J. Hernández <ejhernandez@zentyal.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from operator import itemgetter
import os
from select import poll, POLLIN, POLLHUP
from socket import socket, AF_INET, AF_UNIX, SOCK_STREAM, MSG_WAITALL, \
SHUT_RDWR, error as socket_error
from struct import pack, unpack_from
import sys
from time import time, sleep
from uuid import UUID
# from rpcproxy.RPCH import RPCH, RTS_FLAG_ECHO
from openchange.utils.fdunix import send_socket, receive_socket
from openchange.utils.packets import (RTS_CMD_CONNECTION_TIMEOUT,
RTS_CMD_CUSTOM_OUT,
RTS_CMD_DESTINATION,
RTS_CMD_FLOW_CONTROL_ACK,
RTS_CMD_RECEIVE_WINDOW_SIZE,
RTS_CMD_VERSION,
RTS_FLAG_ECHO,
RTS_FLAG_PING,
RTS_CMD_DATA_LABELS,
RPCPacket, RPCRTSPacket, RPCRTSOutPacket)
# Connection Timeout Timer (in ms)
INBOUND_PROXY_CONN_TIMEOUT = 120000
OUTBOUND_PROXY_CONN_TIMEOUT = 120000
"""Documentation:
1. "Connection Establishment" sequence (from RPCH.pdf, 3.2.1.5.3.1)
client -> IN request -> proxy in
# server -> legacy server response -> proxy in
# server -> legacy server response -> proxy out
client -> Out request -> proxy out
client -> A1 -> proxy out
client -> B1 -> proxy in
# proxy out -> A2 -> server
proxy out -> OUT channel response -> client
# proxy in -> B2 -> server
proxy out -> A3 -> client
# server -> C1 -> proxy out
# server -> B3 -> proxy in
proxy out -> C2 -> client
2. internal unix socket protocols
Note: OUT proxy is always the server
* establishing virtual connection
OUT proxy listens on unix socket
IN proxy connects to OUT proxy
IN -> OUT: "IP"
IN -> OUT: in_window_size
IN -> OUT: in_conn_timeout
OUT -> IN: sends connection to OpenChange
The UNIX socket is open to transmit packets from IN to OUT channel using RTS CMD
    * channel recycling (unused yet, hypothetical)
When new OUT conn arrives:
new OUT -> OUT: "OP"
OUT -> new OUT: OUT listening socket (fdunix)
OUT -> new OUT: IN socket (fdunix)
OUT -> new OUT: oc socket (fdunix)
close OUT socket locally
"""
# these ids must have the same length
INBOUND_PROXY_ID = "IP"
OUTBOUND_PROXY_ID = "OP"
def _safe_close(socket_obj):
try:
socket_obj.shutdown(SHUT_RDWR)
socket_obj.close()
except:
pass
class RPCProxyChannelHandler(object):
def __init__(self, sockets_dir, logger):
self.sockets_dir = sockets_dir
self.logger = logger
self.unix_socket = None
self.client_socket = None # placeholder for wsgi.input
self.bytes_read = 0
self.bytes_written = 0
self.startup_time = time()
self.channel_cookie = None
self.connection_cookie = None
def handle_echo_request(self, environ, start_response):
self.logger.debug("handling echo request")
packet = RPCRTSOutPacket()
packet.flags = RTS_FLAG_ECHO
data = packet.make()
self.bytes_written = self.bytes_written + packet.size
start_response("200 Success", [("Content-length", "%d" % packet.size),
("Content-Type", "application/rpc")])
return [data]
def log_connection_stats(self):
self.logger.debug("channel kept alive during %f secs;"
" %d bytes received; %d bytes sent"
% ((time() - self.startup_time),
self.bytes_read, self.bytes_written))
class RPCProxyInboundChannelHandler(RPCProxyChannelHandler):
def __init__(self, sockets_dir, logger):
RPCProxyChannelHandler.__init__(self, sockets_dir, logger)
self.oc_conn = None
# Window_size to 256KiB (max size allowed)
self.window_size = 256 * 1024
self.conn_timeout = 0
self.client_keepalive = 0
self.association_group_id = None
# Variables required to flow control
self.local_available_window = self.window_size
self.rpc_pdu_bytes_received = 0
def _receive_conn_b1(self):
# CONN/B1 RTS PDU (TODO: validation)
# receive the cookie
self.logger.debug("receiving CONN/B1")
packet = RPCPacket.from_file(self.client_socket, self.logger)
if not isinstance(packet, RPCRTSPacket):
raise Exception("Unexpected non-rts packet received for CONN/B1")
self.logger.debug("packet headers = " + packet.pretty_dump())
self.connection_cookie = str(UUID(bytes_le=packet.commands[1]["Cookie"]))
self.channel_cookie = str(UUID(bytes_le=packet.commands[2]["Cookie"]))
self.client_keepalive = packet.commands[4]["ClientKeepalive"]
self.association_group_id = str(UUID(bytes_le=packet.commands[5]["AssociationGroupId"]))
self.bytes_read = self.bytes_read + packet.size
def _connect_to_OUT_channel(self):
# connect as a client to the cookie unix socket
socket_name = os.path.join(self.sockets_dir, self.connection_cookie)
self.logger.debug("connecting to OUT via unix socket '%s'"
% socket_name)
unix_socket = socket(AF_UNIX, SOCK_STREAM)
connected = False
attempt = 0
while not connected and attempt < 10:
try:
attempt = attempt + 1
unix_socket.connect(socket_name)
self.unix_socket = unix_socket
connected = True
except socket_error:
self.logger.debug("handling socket.error: %s"
% str(sys.exc_info()))
self.logger.warn("reattempting to connect to OUT"
" channel... (%d/10)" % attempt)
sleep(1)
if connected:
self.logger.debug("connection succeeded")
self.logger.debug("sending window size and connection timeout")
# identify ourselves as the IN proxy
unix_socket.sendall(INBOUND_PROXY_ID)
# send window_size to 256Kib (max size allowed)
# and conn_timeout (in milliseconds, max size allowed)
unix_socket.sendall(pack("<ll", self.window_size, INBOUND_PROXY_CONN_TIMEOUT))
# recv oc socket
self.oc_conn = receive_socket(unix_socket)
self.logger.debug("oc_conn received (fileno=%d)"
% self.oc_conn.fileno())
else:
self.logger.error("too many failed attempts to establish a"
" connection to OUT channel")
return connected
def _send_flow_control_ack(self):
"""Send FlowControlAckWithDestination RTS command to say the client there
is room for sending more information"""
self.logger.debug('Send to client the FlowControlAckWithDestination RTS command '
'after %d of avalaible window size' % self.local_available_window)
rts_packet = RPCRTSOutPacket(self.logger)
        # We always return back the same available maximum receive window size
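
The unix-socket handshake from the record's docstring ("IP", then window size and connection timeout) is what `_connect_to_OUT_channel` performs. A self-contained sketch of just that framing, assuming the same `<ll` little-endian layout; the socket path and values are hypothetical:

```python
# Sketch of the IN-proxy handshake framing described in the docstring
# above: a 2-byte proxy id, then window size and connection timeout
# packed little-endian ("<ll").
from socket import socket, AF_UNIX, SOCK_STREAM
from struct import pack, unpack

INBOUND_PROXY_ID = b"IP"

def send_in_handshake(path, window_size=256 * 1024, conn_timeout=120000):
    sock = socket(AF_UNIX, SOCK_STREAM)
    sock.connect(path)                       # e.g. <sockets_dir>/<connection cookie>
    sock.sendall(INBOUND_PROXY_ID)           # identify ourselves as the IN proxy
    sock.sendall(pack("<ll", window_size, conn_timeout))
    return sock

def read_in_handshake(conn):
    assert conn.recv(2) == INBOUND_PROXY_ID  # OUT side checks the peer id first
    return unpack("<ll", conn.recv(8))       # -> (window_size, conn_timeout)
```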
| ZaoLahma/PyCHIP-8 | rom.py | Python | mit | 308 | 0.003247 |
#!/usr/bin/env python3
import struct
class Rom(object):
    def __init__(self):
self.romData = []
def load(self, path):
file = open(path, 'rb')
while True:
byte = file.read(1)
if not byte:
break
self.romData.append(ord(byte))
| sprucedev/DockCI | manage.py | Python | isc | 157 | 0 |
#!/usr/bin/env python
import dockci.commands
from dockci.server import APP, app_init, MANAGER
if __name__ == "__main__":
app_init()
    MANAGER.run()
| SandyChapman/ramlizer | ramlizer/RamlResponse.py | Python | mit | 907 | 0 |
from .decorators import raml_optional, raml_simple_parse, raml_tabbed
from .RamlBody import RamlBody
from .RamlParseable import RamlParseable
class RamlResponse(RamlParseable):
def __init__(self, code, yaml):
        self.code = code
super(RamlResponse, self).__init__(yaml)
@raml_optional
@raml_simple_parse
def parse_description(self):
pass
@raml_optional
@raml_simple_parse
def parse_headers(self):
pass
@raml_optional
def parse_body(self):
self.body = {
body_encoding[0]: RamlBody(body_encoding[0], body_encoding[1])
for body_encoding in self.yaml['body'].items()
}
@raml_tabbed
def __str__(self):
return '''\
[RamlResponse({0.code}):
description: {0.description}
headers: {0.headers}
body: {0.body}
]'''.format(self)
def __repr__(self):
return str(self)
| DanceCats/CoreCat | corecat/models/project.py | Python | mit | 1,533 | 0 |
from corecat.constants import OBJECT_CODES, MODEL_VERSION
from ._sqlalchemy import Base, CoreCatBaseMixin
from ._sqlalchemy import Column, \
Integer, \
String, Text
class Project(CoreCatBaseMixin, Base):
"""Project Model class represent for the 'projects' table
which is used to store project's basic information."""
# Add the real table name here.
# TODO: Add the database prefix here
__tablename__ = 'project'
# Column definition
project_id = Column('id', Integer,
primary_key=True,
autoincrement=True
)
project_name = Column('name', String(100),
nullable=False
)
project_description = Column('description', Text,
nullable=True
)
# Relationship
# TODO: Building relationship
def __init__(self, project_name,
                 created_by_user_id,
**kwargs):
"""
Constructor of Project Model Class.
:param project_name: Name of the project.
:param created_by_user_id: Project is created under this user ID.
:param project_description: Description of the project.
"""
self.set_up_basic_information(
MODEL_VERSION[OBJECT_CODES['Project']],
created_by_user_id
)
self.project_name = project_name
        self.project_description = kwargs.get('project_description', None)
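
For reference, constructing the model above would look roughly like the following; the import path comes from the record's metadata, while the user id, description, and session handling are illustrative assumptions:

```python
# Hypothetical usage of the Project model above; a configured SQLAlchemy
# session is assumed for the commented-out persistence step.
from corecat.models.project import Project

project = Project('Sample project',
                  created_by_user_id=1,
                  project_description='Stores basic project information.')
# session.add(project); session.commit()
```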
| awlange/brainsparks | src/calrissian/layers/particle2_dipole.py | Python | mit | 4,517 | 0.000221 |
from .layer import Layer
from ..activation import Activation
import numpy as np
class Particle2Dipole(object):
"""
Dipole approximated as 2 coupled charges of equal magnitude, uncoupled
"""
def __init__(self, input_size=0, output_size=0, activation="sigmoid", k_bond=1.0, k_eq=0.1, s=1.0, cut=10.0,
q=None, b=None):
self.input_size = input_size
self.output_size = output_size
self.activation_name = activation.lower()
self.activation = Activation.get(activation)
self.d_activation = Activation.get_d(activation)
self.w = None
# Harmonic constraint coefficient and equilibrium
self.k_bond = k_bond
self.k_eq = k_eq
self.cut = cut
self.cut2 = cut*cut
# Weight initialization
g = np.sqrt(2.0 / (input_size + output_size))
if b is None:
b = g
self.b = np.random.uniform(-b, b, (1, output_size))
# Charges
if q is None:
q = g
# self.q = np.random.uniform(-q, q, output_size)
self.q = np.random.choice([q, -q], size=output_size)
self.rx_pos_inp = np.random.normal(0.0, s, input_size)
self.ry_pos_inp = np.random.normal(0.0, s, input_size)
self.rz_pos_inp = np.random.normal(0.0, s, input_size)
self.rx_neg_inp = np.random.normal(0.0, s, input_size)
self.ry_neg_inp = np.random.normal(0.0, s, input_size)
self.rz_neg_inp = np.random.normal(0.0, s, input_size)
self.rx_pos_out = np.random.normal(0.0, s, output_size)
self.ry_pos_out = np.random.normal(0.0, s, output_size)
self.rz_pos_out = np.random.normal(0.0, s, output_size)
self.rx_neg_out = np.random.normal(0.0, s, output_size)
self.ry_neg_out = np.random.normal(0.0, s, output_size)
self.rz_neg_out = np.random.normal(0.0, s, output_size)
def set_cut(self, cut):
self.cut = cut
self.cut2 = cut*cut
def feed_forward(self, a_in):
return self.compute_a(self.compute_z(a_in))
def compute_z(self, a_in):
atrans = a_in.transpose()
z = np.zeros((self.output_size, len(a_in)))
for j in range(self.output_size):
dx = self.rx_pos_inp - self.rx_pos_out[j]
dy = self.ry_pos_inp - self.ry_pos_out[j]
dz = self.rz_pos_inp - self.rz_pos_out[j]
potential = np.exp(-(dx**2 + dy**2 + dz**2))
dx = self.rx_pos_inp - self.rx_neg_out[j]
dy = self.ry_pos_inp - self.ry_neg_out[j]
dz = self.rz_pos_inp - self.rz_neg_out[j]
potential -= np.exp(-(dx**2 + dy**2 + dz**2))
dx = self.rx_neg_inp - self.rx_pos_out[j]
dy = self.ry_neg_inp - self.ry_pos_out[j]
dz = self.rz_neg_inp - self.rz_pos_out[j]
potential -= np.exp(-(dx**2 + dy**2 + dz**2))
dx = self.rx_neg_inp - self.rx_neg_out[j]
dy = self.ry_neg_inp - self.ry_neg_out[j]
dz = self.rz_neg_inp - self.rz_neg_out[j]
potential += np.exp(-(dx**2 + dy**2 + dz**2))
            z[j] = self.b[0][j] + self.q[j] * potential.dot(atrans)
return z.transpose()
def compute_a(self, z):
return self.activation(z)
def compute_da(self, z):
return self.d_activation(z)
def compute_w(self):
w = np.zeros((self.input_size, self.output_size))
for j in range(self.output_size):
            dx = self.rx_pos_inp - self.rx_pos_out[j]
dy = self.ry_pos_inp - self.ry_pos_out[j]
dz = self.rz_pos_inp - self.rz_pos_out[j]
potential = np.exp(-(dx**2 + dy**2 + dz**2))
dx = self.rx_pos_inp - self.rx_neg_out[j]
dy = self.ry_pos_inp - self.ry_neg_out[j]
dz = self.rz_pos_inp - self.rz_neg_out[j]
potential -= np.exp(-(dx**2 + dy**2 + dz**2))
dx = self.rx_neg_inp - self.rx_pos_out[j]
dy = self.ry_neg_inp - self.ry_pos_out[j]
dz = self.rz_neg_inp - self.rz_pos_out[j]
potential -= np.exp(-(dx**2 + dy**2 + dz**2))
dx = self.rx_neg_inp - self.rx_neg_out[j]
dy = self.ry_neg_inp - self.ry_neg_out[j]
dz = self.rz_neg_inp - self.rz_neg_out[j]
potential += np.exp(-(dx**2 + dy**2 + dz**2))
potential = self.q[j] * potential
for i in range(self.input_size):
w[i][j] = potential[i]
self.w = w
return w
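
The four exponential terms in `compute_z` and `compute_w` above form a dipole-dipole kernel: like-signed charge pairs add, cross pairs subtract. A small self-contained check of that kernel for a single input/output pair, with illustrative positions:

```python
# Self-contained sketch of the four-term Gaussian kernel from compute_z
# and compute_w above: (+,+) and (-,-) pairs add, cross pairs subtract.
import numpy as np

def dipole_kernel(r_pos_in, r_neg_in, r_pos_out, r_neg_out):
    gauss = lambda a, b: np.exp(-np.sum((a - b) ** 2))
    return (gauss(r_pos_in, r_pos_out) - gauss(r_pos_in, r_neg_out)
            - gauss(r_neg_in, r_pos_out) + gauss(r_neg_in, r_neg_out))

rp_in, rn_in = np.array([0., 0., 0.]), np.array([1., 0., 0.])
rp_out, rn_out = np.array([0., 1., 0.]), np.array([1., 1., 0.])
print(dipole_kernel(rp_in, rn_in, rp_out, rn_out))  # aligned dipoles -> positive
```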
| addition-it-solutions/project-all | addons/document/content_index.py | Python | agpl-3.0 | 6,555 | 0.005034 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import os
import tempfile
from subprocess import Popen, PIPE
_logger = logging.getLogger(__name__)
class indexer(object):
""" An indexer knows how to parse the content of some file.
Typically, one indexer should be instantiated per file
type.
Override this class to add more functionality. Note that
you should only override the Content or the File methods
that give an optimal result. """
def _getMimeTypes(self):
""" Return supported mimetypes """
return []
def _getExtensions(self):
return []
def _getDefMime(self, ext):
""" Return a mimetype for this document type, ideally the
closest to the extension ext. """
mts = self._getMimeTypes();
if len (mts):
return mts[0]
return None
def indexContent(self, content, filename=None, realfile=None):
""" Use either content or the real file, to index.
Some parsers will work better with the actual
content, others parse a file easier. Try the
optimal.
"""
res = ''
try:
if content != None:
return self._doIndexContent(content)
except NotImplementedError:
pass
if realfile != None:
try:
return self._doIndexFile(realfile)
except NotImplementedError:
pass
fp = open(realfile,'rb')
try:
content2 = fp.read()
finally:
fp.close()
# The not-handled exception may be raised here
return self._doIndexContent(content2)
# last try, with a tmp file
if content:
try:
fname,ext = filename and os.path.splitext(filename) or ('','')
fd, rfname = tempfile.mkstemp(suffix=ext)
os.write(fd, content)
os.close(fd)
res = self._doIndexFile(rfname)
os.unlink(rfname)
return res
except NotImplementedError:
pass
raise ValueError('No appropriate method to index file.')
def _doIndexContent(self, content):
raise NotImplementedError("Content cannot be handled here.")
def _doIndexFile(self, fpath):
raise NotImplementedError("Content cannot be handled here.")
def __repr__(self):
return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__)
def mime_match(mime, mdict):
if mdict.has_key(mime):
return (mime, mdict[mime])
if '/' in mime:
mpat = mime.split('/')[0]+'/*'
if mdict.has_key(mpat):
return (mime, mdict[mpat])
return (None, None)
class contentIndex(object):
def __init__(self):
self.mimes = {}
self.exts = {}
def register(self, obj):
f = False
for mime in obj._getMimeTypes():
self.mimes[mime] = obj
f = True
for ext in obj._getExtensions():
self.exts[ext] = obj
f = True
if not f:
raise ValueError("Your indexer should at least support a mimetype or extension.")
_logger.debug('Register content indexer: %r.', obj)
def doIndex(self, content, filename=None, content_type=None, realfname=None, debug=False):
fobj = None
fname = None
mime = None
if content_type and self.mimes.has_key(content_type):
mime = content_type
fobj = self.mimes[content_type]
elif filename:
bname,ext = os.path.splitext(filename)
if self.exts.has_key(ext):
fobj = self.exts[ext]
mime = fobj._getDefMime(ext)
if content_type and not fobj:
mime,fobj = mime_match(content_type, self.mimes)
if not fobj:
try:
if realfname :
fname = realfname
else:
try:
bname,ext = os.path.splitext(filename or 'test.tmp')
except Exception:
bname, ext = filename, 'tmp'
fd, fname = tempfile.mkstemp(suffix=ext)
os.write(fd, content)
os.close(fd)
pop = Popen(['file','-b','--mime',fname], shell=False, stdout=PIPE)
(result, _) = pop.communicate()
mime2 = result.split(';')[0]
_logger.debug('File gives us: %s', mime2)
# Note that the temporary file still exists now.
mime,fobj = mime_match(mime2, self.mimes)
if not mime:
mime = mime2
except Exception:
                _logger.info('Cannot determine mime type.', exc_info=True)
try:
if fobj:
res = (mime, fobj.indexContent(content,filename,fname or realfname) )
else:
_logger.debug("Have no object, return (%s, None).", mime)
res = (mime, '')
except Exception:
_logger.info("Cannot index file %s (%s).",
filename, fname or realfname, exc_info=True)
res = (mime, '')
# If we created a tmp file, unlink it now
if not realfname and fname:
try:
os.unlink(fname)
except Exception:
_logger.exception("Cannot unlink %s.", fname)
return res
cntIndex = contentIndex()
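
Registering a new file type against the `contentIndex` instance above takes an `indexer` subclass that advertises at least one mimetype or extension; a minimal hypothetical example:

```python
# Hypothetical registration against cntIndex above: advertise a mimetype
# and an extension, implement _doIndexContent, and leave _doIndexFile to
# raise NotImplementedError so indexContent falls back to raw content.
class PlainTextIndexer(indexer):
    def _getMimeTypes(self):
        return ['text/plain']
    def _getExtensions(self):
        return ['.txt']
    def _doIndexContent(self, content):
        return content  # the raw text is its own index

cntIndex.register(PlainTextIndexer())
# cntIndex.doIndex('hello world', filename='note.txt')
# -> ('text/plain', 'hello world')
```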
| hlange/LogSoCR | pysc/usi/log/socr_streamhandler/handler.py | Python | agpl-3.0 | 770 | 0.011688 |
import logging
import json
import usi
import usi.systemc
class SoCR_StreamHandler(logging.StreamHandler): # Inherit from StreamHandler
    def __init__(self, stream=None):
logging.StreamHandler.__init__(self)
def emit(self, record):
if "message_type" in record.__dict__:
record.filename = record.__dict__["message_type"]
if "delta_count" not in record.__dict__:
record.__dict__["delta_count"] = usi.systemc.delta_count()
if "parameters" not in record.__dict__:
record.__dict__["parameters"] = ""
if "time" not in record.__dict__:
record.__dict__["time"] = usi.systemc.simulation_time(usi.systemc.NS)
logging.StreamHandler.emit(self, record)
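
Usage-wise, the extra fields above arrive through logging's standard `extra` mechanism, with `emit` filling in whatever is missing; a hypothetical sketch (logger name and values are illustrative):

```python
# Hypothetical usage of SoCR_StreamHandler above: extra fields travel in
# the standard `extra` dict; emit() fills in delta_count, parameters and
# time when they are absent.
import logging

log = logging.getLogger('socr.example')
log.addHandler(SoCR_StreamHandler())
log.warning('transaction finished',
            extra={'message_type': 'systemc', 'parameters': 'id=42'})
```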
| jasco/authomatic | authomatic/providers/oauth1.py | Python | mit | 38,859 | 0 |
# -*- coding: utf-8 -*-
"""
|oauth1| Providers
--------------------
Providers which implement the |oauth1|_ protocol.
.. autosummary::
OAuth1
Bitbucket
Flickr
Meetup
Plurk
Twitter
Tumblr
UbuntuOne
Vimeo
Xero
Xing
Yahoo
"""
import abc
import binascii
import datetime
import hashlib
import hmac
import logging
import time
import uuid
import authomatic.core as core
from authomatic import providers
from authomatic.exceptions import (
CancellationError,
FailureError,
OAuth1Error,
)
from authomatic import six
from authomatic.six.moves import urllib_parse as parse
__all__ = [
'OAuth1',
'Bitbucket',
'Flickr',
'Meetup',
'Plurk',
'Twitter',
'Tumblr',
'UbuntuOne',
'Vimeo',
'Xero',
'Xing',
'Yahoo'
]
def _normalize_params(params):
"""
Returns a normalized query string sorted first by key, then by value
excluding the ``realm`` and ``oauth_signature`` parameters as specified
here: http://oauth.net/core/1.0a/#rfc.section.9.1.1.
:param params:
:class:`dict` or :class:`list` of tuples.
"""
if isinstance(params, dict):
params = list(params.items())
# remove "realm" and "oauth_signature"
params = sorted([
(k, v) for k, v in params
if k not in ('oauth_signature', 'realm')
])
# sort
# convert to query string
qs = parse.urlencode(params)
# replace "+" to "%20"
qs = qs.replace('+', '%20')
# replace "%7E" to "%20"
qs = qs.replace('%7E', '~')
return qs
def _join_by_ampersand(*args):
return '&'.join([core.escape(i) for i in args])
def _create_base_string(method, base, params):
"""
Returns base string for HMAC-SHA1 signature as specified in:
http://oauth.net/core/1.0a/#rfc.section.9.1.3.
"""
normalized_qs = _normalize_params(params)
return _join_by_ampersand(method, base, normalized_qs)
class BaseSignatureGenerator(object):
"""
Abstract base class for all signature generators.
"""
__metaclass__ = abc.ABCMeta
#: :class:`str` The name of the signature method.
method = ''
@abc.abstractmethod
def create_signature(self, method, base, params,
consumer_secret, token_secret=''):
"""
Must create signature based on the parameters as specified in
http://oauth.net/core/1.0a/#signing_process.
.. warning::
|classmethod|
:param str method:
HTTP method of the request to be signed.
:param str base:
            Base URL of the request without query string and fragment.
:param dict params:
Dictionary or list of tuples of the request parameters.
:param str consumer_secret:
:attr:`.core.Consumer.secret`
:param str token_secret:
Access token secret as specified in
http://oauth.net/core/1.0a/#anchor3.
:returns:
The signature string.
"""
class HMACSHA1SignatureGenerator(BaseSignatureGenerator):
"""
HMAC-SHA1 signature generator.
See: http://oauth.net/core/1.0a/#anchor15
"""
method = 'HMAC-SHA1'
@classmethod
def _create_key(cls, consumer_secret, token_secret=''):
"""
Returns a key for HMAC-SHA1 signature as specified at:
http://oauth.net/core/1.0a/#rfc.section.9.2.
:param str consumer_secret:
:attr:`.core.Consumer.secret`
:param str token_secret:
Access token secret as specified in
http://oauth.net/core/1.0a/#anchor3.
:returns:
Key to sign the request with.
"""
return _join_by_ampersand(consumer_secret, token_secret or '')
@classmethod
def create_signature(cls, method, base, params,
consumer_secret, token_secret=''):
"""
Returns HMAC-SHA1 signature as specified at:
http://oauth.net/core/1.0a/#rfc.section.9.2.
:param str method:
HTTP method of the request to be signed.
:param str base:
            Base URL of the request without query string and fragment.
:param dict params:
Dictionary or list of tuples of the request parameters.
:param str consumer_secret:
:attr:`.core.Consumer.secret`
:param str token_secret:
Access token secret as specified in
http://oauth.net/core/1.0a/#anchor3.
:returns:
The signature string.
"""
base_string = _create_base_string(method, base, params)
key = cls._create_key(consumer_secret, token_secret)
hashed = hmac.new(
six.b(key),
base_string.encode('utf-8'),
hashlib.sha1)
base64_encoded = binascii.b2a_base64(hashed.digest())[:-1]
return base64_encoded
class PLAINTEXTSignatureGenerator(BaseSignatureGenerator):
"""
PLAINTEXT signature generator.
See: http://oauth.net/core/1.0a/#anchor21
"""
method = 'PLAINTEXT'
@classmethod
def create_signature(cls, method, base, params,
consumer_secret, token_secret=''):
consumer_secret = parse.quote(consumer_secret, '')
        token_secret = parse.quote(token_secret, '')
return parse.quote('&'.join((consumer_secret, token_secret)), '')
class OAuth1(providers.AuthorizationProvider):
"""
Base class for |oauth1|_ providers.
"""
_signature_generator = HMACSHA1SignatureGenerator
PROVIDER_TYPE_ID = 1
REQUEST_TOKEN_REQUEST_TYPE = 1
    def __init__(self, *args, **kwargs):
"""
Accepts additional keyword arguments:
:param str consumer_key:
The *key* assigned to our application (**consumer**) by
the **provider**.
:param str consumer_secret:
The *secret* assigned to our application (**consumer**) by
the **provider**.
:param id:
A unique short name used to serialize :class:`.Credentials`.
:param dict user_authorization_params:
A dictionary of additional request parameters for
**user authorization request**.
:param dict access_token_params:
A dictionary of additional request parameters for
**access token request**.
:param dict request_token_params:
A dictionary of additional request parameters for
**request token request**.
"""
super(OAuth1, self).__init__(*args, **kwargs)
self.request_token_params = self._kwarg(
kwargs, 'request_token_params', {})
# ========================================================================
# Abstract properties
# ========================================================================
@abc.abstractproperty
def request_token_url(self):
"""
:class:`str` URL where we can get the |oauth1| request token.
see http://oauth.net/core/1.0a/#auth_step1.
"""
# ========================================================================
# Internal methods
# ========================================================================
@classmethod
def create_request_elements(
cls, request_type, credentials, url, params=None, headers=None,
body='', method='GET', verifier='', callback=''
):
"""
Creates |oauth1| request elements.
"""
params = params or {}
headers = headers or {}
consumer_key = credentials.consumer_key or ''
consumer_secret = credentials.consumer_secret or ''
token = credentials.token or ''
token_secret = credentials.token_secret or ''
# separate url base and query parameters
url, base_params = cls._split_url(url)
# add extracted params to future params
params.update(dict(base_params))
if request_type == cls.USER_AUTHORIZATION_REQUEST_TYPE:
# no need for signature
if token:
params['oauth_token'] = token
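
The normalization rules above (drop `realm` and `oauth_signature`, sort, percent-encode with `%20` and `~`) can be checked directly against the module helpers. The parameter values below are illustrative, and the second expected line assumes `core.escape` percent-encodes with `~` left unescaped, per the OAuth 1.0a rules:

```python
# Worked example of the base-string construction above.
params = [('oauth_consumer_key', 'key'), ('realm', 'ignored'),
          ('b', '2'), ('a', '1 ~')]
print(_normalize_params(params))
# -> a=1%20~&b=2&oauth_consumer_key=key   (realm dropped, sorted, %20/~ kept)
print(_create_base_string('GET', 'http://example.com/r', params))
# -> GET&http%3A%2F%2Fexample.com%2Fr&a%3D1%2520~%26b%3D2%26oauth_consumer_key%3Dkey
```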
| Mappy/PyLR | pylr/values.py | Python | apache-2.0 | 3,453 | 0.003475 |
# -*- coding: utf-8 -*-
''' Define functions for transforming encoded values
.. moduleauthor:: David Marteau <david.marteau@mappy.com>
'''
from .utils import signum
from .constants import (BIT24FACTOR_REVERSED,
DECA_MICRO_DEG_FACTOR,
BEARING_SECTOR,
LENGTH_INTERVAL,
RELATIVE_OFFSET_LENGTH)
def _get32BitRepresentation(v):
""" Calculate the 32 bit double value representation of a coordinate
out of a 24 bit integer value representation.
:param int v: Coordinate expressed in 24 bit integer value
:return: Coordinate (in degrees)
:rtype: float
"""
return (v - signum(v)*0.5) * BIT24FACTOR_REVERSED
def coordinates_values(lon, lat):
""" Calculate the 32 bit double value representations of a pair of coordinates
out of 24 bit integer values representation.
:param int lon: Longitude expressed in 24 bit integer value
:param int lat: Latitude expressed in 24 bit integer value
:return: Pair of coordinates (in degrees)
:rtype: float
"""
return (_get32BitRepresentation(lon),
_get32BitRepresentation(lat))
def rel_coordinates_values(prev, blon, blat ):
""" Calculate absolute corrdinates from relative coordinates to
apply to the absolute coord
|
inates of a reference position.
:param Coords prev: Absolute coordinates of the reference geo point
    :param int blon: relative longitude expressed in decamicrodegrees
:param int blat: relative latitude expressed in decamicrodegrees
:return: Pair of coordinates (in degrees)
:rtype: float
"""
lon = (prev.lon + blon / DECA_MICRO_DEG_FACTOR)
lat = (prev.lat + blat / DECA_MICRO_DEG_FACTOR)
return lon, lat
def bearing_estimate(interval):
""" Calculates an estimate for the bearing value. The bearing information
provided by the location reference point indicates an interval in which
the concrete value is. The approximation is the middle of that interval.
:param int interval: bearing interval
:return: bearing estimation (in degrees)
:rtype: float
"""
lower = interval * BEARING_SECTOR
upper = (interval + 1) * BEARING_SECTOR
return ((upper + lower) / 2)
def distance_estimate(interval):
""" Calculates an estimate for a distance value. The distance information
provided by the location reference point indicates an interval in which
the concrete value is. The approximation is the middle of that interval.
:param int interval: distance interval
:return: distance estimation (in meters)
:rtype: int
"""
lower = interval * LENGTH_INTERVAL
upper = (interval + 1) * LENGTH_INTERVAL
return round(((upper + lower) / 2))
def relative_distance(offset):
""" Calculates an estimate for a relative offset value. The offset information
provided by the location reference point indicates an interval in which
the concrete value (percentage) is. The approximation is the middle of
that interval.
:param int offset: offset interval
:return: offset estimation (in %)
:rtype: float
"""
lower = offset * RELATIVE_OFFSET_LENGTH
upper = (offset + 1) * RELATIVE_OFFSET_LENGTH
return (lower + upper) / 2
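
Each estimator above returns the midpoint of the encoded interval. With the usual OpenLR constants (assumed here; the real values live in `pylr.constants`), interval 3 works out as follows:

```python
# Worked example of the midpoint estimators above, using assumed OpenLR
# constants; the actual values are defined in pylr.constants.
BEARING_SECTOR = 360.0 / 32      # 11.25 degrees per bearing sector
LENGTH_INTERVAL = 15000.0 / 256  # ~58.6 m per distance interval

interval = 3
bearing = (interval * BEARING_SECTOR + (interval + 1) * BEARING_SECTOR) / 2
distance = round((interval * LENGTH_INTERVAL + (interval + 1) * LENGTH_INTERVAL) / 2)
print(bearing, distance)  # 39.375 205
```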
| PXke/invenio | invenio/modules/search/testsuite/test_search_external_collections_getter.py | Python | gpl-2.0 | 2,766 | 0.007954 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Testing functions for the page getter module.
"""
__revision__ = "$Id$"
from invenio.base.wrappers import lazy_import
from invenio.testsuite import make_test_suite, run_test_suite, InvenioTestCase
HTTPAsyncPageGetter = lazy_import('invenio.legacy.websearch_external_collections.getter:HTTPAsyncPageGetter')
async_download = lazy_import('invenio.legacy.websearch_external_collections.getter:async_download')
class AsyncDownloadTest(InvenioTestCase):
"""Test suite for websearch_external_collections_*"""
def test_async_download(self):
"""websearch_external_collections_getter - asynchronous download"""
## Test various cases for the async_download function:
## - test 1 working page: invenio-software.org
## - test 1 unresolvable name: rjfreijoiregjreoijgoirg.fr
## - test 1 bad IP: 1.2.3.4
## Return the list of errors.
checks = [
{'url': 'http://invenio-software.org', 'content': 'About Invenio'},
{'url': 'http://rjfreijoiregjreoijgoirg.fr'},
{'url': 'http://1.2.3.4/'} ]
def finished(pagegetter, check, current_time):
"""Function called when a page is received."""
            is_ok = pagegetter.status is not None
if 'content' in check and is_ok:
is_ok = pagegetter.data.find(check['content']) > 0
check['result'] = is_ok == ('content' in check)
pagegetters = [HTTPAsyncPageGetter(check['url']) for check in checks]
finished_list = async_download(pagegetters, finished, checks, 20)
for (finished, check) in zip(finished_list, checks):
if not finished:
check['result'] = 'content' not in check
errors = [check for check in checks if not check['result']]
self.assertEqual(errors, [])
TEST_SUITE = make_test_suite(AsyncDownloadTest,)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
| hareevs/pgbarman | tests/test_infofile.py | Python | gpl-3.0 | 19,981 | 0 |
# Copyright (C) 2013-2016 2ndQuadrant Italia Srl
#
# This file is part of Barman.
#
# Barman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Barman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Barman. If not, see <http://www.gnu.org/licenses/>.
import json
import os
from datetime import datetime
import mock
import pytest
from dateutil.tz import tzlocal, tzoffset
from barman.infofile import (BackupInfo, Field, FieldListFile, WalFileInfo,
load_datetime_tz)
from testing_helpers import build_mocked_server
BASE_BACKUP_INFO = """backup_label=None
begin_offset=40
begin_time=2014-12-22 09:25:22.561207+01:00
begin_wal=000000010000000000000004
begin_xlog=0/4000028
config_file=/fakepath/postgresql.conf
end_offset=184
end_time=2014-12-22 09:25:27.410470+01:00
end_wal=000000010000000000000004
end_xlog=0/40000B8
error=None
hba_file=/fakepath/pg_hba.conf
ident_file=/fakepath/pg_ident.conf
mode=default
pgdata=/fakepath/data
server_name=fake-9.4-server
size=20935690
status=DONE
tablespaces=[('fake_tbs', 16384, '/fake_tmp/tbs')]
timeline=1
version=90400"""
def test_load_datetime_tz():
"""
Unit test for load_datetime_tz function
This test covers all load_datetime_tz code with correct parameters
and checks that a ValueError is raised when called with a bad parameter.
"""
# try to load a tz-less timestamp
assert load_datetime_tz("2012-12-15 10:14:51.898000") == \
datetime(2012, 12, 15, 10, 14, 51, 898000,
tzinfo=tzlocal())
# try to load a tz-aware timestamp
assert load_datetime_tz("2012-12-15 10:14:51.898000 +0100") == \
datetime(2012, 12, 15, 10, 14, 51, 898000,
tzinfo=tzoffset('GMT+1', 3600))
# try to load an incorrect date
with pytest.raises(ValueError):
load_datetime_tz("Invalid datetime")
# noinspection PyMethodMayBeStatic
class TestField(object):
def test_field_creation(self):
field = Field('test_field')
assert field
def test_field_with_arguments(self):
dump_function = str
load_function = int
default = 10
docstring = 'Test Docstring'
field = Field('test_field', dump_function, load_function, default,
docstring)
assert field
assert field.name == 'test_field'
assert field.to_str == dump_function
assert field.from_str == load_function
assert field.default == default
assert field.__doc__ == docstring
def test_field_dump_decorator(self):
test_field = Field('test_field')
dump_function = str
test_field = test_field.dump(dump_function)
assert test_field.to_str == dump_function
def test_field_load_decorator(self):
test_field = Field('test_field')
load_function = int
test_field = test_field.dump(load_function)
assert test_field.to_str == load_function
class DummyFieldListFile(FieldListFile):
dummy = Field('dummy', dump=str, load=int, default=12, doc='dummy_field')
# noinspection PyMethodMayBeStatic
class TestFieldListFile(object):
def test_field_list_file_creation(self):
with pytest.raises(AttributeError):
FieldListFile(test_argument=11)
field = FieldListFile()
assert field
def test_subclass_creation(self):
with pytest.raises(AttributeError):
DummyFieldListFile(test_argument=11)
field = DummyFieldListFile()
assert field
assert field.dummy == 12
field = DummyFieldListFile(dummy=13)
assert field
assert field.dummy == 13
def test_subclass_access(self):
dummy = DummyFieldListFile()
dummy.dummy = 14
        assert dummy.dummy == 14
        with pytest.raises(AttributeError):
del dummy.dummy
def test_subclass_load(self, tmpdir):
tmp_file = tmpdir.join("test_file")
tmp_file.write('dummy=15\n')
dummy = DummyFieldListFile()
dummy.load(tmp_file.strpath)
assert dummy.dummy == 15
def test_subclass_save(self, tmpdir):
tmp_file = tmpdir.join("test_file")
dummy = DummyFieldListFile(dummy=16)
dummy.save(tmp_file.strpath)
assert 'dummy=16' in tmp_file.read()
def test_subclass_from_meta_file(self, tmpdir):
tmp_file = tmpdir.join("test_file")
tmp_file.write('dummy=17\n')
dummy = DummyFieldListFile.from_meta_file(tmp_file.strpath)
assert dummy.dummy == 17
def test_subclass_items(self):
dummy = DummyFieldListFile()
dummy.dummy = 18
assert list(dummy.items()) == [('dummy', '18')]
def test_subclass_repr(self):
dummy = DummyFieldListFile()
dummy.dummy = 18
assert repr(dummy) == "DummyFieldListFile(dummy='18')"
# noinspection PyMethodMayBeStatic
class TestWalFileInfo(object):
def test_from_file_no_compression(self, tmpdir):
tmp_file = tmpdir.join("000000000000000000000001")
tmp_file.write('dummy_content\n')
stat = os.stat(tmp_file.strpath)
wfile_info = WalFileInfo.from_file(tmp_file.strpath)
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == stat.st_size
assert wfile_info.time == stat.st_mtime
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
assert wfile_info.relpath() == (
'0000000000000000/000000000000000000000001')
@mock.patch('barman.infofile.identify_compression')
def test_from_file_compression(self, id_compression, tmpdir):
# prepare
id_compression.return_value = 'test_compression'
tmp_file = tmpdir.join("000000000000000000000001")
tmp_file.write('dummy_content\n')
wfile_info = WalFileInfo.from_file(tmp_file.strpath)
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == tmp_file.size()
assert wfile_info.time == tmp_file.mtime()
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
assert wfile_info.compression == 'test_compression'
assert wfile_info.relpath() == (
'0000000000000000/000000000000000000000001')
@mock.patch('barman.infofile.identify_compression')
def test_from_file_default_compression(self, id_compression, tmpdir):
# prepare
id_compression.return_value = None
tmp_file = tmpdir.join("00000001000000E500000064")
tmp_file.write('dummy_content\n')
wfile_info = WalFileInfo.from_file(
tmp_file.strpath,
default_compression='test_default_compression')
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == tmp_file.size()
assert wfile_info.time == tmp_file.mtime()
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
assert wfile_info.compression == 'test_default_compression'
assert wfile_info.relpath() == (
'00000001000000E5/00000001000000E500000064')
@mock.patch('barman.infofile.identify_compression')
def test_from_file_override_compression(self, id_compression, tmpdir):
# prepare
id_compression.return_value = None
tmp_file = tmpdir.join("000000000000000000000001")
tmp_file.write('dummy_content\n')
wfile_info = WalFileInfo.from_file(
tmp_file.strpath,
default_compression='test_default_compression',
compression='test_override_compression')
assert wfile_info.name == tmp_file.basename
assert wfile_info.size == tmp_file.size()
assert wfile_info.time == tmp_file.mtime()
assert wfile_info.filename == '%s.meta' % tmp_file.strpath
| maweigert/biobeam | biobeam/simlsm/sim_dslm.py | Python | bsd-3-clause | 3,286 | 0.025867 |
"""
mweigert@mpi-cbg.de
"""
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from biobeam.simlsm.simlsm import SimLSM_Base
from six.moves import range
class SimLSM_DSLM(SimLSM_Base):
def _prepare_u0_illum(self, zfoc):
self.u0_illum = self._bpm_illum.u0_beam(NA = self.NA_illum, zfoc = zfoc)
def propagate_illum_single(self,cz = 0, **bpm_kwargs):
bpm_kwargs.update({"return_comp":"intens"})
offset = int(cz/self._bpm_illum.dy)
assert abs(offset)<= self.u0_illum.shape[0]//2
print("offset: ",offset)
u0 = np.roll(self.u0_illum, offset ,axis=0)
u = self._bpm_illum.propagate(u0,**bpm_kwargs)
return self._trans_illum(u, inv = True)
def propagate_illum(self,cz = 0, dx_parallel = None,**bpm_kwargs):
bpm = self._bpm_illum
bpm_kwargs.update({"return_comp":"intens"})
offset = int(cz/bpm.dy)
        assert abs(offset)<= self.u0_illum.shape[0]//2
print("offset", offset)
u0_base = np.roll(self.u0_illum, offset ,axis=0)
#prepare the parallel scheme
max_NA = self.NA_illum if np.isscalar(self.NA_illum) else max(self.NA_illum)
if dx_parallel is None:
dx_parallel = 2*bpm.lam/max_NA
print("dslm prop with parallelize beams of distance dx = %s mu"%dx_paral
|
lel)
# the beamlet centers in the simul_xy coordinates
ind_step = int(np.ceil(1.*dx_parallel/bpm.dx))
        #make sure it's divisible by the grid size dimension
        ind_step = [i for i in range(ind_step, bpm.simul_xy[0]+1) if bpm.simul_xy[0]%i==0][0]
        if ind_step<=0:
            raise ValueError("dx resolution too coarse to propagate in parallel, please increase dx_parallel")
inds = np.arange(0,bpm.simul_xy[0],ind_step)
print(inds,ind_step, bpm.simul_xy)
u0 = np.sum([np.roll(u0_base,i,axis=1) for i in inds],axis=0)
u = None
# now scan
for i in range(ind_step):
print("propagating beamlets %s/%s"%(i+1,ind_step))
u_part = bpm.propagate(u0,**bpm_kwargs)
u = u_part if u is None else u+u_part
u0 = np.roll(u0,1,axis=1)
return self._trans_illum(u, inv = True)
if __name__ == '__main__':
dn = np.zeros((256,512,256))
signal = np.zeros_like(dn)
#some point sources
np.random.seed(0)
for _ in range(4000):
k,j,i = np.random.randint(dn.shape[0]),np.random.randint(dn.shape[1]),np.random.randint(dn.shape[2])
signal[k,j,i] = 1.
if not "m" in locals():
m = SimLSM_DSLM(dn = dn,
signal = signal,
NA_illum= .4,
NA_detect=.7,
units = (.4,)*3,
#simul_xy_detect=(512,512),
#simul_xy_illum=(512,1024),
)
u1 = m.propagate_illum(cz = -10)
u2 = m.propagate_illum(cz = 10)
h = m.psf((0.,0,0))
#
# im = m.simulate_image_z(cz=0, zslice=16,
# psf_grid_dim=(16,16),
# conv_sub_blocks=(8,8),
# conv_pad_factor=3,
# )
#
#
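
The beamlet spacing above is rounded up to whole grid cells and then grown until it divides the lateral grid size, so the rolled beam copies tile exactly; a small sketch of that selection with illustrative numbers:

```python
# Sketch of the beamlet-step selection in propagate_illum above: round
# dx_parallel up to whole grid cells, then take the first step that
# divides the lateral grid size. Numbers below are illustrative.
import numpy as np

def beamlet_step(dx_parallel, dx, nx):
    ind_step = int(np.ceil(1. * dx_parallel / dx))
    return [i for i in range(ind_step, nx + 1) if nx % i == 0][0]

# e.g. lam = 0.5 mu, NA = 0.4 -> dx_parallel = 2.5 mu -> 7 cells -> step 8
print(beamlet_step(dx_parallel=2 * 0.5 / 0.4, dx=0.4, nx=512))  # 8
```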
| yannrouillard/weboob | modules/regionsjob/__init__.py | Python | agpl-3.0 | 805 | 0 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .backend import RegionsjobBackend
__all__ = ['RegionsjobBackend']
| Venturi/cms | env/lib/python2.7/site-packages/aldryn_people/south_migrations/0021_auto_person_groups.py | Python | gpl-2.0 | 15,722 | 0.007696 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding SortedM2M table for field groups on 'Person'
db.create_table(u'aldryn_people_person_groups', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('person', models.ForeignKey(orm[u'aldryn_people.person'], null=False)),
('group', models.ForeignKey(orm[u'aldryn_people.group'], null=False)),
('sort_value', models.IntegerField())
))
db.create_unique(u'aldryn_people_person_groups', ['person_id', 'group_id'])
def backwards(self, orm):
# Removing M2M table for field groups on 'Person'
db.delete_table(db.shorten_name(u'aldryn_people_person_groups'))
models = {
u'aldryn_people.group': {
'Meta': {'object_name': 'Group'},
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'default': "u''", 'max_length': '75', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'aldryn_people.grouptranslation': {
'Meta': {'unique_together': "[(u'language_code', u'master')]", 'object_name': 'GroupTranslation', 'db_table': "u'aldryn_people_group_translation'"},
'description': ('djangocms_text_ckeditor.fields.HTMLField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
u'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Group']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': "u''", 'max_length': '255'})
},
u'aldryn_people.peopleplugin': {
'Meta': {'object_name': 'PeoplePlugin'},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'group_by_group': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'people': ('aldryn_common.admin_fields.sortedm2m.SortedM2MModelField', [], {'symmetrical': 'False', 'to': u"orm['aldryn_people.Person']", 'null': 'True', 'blank': 'True'}),
'show_links': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_vcard': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'style': ('django.db.models.fields.CharField', [], {'default': "u'standard'", 'max_length': '50'})
},
u'aldryn_people.person': {
'Meta': {'object_name': 'Person'},
'email': ('django.db.models.fields.EmailField', [], {'default': "u''", 'max_length': '75', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'persons'", 'null': 'True', 'to': u"orm['aldryn_people.Group']"}),
'groups': ('sortedm2m.fields.SortedManyToManyField', [], {'default': 'None', 'related_name': "u'people'", 'blank': 'True', 'symmetrical': 'False', 'to': u"orm['aldryn_people.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'persons'", 'unique': 'True', 'null': 'True', 'to': u"orm['auth.User']"}),
'vcard_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'visual': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['filer.Image']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'aldryn_people.persontranslation': {
'Meta': {'unique_together': "[(u'language_code', u'master')]", 'object_name': 'PersonTranslation', 'db_table': "u'aldryn_people_person_translation'"},
'description': ('djangocms_text_ckeditor.fields.HTMLField', [], {'default': "u''", 'blank': 'True'}),
'function': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
u'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['aldryn_people.Person']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_sup
| ChuanleiGuo/AlgorithmsPlayground | LeetCodeSolutions/python/209_Minimum_Size_Subarray_Sum.py | Python | mit | 564 | 0 |
class Solution(object):
def minSubArrayLen(self, s, nums):
"""
:type s: int
        :type nums: List[int]
:rtype: int
"""
MAX_INT = 2 ** 31 - 1
if not nums or len(nums) == 0:
return 0
i = j = n_sum = 0
min_len = MAX_INT
while j < len(nums):
n_sum += nums[j]
j += 1
while n_sum >= s:
min_len = min(min_len, j - i)
n_sum -= nums[i]
i += 1
return min_len if min_len != MAX_INT else 0
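
A quick sanity check of the sliding window above, using the classic example from the problem statement: the shortest subarray of [2,3,1,2,4,3] with sum >= 7 is [4,3]:

```python
# Sanity checks for the sliding-window solution above.
assert Solution().minSubArrayLen(7, [2, 3, 1, 2, 4, 3]) == 2
assert Solution().minSubArrayLen(100, [1, 2, 3]) == 0  # no qualifying subarray
```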
| lukas-hetzenecker/home-assistant | homeassistant/components/google_assistant/logbook.py | Python | apache-2.0 | 992 | 0.002016 |
"""De
|
scribe logbook events."""
from homeassistant.core import callback
from .const import DOMAIN, EVENT_COMMAND_RECEIVED, SOURCE_CLOUD
COMMON_COMMAND_PREFIX = "action.devices.commands."
@callback
def async_describe_events(hass, async_describe_event):
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event):
"""Describe a logbook event."""
commands = []
for command_payload in event.data["execution"]:
            command = command_payload["command"]
if command.startswith(COMMON_COMMAND_PREFIX):
command = command[len(COMMON_COMMAND_PREFIX) :]
commands.append(command)
message = f"sent command {', '.join(commands)}"
if event.data["source"] != SOURCE_CLOUD:
message += f" (via {event.data['source']})"
return {"name": "Google Assistant", "message": message}
async_describe_event(DOMAIN, EVENT_COMMAND_RECEIVED, async_describe_logbook_event)
| persandstrom/home-assistant | homeassistant/components/device_tracker/locative.py | Python | apache-2.0 | 4,098 | 0 |
"""
Support for the Locative platform.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.locative/
"""
import asyncio
from functools import partial
import logging
from homeassistant.const import (
ATTR_LATITUDE, ATTR_LONGITUDE, STATE_NOT_HOME, HTTP_UNPROCESSABLE_ENTITY)
from homeassistant.components.http import HomeAssistantView
# pylint: disable=unused-import
from homeassistant.components.device_tracker import ( # NOQA
DOMAIN, PLATFORM_SCHEMA)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['http']
URL = '/api/locative'
def setup_scanner(hass, config, see, discovery_info=None):
"""Set up an endpoint for the Locative application."""
    hass.http.register_view(LocativeView(see))
return True
class LocativeView(HomeAssistantView):
"""View to handle Locative requests."""
url = URL
name = 'api:locative'
def __init__(self, see):
"""Initialize Locative URL endpoints."""
self.see = see
@asyncio.coroutine
def get(self, request):
"""Locative message received as GET."""
res = yield from self._handle(request.app['hass'], request.query)
return res
@asyncio.coroutine
def post(self, request):
"""Locative message received."""
data = yield from request.post()
res = yield from self._handle(request.app['hass'], data)
return res
@asyncio.coroutine
def _handle(self, hass, data):
"""Handle locative request."""
if 'latitude' not in data or 'longitude' not in data:
return ('Latitude and longitude not specified.',
HTTP_UNPROCESSABLE_ENTITY)
if 'device' not in data:
_LOGGER.error('Device id not specified.')
return ('Device id not specified.',
HTTP_UNPROCESSABLE_ENTITY)
if 'trigger' not in data:
_LOGGER.error('Trigger is not specified.')
return ('Trigger is not specified.',
HTTP_UNPROCESSABLE_ENTITY)
if 'id' not in data and data['trigger'] != 'test':
_LOGGER.error('Location id not specified.')
return ('Location id not specified.',
HTTP_UNPROCESSABLE_ENTITY)
device = data['device'].replace('-', '')
location_name = data.get('id', data['trigger']).lower()
direction = data['trigger']
gps_location = (data[ATTR_LATITUDE], data[ATTR_LONGITUDE])
if direction == 'enter':
yield from hass.async_add_job(
partial(self.see, dev_id=device, location_name=location_name,
gps=gps_location))
return 'Setting location to {}'.format(location_name)
if direction == 'exit':
current_state = hass.states.get(
'{}.{}'.format(DOMAIN, device))
if current_state is None or current_state.state == location_name:
location_name = STATE_NOT_HOME
yield from hass.async_add_job(
partial(self.see, dev_id=device,
location_name=location_name, gps=gps_location))
return 'Setting location to not home'
# Ignore the message if it is telling us to exit a zone that we
# aren't currently in. This occurs when a zone is entered
# before the previous zone was exited. The enter message will
# be sent first, then the exit message will be sent second.
return 'Ignoring exit from {} (already in {})'.format(
location_name, current_state)
if direction == 'test':
# In the app, a test message can be sent. Just return something to
# the user to let them know that it works.
return 'Received test message.'
_LOGGER.error('Received unidentified message from Locative: %s',
direction)
return ('Received unidentified message: {}'.format(direction),
HTTP_UNPROCESSABLE_ENTITY)
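# Illustrative request (hypothetical values, not part of the original file):
#   GET /api/locative?latitude=47.0&longitude=8.5&device=my-phone&id=Home&trigger=enter
# is handled above as see(dev_id='myphone', location_name='home',
# gps=('47.0', '8.5')) and answered with 'Setting location to home'.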
|
nanshihui/PocCollect
|
middileware/resin/__init__.py
|
Python
|
mit
| 123
| 0.073171
|
KEYWORDS = ['resin', ]
def rules(head='',context='',ip='',port='',productname={},keywords='',hackinfo=''):
return False
|
lwz7512/logtoeye
|
dashboard/reportor.py
|
Python
|
apache-2.0
| 9,877
| 0.000405
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# format keyshort in sublime text2: ctrl+shift+alt+t
# powered by SublimePythonTidy
__author__ = 'lwz'
from WebElements.Display import Label
from WebElements.Layout import Horizontal
from WebElements.DOM import Div, Img
from uiwiget import CompleteDom, RoundCornerPanel, CustomHeaderTable
import models
def create():
page = CompleteDom()
page.setTitle('Logtoeye Report Generated by WebElements')
header_bar = Div()
header_bar.addClass('header')
page.addChildElement(header_bar)
topic_name = Label()
topic_name.setProperty('text', 'Nginx Status Daily Report')
topic_name.addClass('logo-text')
header_bar.addChildElement(topic_name)
sub_topic_ctnr = Div()
sub_topic_ctnr.addClass('subtopic-ctnr')
header_bar.addChildElement(sub_topic_ctnr)
create_date = Label()
create_date.setProperty('text', '2013-08-14')
create_date.addClass('white-label')
sub_topic_ctnr.addChildElement(create_date)
visit_hori_box = Horizontal()
visit_hori_box.addClass('center-row')
page.addChildElement(visit_hori_box)
visit_round_div = RoundCornerPanel('Visit Volume Last Day')
visit_label = Label()
visit_label.setProperty('text', '1000/ip') # ???
visit_label.addClass('green-big-label')
visit_round_div.addChildElement(visit_label)
site_label = Label()
site_label.addClass('black-big-label')
site_label.setProperty('text', 'www.logtoeye.com')
visit_round_div.addChildElement(site_label)
visit_hori_box.addChildElement(visit_round_div)
img_box = Div()
img_box.addClass('img-placeholder')
last_7day_visit_tendency = Img()
last_7day_visit_tendency.setProperty('alt', 'last 7 day visit tendency...')
last_7day_visit_tendency.setImage('/static/cache/xxx.png') # ???
img_box.addChildElement(last_7day_visit_tendency)
visit_hori_box.addChildElement(img_box)
request_title_row = Horizontal()
request_title_row.addClass('center-row')
request_title_row.setStyleFromString('margin-top: 10px;')
page.addChildElement(request_title_row)
visitor_title = Label()
visitor_title.setProperty('text', 'Top10 Region of visitors')
visitor_title.addClass('black-big-label')
visitor_title.setStyleFromString('padding-left:20px')
request_title_row.addChildElement(visitor_title)
url_title = Label()
url_title.setProperty('text', 'Top10 Url of visitors')
url_title.addClass('black-big-label')
url_title.setStyleFromString('padding-left:60px')
request_title_row.addChildElement(url_title)
request_title = Label()
request_title.setProperty('text', 'Top10 Request time of url in ms/s')
request_title.addClass('black-big-label')
request_title.setStyleFromString('padding-left:100px')
request_title_row.addChildElement(request_title)
request_grid_row = Horizontal()
request_grid_row.addClass('center-row')
page.addChildElement(request_grid_row)
table = CustomHeaderTable('region_table') # top 10 region table
table.setStyleFromString('width:256px; padding-left:12px')
request_grid_row.addChildElement(table)
table.addHeader('country', 'Country', '40%')
table.addHeader('city', 'City', '40%')
table.addHeader('amount', 'Amount', '30%')
for i in range(10): # ???
row = table.addRow()
row.cell('country').setText('America')
row.cell('city').setText('Chicago')
row.cell('amount').setText('10')
table = CustomHeaderTable('url_table') # top 10 url table
table.setStyleFromString('width:276px; padding-left:12px')
request_grid_row.addChildElement(table)
table.addHeader('url', 'URL', '80%')
table.addHeader('amount', 'Amount', '20%')
for i in range(10): # ???
row = table.addRow()
row.cell('url').setText('/dashborad/preview')
row.cell('amount').setText('10')
table = CustomHeaderTable('request_table') # top 10 request time table
table.setStyleFromString('width:350px; padding-left:12px')
request_grid_row.addChildElement(table)
table.addHeader('url', 'URL', '50%')
table.addHeader('time', 'Time', '30%')
table.addHeader('length', 'Length', '20%')
for i in range(10): # ???
row = table.addRow()
row.cell('url').setText('/dashborad/preview')
row.cell('time').setText('02 00:00:00')
row.cell('length').setText('10')
alert_panel_row = Horizontal()
alert_panel_row.addClass('center-row')
    alert_panel_row.setStyleFromString('margin-top: 20px;')
page.addChildElement(alert_panel_row)
alert_volume = RoundCornerPanel('Alert Volume Last Day', 220, 120)
volume_label = Label()
volume_label.setProperty('text', '1000') # ???
volume_label.addClass('green-big-label')
alert_volume.addChildElement(volume_label)
alert_panel_row.addChildElement(alert_volume)
    alert_statistic = RoundCornerPanel(' Alert-1 | Crit-2 | Error-3 | Warn-4 ', 280, 120)
alert_panel_row.addChildElement(alert_statistic)
label_row = Horizontal()
label_row.setStyleFromString('padding-top:16px;padding-left:24px')
alert_statistic.addChildElement(label_row)
alert_label = Label()
alert_label.setProperty('text', '10') # ???
alert_label.addClass('red-big-label')
label_row.addChildElement(alert_label)
crit_label = Label()
crit_label.setProperty('text', '10') # ???
crit_label.addClass('orange-big-label')
label_row.addChildElement(crit_label)
error_label = Label()
error_label.setProperty('text', '10') # ???
error_label.addClass('yellow-big-label')
label_row.addChildElement(error_label)
warn_label = Label()
warn_label.setProperty('text', '10') # ???
warn_label.addClass('blue-big-label')
label_row.addChildElement(warn_label)
unprocessed_stat = RoundCornerPanel(' Unprocessed Alerts by each Level ', 330, 120)
alert_panel_row.addChildElement(unprocessed_stat)
number_row = Horizontal()
number_row.setStyleFromString('padding-top:16px;padding-left:44px')
unprocessed_stat.addChildElement(number_row)
alert_label = Label()
alert_label.setProperty('text', '10') # ???
alert_label.addClass('red-big-label')
number_row.addChildElement(alert_label)
crit_label = Label()
crit_label.setProperty('text', '10') # ???
crit_label.addClass('orange-big-label')
number_row.addChildElement(crit_label)
error_label = Label()
error_label.setProperty('text', '10') # ???
error_label.addClass('yellow-big-label')
number_row.addChildElement(error_label)
warn_label = Label()
warn_label.setProperty('text', '10') # ???
warn_label.addClass('blue-big-label')
number_row.addChildElement(warn_label)
alert_occur_row = Horizontal()
alert_occur_row.addClass('center-row')
alert_occur_row.setStyleFromString('margin-top: 10px;width:300px')
page.addChildElement(alert_occur_row)
occur_title = Label()
occur_title.setProperty('text', 'Top10 Occurence of alerts')
occur_title.addClass('black-big-label')
alert_occur_row.addChildElement(occur_title)
alert_grid_row = Horizontal()
alert_grid_row.addClass('center-row')
page.addChildElement(alert_grid_row)
table = CustomHeaderTable('alert_table') # top 10 occurence of alert
table.setStyleFromString('width:98%;padding-left:12px')
alert_grid_row.addChildElement(table)
table.addHeader('time', 'Time', '10%')
table.addHeader('occurence', 'Occurence', '10%')
table.addHeader('level', 'Level', '10%')
table.addHeader('title', 'Title', '20%')
table.addHeader('cause', 'Cause', '25%')
table.addHeader('details', 'Details', '25%')
for i in range(10): # ???
row = table.addRow()
row.cell('time').setText('02 00:00:00')
row.cell('occurence').setText('1')
row.cell('level').setText('1')
row.cell('title').setText('xxx')
row.cell('cause').setText('xxxxxx')
row.cell('details').setText('xxxxxx')
nginx_hori_row = Horizontal()
nginx_hori_row.addClass('center-row')
nginx_hori_row.setStyleFromString('margin-top
|
rigetticomputing/pyquil
|
pyquil/latex/latex_generation.py
|
Python
|
apache-2.0
| 1,266
| 0.00158
|
##############################################################################
# Copyright 2016-2019 Rigetti Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import warnings
from typing import Optional
from pyquil.latex._diagram import DiagramSettings
from pyquil.quil import Program
def to_latex(circuit: Program, settings: Optional[DiagramSettings] = None) -> str:
from pyquil.latex._main import to_latex
warnings.warn(
'"pyquil.latex
|
.latex_generation.to_latex" has been moved -- please import it'
'as "from pyquil.latex import to_latex going forward"',
FutureWarning,
)
return to_latex(circuit, settings)
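# Illustrative usage (a minimal sketch, not part of the original file):
#   from pyquil import Program
#   from pyquil.gates import H
#   latex_src = to_latex(Program(H(0)))  # emits the FutureWarning, then delegates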
|
FrodeSolheim/fs-uae-launcher
|
fsgamesys/files/installablefiles.py
|
Python
|
gpl-2.0
| 36
| 0
|
from .types import InstallableFiles
|
dmpetrov/dataversioncontrol
|
tests/unit/test_prompt.py
|
Python
|
apache-2.0
| 339
| 0
|
from dvc.prompt import confirm
def test_confirm_in_tty_if_stdin_is_closed(mocker):
mock_input = mocker.patch("dvc.prompt.input", side_effect=EOFError)
mock_isatty = mocker.patch("sys.stdout.
|
isatty", return_value=True)
    ret = confirm("message")
mock_isatty.assert_called()
mock_input.assert_called()
assert not ret
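# A companion sketch (hypothetical test, not part of the original file): with
# a non-TTY stdout, confirm() is expected to fall back to False without ever
# prompting.
def test_confirm_not_in_tty_sketch(mocker):
    mock_input = mocker.patch("dvc.prompt.input")
    mocker.patch("sys.stdout.isatty", return_value=False)
    assert not confirm("message")
    mock_input.assert_not_called()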
|
betrisey/home-assistant
|
homeassistant/components/climate/ecobee.py
|
Python
|
mit
| 9,405
| 0
|
"""
Platform for Ecobee Thermostats.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.ecobee/
"""
import logging
from os import path
import voluptuous as vol
from homeassistant.components import ecobee
from homeassistant.components.climate import (
DOMAIN, STATE_COOL, STATE_HEAT, STATE_IDLE, ClimateDevice,
ATTR_TARGET_TEMP_LOW, ATTR_TARGET_TEMP_HIGH)
from homeassistant.const import (
ATTR_ENTITY_ID, STATE_OFF, STATE_ON, TEMP_FAHRENHEIT)
from homeassistant.config import load_yaml_config_file
import homeassistant.helpers.config_validation as cv
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
ATTR_FAN_MIN_ON_TIME = 'fan_min_on_time'
DEPENDENCIES = ['ecobee']
SERVICE_SET_FAN_MIN_ON_TIME = 'ecobee_set_fan_min_on_time'
SET_FAN_MIN_ON_TIME_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_FAN_MIN_ON_TIME): vol.Coerce(int),
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Ecobee Thermostat Platform."""
if discovery_info is None:
return
data = ecobee.NETWORK
hold_temp = discovery_info['hold_temp']
_LOGGER.info(
"Loading ecobee thermostat component with hold_temp set to %s",
hold_temp)
devices = [Thermostat(data, index, hold_temp)
for index in range(len(data.ecobee.thermostats))]
add_devices(devices)
def fan_min_on_time_set_service(service):
"""Set the minimum fan on time on the target thermostats."""
entity_id = service.data.get('entity_id')
if entity_id:
target_thermostats = [device for device in devices
if device.entity_id == entity_id]
else:
target_thermostats = devices
fan_min_on_time = service.data[ATTR_FAN_MIN_ON_TIME]
for thermostat in target_thermostats:
thermostat.set_fan_min_on_time(str(fan_min_on_time))
thermostat.update_ha_state(True)
descriptions = load_yaml_config_file(
path.join(path.dirname(__file__), 'services.yaml'))
hass.services.register(
DOMAIN, SERVICE_SET_FAN_MIN_ON_TIME, fan_min_on_time_set_service,
descriptions.get(SERVICE_SET_FAN_MIN_ON_TIME),
schema=SET_FAN_MIN_ON_TIME_SCHEMA)
# pylint: disable=too-many-public-methods, abstract-method
class Thermostat(ClimateDevice):
"""A thermostat class for Ecobee."""
def __init__(self, data, thermostat_index, hold_temp):
"""Initialize the thermostat."""
self.data = data
self.thermostat_index = thermostat_index
self.thermostat = self.data.ecobee.get_thermostat(
self.thermostat_index)
self._name = self.thermostat['name']
self.hold_temp = hold_temp
self._operation_list = ['auto', 'auxHeatOnly', 'cool',
'heat', 'off']
self.update_without_throttle = False
def update(self):
"""Get the latest state from the thermostat."""
if self.update_without_throttle:
self.data.update(no_throttle=True)
self.update_without_throttle = False
else:
self.data.update()
self.thermostat = self.data.ecobee.get_thermostat(
self.thermostat_index)
@property
def name(self):
"""Return the name of the Ecobee Thermostat."""
return self.thermostat['name']
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def current_temperature(self):
"""Return the current temperature."""
return self.thermostat['runtime']['actualTemperature'] / 10
@property
def target_temperature_low(self):
"""Return the lower bound temperature we try to reach."""
return int(self.thermostat['runtime']['desiredHeat'] / 10)
@property
def target_temperature_high(self):
"""Return the upper bound temperature we try to reach."""
return int(self.thermostat['runtime']['desiredCool'] / 10)
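    # Illustrative note (hypothetical payload, not part of the original file):
    # ecobee reports temperatures in tenths of a degree Fahrenheit, so runtime
    # values such as {'actualTemperature': 725, 'desiredHeat': 680,
    # 'desiredCool': 740} surface as 72.5 current, 68 low and 74 high.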
@property
def desired_fan_mode(self):
"""Return the desired fan mode of operation."""
return self.thermostat['runtime']['desiredFanMode']
@property
def fan(self):
"""Return the current fan state."""
if 'fan' in self.thermostat['equipmentStatus']:
return STATE_ON
else:
return STATE_OFF
@property
def current_operation(self):
"""Return current operation."""
if self.operation_mode == 'auxHeatOnly' or \
self.operation_mode == 'heatPump':
return STATE_HEAT
else:
return self.operation_mode
@property
def operation_list(self):
"""Return the operation modes list."""
return self._operation_list
@property
def operation_mode(self):
"""Return current operation ie. heat, cool, idle."""
return self.thermostat['settings']['hvacMode']
@property
def mode(self):
"""Return current mode ie. home, away, sleep."""
        return self.thermostat['program']['currentClimateRef']
@property
def fan_min_on_time(self):
"""Return current fan minimum on time."""
return self.thermostat['settings']['fanMinOnTime']
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
# Move these to Thermostat Device and make them global
status = self.thermostat['equipmentStatus']
operation = None
if status == '':
operation = STATE_IDLE
elif 'Cool' in status:
operation = STATE_COOL
elif 'auxHeat' in status:
operation = STATE_HEAT
elif 'heatPump' in status:
operation = STATE_HEAT
else:
operation = status
return {
"actual_humidity": self.thermostat['runtime']['actualHumidity'],
"fan": self.fan,
"mode": self.mode,
"operation": operation,
"fan_min_on_time": self.fan_min_on_time
}
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
mode = self.mode
events = self.thermostat['events']
for event in events:
if event['running']:
mode = event['holdClimateRef']
break
return 'away' in mode
def turn_away_mode_on(self):
"""Turn away on."""
if self.hold_temp:
self.data.ecobee.set_climate_hold(self.thermostat_index,
"away", "indefinite")
else:
self.data.ecobee.set_climate_hold(self.thermostat_index, "away")
self.update_without_throttle = True
def turn_away_mode_off(self):
"""Turn away off."""
self.data.ecobee.resume_program(self.thermostat_index)
self.update_without_throttle = True
def set_temperature(self, **kwargs):
"""Set new target temperature."""
if kwargs.get(ATTR_TARGET_TEMP_LOW) is not None and \
kwargs.get(ATTR_TARGET_TEMP_HIGH) is not None:
high_temp = int(kwargs.get(ATTR_TARGET_TEMP_LOW))
low_temp = int(kwargs.get(ATTR_TARGET_TEMP_HIGH))
if self.hold_temp:
self.data.ecobee.set_hold_temp(self.thermostat_index, low_temp,
high_temp, "indefinite")
_LOGGER.debug("Setting ecobee hold_temp to: low=%s, is=%s, "
"high=%s, is=%s", low_temp, isinstance(
low_temp, (int, float)), high_temp,
isinstance(high_temp, (int, float)))
else:
self.data.ecobee.set_hold_temp(self.thermostat_index, low_temp,
high_temp)
_LOGGER.debug("Setting ecobee temp to: low=%s, is=%s, "
"high=%s, is=%s", low_temp, isinstance(
                                  low_temp, (int, float)), high_temp,
                                  isinstance(high_temp, (int, float)))
|
ktan2020/legacy-automation
|
win/Lib/site-packages/sst/__init__.py
|
Python
|
mit
| 1,057
| 0.000946
|
#!/usr/bin/env python
#
# Copyright (c) 2011 Canonical Ltd.
#
# This file is part of: SST (selenium-simple-test)
# https://launchpad.net/selenium-simple-test
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__all__ = ['runtests']
__version__ = '0.2.2'
try:
from .runtests import runtests
except ImportError as e:
# Selenium not installed
# this means we can import the __version__
# for setup.py when we install, without
# *having* to install selenium first
def runtests(*args, **kwargs):
raise e
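# Illustrative behaviour (a sketch, not part of the original file): without
# selenium installed the module still imports cleanly,
#   import sst
#   sst.__version__      # '0.2.2'
#   sst.runtests()       # only now re-raises the deferred ImportError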
|
brianhawthorne/maml
|
maml/test/test_parser.py
|
Python
|
gpl-3.0
| 2,468
| 0.002431
|
"""
This file is part of Maml.
Maml is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Maml is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Maml. If not, see <http://www.gnu.org/licenses/>.
Copyright 2010 Brian Hawthorne
"""
from unittest import TestCase
from maml.parser import *
example1 = """
-def A(z)
%ul
%li
%html
%body
%h3
"""
example2 = """
-def A(z)
%ul
-for x in range(z)
.list-item#item_id
= x
foo
%html
%body
%h3 yup
= A(6)
"""
class TestParser (TestCase):
def test_tag_attrs(self):
good_results = {
'()': ('(', '', ')'),
'{}': ('{', '', '}'),
'(borp="baz" dorp="daz" blarp="blaz")':
('(', 'borp="baz" dorp="daz" blarp="blaz"', ')'),
'{borp:"baz", dorp:"daz", blarp:"blaz"}':
('{', 'borp:"baz", dorp:"daz", blarp:"blaz"', '}'),
}
for input, output in good_results.items():
self.assertEqual(tuple(tag_attrs.parseString(input)), output)
def test_tag_decl(self):
good_results = {
'%html':
('%', 'html', ''),
'%html foo':
('%', 'html', 'foo'),
'%html= foo':
('%', 'html', '=', 'foo'),
'%html()= foo':
('%', 'html', '(', '', ')', '=', 'foo'),
'%html.class-name()= foo':
('%', 'html', '.', 'class-name', '(', '', ')', '=', 'foo'),
'%html.class-name(borp="baz")= foo':
('%', 'html', '.', 'class-name', '(', 'borp="baz"', ')', '=', 'foo'),
'#foo.boo':
('#', 'foo', '.', 'boo', ''),
'.foo(){}':
('.', 'foo', '(', '', ')', '{', '', '}', ''),
}
for input, output in good_results.items():
self.assertEqual(tuple(tag_decl.parseString(input)), output)
def test_namespace(self):
namespace_example = "-namespace(/common/defs.mak, bnorp)"
assert Parser().parse(namespace_example).render_string()
|
sotondriver/Lego_classification
|
src/boxCap.py
|
Python
|
mit
| 6,567
| 0.014314
|
""" Capturing and analyzing the box information
Author: Lyu Yaopengfei
Date: 23-May-2016
"""
import cv2
import threading
import time
from PIL import Image
import Lego.dsOperation as dso
import Lego.imgPreprocessing as imgprep
from Lego.ocr import tesserOcr
capImg = None
resImg = None
stopFlat = 0
lock = threading.Lock()
def capFrame(cap):
global capImg
global stopFlat
while(1):
lock.acquire()
try:
_,capImg = cap.read()
finally:
lock.release()
if (stopFlat > 0):
break
clickCnt = 0
clickFlag = 0
def detect_circle(event,x,y,flags,param):
global clickFlag
if event==cv2.EVENT_LBUTTONUP:
clickFlag = clickFlag+1
elif event==cv2.EVENT_RBUTTONUP:
clickFlag = -1
# lock.acquire()
# try:
# cv2.imwrite('cap.png',capImg)
# finally:
# clickCnt = clickCnt+1
# lock.release()
# detect the useful information from the selected image
def detectImg(logoAffinePos,img,idx):
_, _, _, _, affinedCropedImg, rtnFlag = logoAffinePos.rcvAffinedAll(img)
if (rtnFlag is False):
return None,None,None,False
filtedCroped = imgprep.imgFilter(affinedCropedImg)
filtedCroped = cv2.cvtColor(filtedCroped,cv2.COLOR_GRAY2RGB)
filtedCropedPIL = Image.fromarray(filtedCroped)
numStr = tesserOcr(filtedCropedPIL)
return affinedCropedImg,filtedCroped,numStr,True
def analyseBoxInfo(bds,imgfolder):
maxCnt = 0
tempCnt = 0
tempNumSet = set(bds.tempNumList)
bds.setImgFolder(imgfolder)
for item in tempNumSet:
tempCnt = bds.tempNumList.count(item)
if(tempCnt > maxCnt):
maxCnt = tempCnt
bds.number = item
def exportLog(lf, expStr):
print(expStr)
expStr = expStr+'\n'
lf.writelines(expStr)
if __name__ == '__main__':
bxnm = input('Input the box name: ')
# time.strftime('%Y-%m-%d-%H%M%S',time.localtime(time.time()))
bx1 = dso.boxds(bxnm)
settingInfo = open('../data/setting','r')
settingInfo.readline()
PATH = settingInfo.readline().strip().lstrip().rstrip(',')
DATAPATH = settingInfo.readline().strip().lstrip().rstrip(',')
FEATURE_IMG_FOLDER = settingInfo.readline().strip().lstrip().rstrip(',')
MATERIAL_IMG_FOLDER = settingInfo.readline().strip().lstrip().rstrip(',')
BOX_DATA_PATH = settingInfo.readline().strip().lstrip().rstrip(',')
LOG_PATH = settingInfo.readline().strip().lstrip().rstrip(',')
curTime = time.strftime('%Y-%m-%d-%H%M%S',time.localtime(time.time()))
LOG_PATH = LOG_PATH+curTime+bx1.boxname+'.log'
logFile = open(LOG_PATH,'w+')
boxData = open(BOX_DATA_PATH,'r')
logoTp = cv2.imread(MATERIAL_IMG_FOLDER + 'purelogo256.png')
logoAffinePos = imgprep.LogoAffinePos(logoTp)
cv2.namedWindow('capFrame')
cv2.setMouseCallback('capFrame',detect_circle)
VWIDTH = 1280
VHIGH = 720
cap = cv2.VideoCapture(0)
cap.set(3,VWIDTH)
cap.set(4,VHIGH)
cap.read();cap.read();cap.read()
tCapFrame = threading.Thread(target=capFrame, args=(cap,))
tCapFrame.start()
while(capImg is None):
pass
dtrtnFlag = False
showFlag = 0
while(1):
if ((cv2.waitKey(1) & 0xFF == 27) | (clickCnt>=6) ):
stopFlat = 1
break
resImg = capImg.copy()
showImg = resImg.copy()
logoContourPts,logoContour,rtnFlag = logoAffinePos.extLegoLogo(resImg, minArea=5000)
if (rtnFlag is True):
# draw contour we finding
cv2.drawContours(showImg, [logoContourPts], -1, (0,255,0), 2)
cPts,rtnFlag = logoAffinePos.extQuadrangleCpts(logoContourPts, logoContour)
if (rtnFlag is True):
# draw corner points we finding
for idx, cPt in enumerate(cPts):
cPt = cPt.flatten()
ptsize = int(logoAffinePos.estLength/20)
showImg[cPt[1]-ptsize:cPt[1]+ptsize,cPt[0]-ptsize:cPt[0]+ptsize,:] = [255,255,0]
showImg = cv2.resize(showImg,(0,0),fx=0.4,fy=0.4)
# right click, discard the data and re-capturing
if(clickFlag < 0):
clickFlag = 0
exportLog(logFile, 'Data was discarded')
cv2.destroyWindow('filted')
# capturing image
        if(clickFlag == 0):
dtrtnFlag = False
showFlag = 0
cv2.putText(showImg,'Capturing '+bx1.boxname+'_'+dso.SUF_DEF[clickCnt]+' picture',(10,250), cv2.FONT_HERSHEY_SIMPLEX, 0.5,(0,255,255),1)
        # first time left click: detect the image and output the result
        elif(clickFlag == 1):
if(dtrtnFlag is False):
                affinedCropedImg,filtedCroped,numStr,dtrtnFlag = detectImg(logoAffinePos,resImg,clickCnt)
if(dtrtnFlag is False):
# if detect result is False, set clickFlag 0, re-capturing
clickFlag = 0
exportLog(logFile, 'Detecting fault, re-capturing')
elif(dtrtnFlag is True):
cv2.imshow('filted',filtedCroped)
cv2.moveWindow('filted',50+int(0.4*VWIDTH),50)
                exportLog(logFile, bx1.boxname+'_'+dso.SUF_DEF[clickCnt]+' OCR: '+str(numStr))
dtrtnFlag = None
else:
cv2.putText(showImg,'Do you save this result? Lclick Save, Rclick Discard',(10,250), cv2.FONT_HERSHEY_SIMPLEX, 0.5,(0,255,255),1)
        elif(clickFlag == 2):
exportLog(logFile, 'Saving '+bx1.boxname+'_'+dso.SUF_DEF[clickCnt]+' data')
imgName = bx1.boxname+'_'+str(clickCnt)+'.tiff'
savingPath = FEATURE_IMG_FOLDER + imgName
savingPath2 = FEATURE_IMG_FOLDER + 'color/c' + imgName
cv2.imwrite(savingPath, filtedCroped)
cv2.imwrite(savingPath2, affinedCropedImg)
bx1.setSingleFeatureImgsName(dso.SUF_DEF[clickCnt], imgName)
exportLog(logFile, '--------Finish capturing--------\n')
if(numStr is not None):
bx1.appendTempNumList(numStr)
clickCnt = clickCnt + 1
clickFlag = 0
cv2.destroyWindow('filted')
else:
clickFlag = 0
cv2.destroyWindow('filted')
cv2.imshow('capFrame',showImg)
analyseBoxInfo(bx1,FEATURE_IMG_FOLDER)
dso.dsWrite(BOX_DATA_PATH,bx1)
print('\n')
logFile.close()
boxData.close()
cap.release()
cv2.destroyAllWindows()
|
holmes-app/holmes-api
|
holmes/validators/meta_tags.py
|
Python
|
mit
| 2,813
| 0.000355
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from holmes.validators.base import Validator
from holmes.utils import _
class MetaTagsValidator(Validator):
@classmethod
def get_violation_definitions(cls):
return {
'absent.metatags': {
'title': _('Meta tags not present'),
'description': _(
'No meta tags found on this page. This is damaging for '
'Search Engines.'
),
'category': _('HTTP'),
'generic_description': _(
'Validates the presence of metatags. They are important '
'to inform metadata about the HTML document. '
'The absent of metatags are damaging for search '
'engines. Meta elements are typically used to specify '
'page description, keywords, author of the document, last '
'modified, and other metadata.'
)
},
'page.metatags.description_too_big': {
'title': _('Maximum size of description meta tag'),
'description': _(
                    'The meta description tag is longer than %(max_size)s '
'characters. It is best to keep meta descriptions '
'shorter for better indexing on search engines.'
),
'category': _('SEO'),
'generic_description': _(
'Validates the size of a description metatag. It is best '
'to keep meta descriptions shorter for better indexing on '
'search engines. This limit is configurable by Holmes '
'Configuration.'
),
'unit': 'number'
}
}
@classmethod
def get_default_violations_values(cls, config):
return {
'page.metatags.description_too_big': {
'value': config.METATAG_DESCRIPTION_MAX_SIZE,
'description': config.get_description('METATAG_DESCRIPTION_MAX_SIZE')
}
}
def validate(self):
max_size = self.get_violation_pref('page.metatags.description_too_big')
meta_tags = self.review.data.get('meta.tags', None)
        if not meta_tags:
            self.add_violation(
                key='absent.metatags',
                value='No metatags.',
                points=100
            )
            return
for mt in meta_tags:
if mt['key'] == 'description' and len(mt['content']) > max_size:
self.add_violation(
key='page.metatags.description_too_big',
value={'max_size': max_size},
points=20
)
break
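# Illustrative example (hypothetical review data, not part of the original
# file): with a configured max size of 300, review data such as
#   {'meta.tags': [{'key': 'description', 'content': 'x' * 500}]}
# adds one 'page.metatags.description_too_big' violation (20 points), while a
# missing or empty tag list adds 'absent.metatags' (100 points).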
|
Shoufu/Scrapy-Openlaw
|
openlaw/spiders/test_splash.py
|
Python
|
mit
| 538
| 0.001859
|
# -*- coding: utf-8 -*-
import scrapy
import requests
from PIL import Image
from scrapy_splash import SplashRequest
class OpenLawSpider(scrapy.Spider):
name = 'test_splash'
allowed_domains = ['openlaw.cn']
start_urls = [
'http://openlaw.cn/search/judgement/court?zoneName=%E5%8C%97%E4%BA%AC%E5%B8%82'
]
    def start_requests(self):
for url in self.start_urls:
yield SplashRequest(url, self.parse)
def parse(self, response):
print 'Parse Response: '
print response.body
|
sassoftware/jobslave
|
jobslave_test/keytest.py
|
Python
|
apache-2.0
| 5,039
| 0.00258
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os, sys
import tempfile
from jobslave.generators import installable_iso
from jobslave_test.jobslave_helper import JobSlaveHelper
from conary import versions
from conary.repository import changeset
from conary import trove
from conary.deps import deps
from conary.lib import openpgpfile, util
TROVE_NAME = 'group-dummy'
TROVE_VERSION = versions.VersionFromString('/test.rpath.local@rpl:devel/1-1-1')
TROVE_FLAVOR = deps.parseFlavor('is: x86')
class DummyTroveInfo(object):
def __init__(self):
self.sigs = self
self.digitalSigs = self
def iter(self):
for base in ('0123456789', '9876543210'):
yield [4 * base]
class DummyVersion(object):
def __init__(self):
self.v = self
def trailingLabel(self):
return 'test.rpath.local@rpl:devel'
class DummyTrove(object):
def __init__(self, *args, **kwargs):
self.version = DummyVersion()
self.troveInfo = DummyTroveInfo()
def getName(self):
return TROVE_NAME
def getVersion(self):
return TROVE_VERSION
def getFlavor(self):
return TROVE_FLAVOR
def count(self, *args, **kwargs):
return 0
class DummyChangeSet(object):
def __init__(self, *args, **kwargs):
pass
def iterNewTroveList(self):
return [DummyTrove()]
class DummyRepos(object):
def findTrove(self, *args, **kwargs):
raise NotImplementedError
def getTrove(self, *args, **kwargs):
return DummyTrove()
def walkTroveSet(self, *args, **kwargs):
yield DummyTrove()
def getAsciiOpenPGPKey(self, label, fp):
if fp == 4 * '0123456789':
return ''
raise openpgpfile.KeyNotFound(fp)
class DummyConfig(object):
flavor = []
class DummyClient(object):
def __init__(self):
self.repos = DummyRepos()
self.cfg = DummyConfig()
class DummyFlavor(object):
def freeze(self):
return '1#x86'
class DummyBuild(object):
def getArchFlavor(self):
return DummyFlavor()
class DummyIso(installable_iso.InstallableIso):
def __init__(self):
self.statusList = []
self.conaryClient = DummyClient()
self.build = DummyBuild()
self.troveName = TROVE_NAME
self.troveVersion = TROVE_VERSION
self.troveFlavor = TROVE_FLAVOR
self.baseFlavor = self.troveFlavor
self.isocfg = self.configObject
def status(self, status):
self.statusList.append(status)
def getConaryClient(self, *args, **kwargs):
return self.conaryClient
class KeyTest(JobSlaveHelper):
def setUp(self):
JobSlaveHelper.setUp(self)
self._call = installable_iso.call
installable_iso.call = lambda *args, **kwargs: None
def tearDown(self):
installable_iso.call = self._call
JobSlaveHelper.tearDown(self)
def testMissingKey(self):
DummyRepos.findTrove = lambda *args, **kwargs: (('', '', ''),)
        d = DummyIso()
csdir = tempfile.mkdtemp()
logFd, logFile = tempfile.mkstemp()
oldErr = os.dup(sys.stderr.fileno())
os.dup2(logFd, sys.stderr.fileno())
os.close(logFd)
ChangeSetFromFile = changeset.ChangeSetFromFile
Trove = trove.Trove
try:
f = open(os.path.join(csdir, 'test.ccs'), 'w')
f.write('')
f.close()
changeset.ChangeSetFromFile = DummyChangeSet
trove.Trove = DummyTrove
try:
d.extractPublicKeys('', '', csdir)
except RuntimeError:
pass
else:
self.fail('Missing keys did not raise runtime error')
finally:
trove.Trove = Trove
changeset.ChangeSetFromFile = ChangeSetFromFile
os.dup2(oldErr, sys.stderr.fileno())
os.close(oldErr)
util.rmtree(csdir)
util.rmtree(logFile)
def testFoundAll(self):
DummyRepos.findTrove = lambda *args, **kwargs: (('', '', ''),)
d = DummyIso()
getAsciiOpenPGPKey = DummyRepos.getAsciiOpenPGPKey
csdir = tempfile.mkdtemp()
try:
DummyRepos.getAsciiOpenPGPKey = lambda *args : ''
d.extractPublicKeys('', '', csdir)
finally:
DummyRepos.getAsciiOpenPGPKey = getAsciiOpenPGPKey
util.rmtree(csdir)
assert d.statusList == ['Extracting Public Keys']
|
renalreg/radar
|
radar/api/views/nurture_tubes.py
|
Python
|
agpl-3.0
| 1,651
| 0.002423
|
from flask import jsonify, request
from radar.api.serializers.nurture_tubes import OptionSerializer, SamplesSerializer
from radar.api.views.common import (
PatientObjectDetailView,
PatientObjectListView,
)
from radar.api.views.generics import ListModelView
from radar.database import db
from radar.exceptions import BadRequest
from radar.models.nurture_tubes import PROTOCOL_OPTION_TYPE, SampleOption, Samples
from radar.utils import camel_case_keys
class SamplesListView(PatientObjectListView):
serializer_class = SamplesSerializer
model_class = Samples
def create(self, *args, **kwargs):
json = request.get_json()
if json is None:
raise BadRequest()
json['protocol'] = PROTOCOL_OPTION_TYPE(json['protocol'])
serializer = self.get_serializer(data=json)
serializer.is_valid(raise_exception=True)
obj = serializer.save()
db.session.add(obj)
db.session.commit()
data = serializer.data
data = camel_case_keys(data)
return jsonify(data), 200
class SamplesDetailView(PatientObjectDetailView):
serializer_class = SamplesSerializer
model_class = Samples
class SamplesProtocolOptions(ListModelView):
serializer_class = OptionSerializer
model_class = SampleOption
def register_views(app):
app.add_url_rule('/samples', view_func=SamplesListView.as_view('samples_list'))
app.add_url_rule('/samples/<id>', view_func=SamplesDetailView.as_view('samples_detail'))
app.add_url_rule(
'/samples-protocol-options',
        view_func=SamplesProtocolOptions.as_view('samples-protocol-options')
)
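# Illustrative request sketch (hypothetical payload, not part of the original
# file): POST /samples with JSON such as {"protocol": "<option value>", ...}.
# create() coerces 'protocol' to PROTOCOL_OPTION_TYPE before validation, so a
# value outside that type raises before the serializer ever runs.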
|
vFense/vFenseAgent-nix
|
agent/deps/rpm6/Python-2.7.5/bin/smtpd.py
|
Python
|
lgpl-3.0
| 18,564
| 0.000431
|
#!/home/toppatch/Python-2.7.5/bin/python2.7
"""An RFC 2821 smtp proxy.
Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]]
Options:
--nosetuid
-n
This program generally tries to setuid `nobody', unless this flag is
set. The setuid call will fail if this program is not run as root (in
which case, use this flag).
--version
-V
Print the version number and exit.
--class classname
-c classname
Use `classname' as the concrete SMTP proxy class. Uses `PureProxy' by
default.
--debug
-d
Turn on debugging prints.
--help
-h
Print this message and exit.
Version: %(__version__)s
If localhost is not given then `localhost' is used, and if localport is not
given then 8025 is used. If remotehost is not given then `localhost' is used,
and if remoteport is not given, then 25 is used.
"""
# Overview:
#
# This file implements the minimal SMTP protocol as defined in RFC 821. It
# has a hierarchy of classes which implement the backend functionality for the
# smtpd. A number of classes are provided:
#
# SMTPServer - the base class for the backend. Raises NotImplementedError
# if you try to use it.
#
# DebuggingServer - simply prints each message it receives on stdout.
#
# PureProxy - Proxies all messages to a real smtpd which does final
# delivery. One known problem with this class is that it doesn't handle
# SMTP errors from the backend server at all. This should be fixed
# (contributions are welcome!).
#
# MailmanProxy - An experimental hack to work with GNU Mailman
# <www.list.org>. Using this server as your real incoming smtpd, your
# mailhost will automatically recognize and accept mail destined to Mailman
# lists when those lists are created. Every message not destined for a list
# gets forwarded to a real backend smtpd, as with PureProxy. Again, errors
# are not handled correctly yet.
#
# Please note that this script requires Python 2.0
#
# Author: Barry Warsaw <barry@python.org>
#
# TODO:
#
# - support mailbox delivery
# - alias files
# - ESMTP
# - handle error codes from the backend smtpd
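# Illustrative sketch (hypothetical subclass, not part of the original file):
# a custom backend overrides process_message, which is invoked once per
# completed DATA transaction:
#
#   class PrintingServer(SMTPServer):
#       def process_message(self, peer, mailfrom, rcpttos, data):
#           print 'From %s to %s: %d bytes' % (
#               mailfrom, COMMASPACE.join(rcpttos), len(data))
#
#   # PrintingServer(('localhost', 8025), ('localhost', 25)) would then
#   # accept mail on port 8025 and handle each message itself.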
import sys
import os
import errno
import getopt
import time
import socket
import asyncore
import asynchat
__all__ = ["SMTPServer","DebuggingServer","PureProxy","MailmanProxy"]
program = sys.argv[0]
__version__ = 'Python SMTP proxy version 0.2'
class Devnull:
def write(self, msg): pass
def flush(self): pass
DEBUGSTREAM = Devnull()
NEWLINE = '\n'
EMPTYSTRING = ''
COMMASPACE = ', '
def usage(code, msg=''):
print >> sys.stderr, __doc__ % globals()
if msg:
print >> sys.stderr, msg
sys.exit(code)
class SMTPChannel(asynchat.async_chat):
COMMAND = 0
DATA = 1
def __init__(self, server, conn, addr):
asynchat.async_chat.__init__(self, conn)
self.__server = server
self.__conn = conn
self.__addr = addr
self.__line = []
self.__state = self.COMMAND
self.__greeting = 0
self.__mailfrom = None
self.__rcpttos = []
self.__data = ''
self.__fqdn = socket.getfqdn()
try:
self.__peer = conn.getpeername()
except socket.error, err:
# a race condition may occur if the other end is closing
# before we can get the peername
self.close()
if err[0] != errno.ENOTCONN:
raise
return
print >> DEBUGSTREAM, 'Peer:', repr(self.__peer)
self.push('220 %s %s' % (self.__fqdn, __version__))
self.set_terminator('\r\n')
# Overrides base class for convenience
def push(self, msg):
asynchat.async_chat.push(self, msg + '\r\n')
# Implementation of base class abstract method
def collect_incoming_data(self, data):
self.__line.append(data)
# Implementation of base class abstract method
def found_terminator(self):
line = EMPTYSTRING.join(self.__line)
print >> DEBUGSTREAM, 'Data:', repr(line)
self.__line = []
if self.__state == self.COMMAND:
if not line:
self.push('500 Error: bad syntax')
return
method = None
i = line.find(' ')
if i < 0:
command = line.upper()
arg = None
else:
command = line[:i].upper()
arg = line[i+1:].strip()
method = getattr(self, 'smtp_' + command, None)
if not method:
self.push('502 Error: command "%s" not implemented' % command)
return
method(arg)
return
else:
if self.__state != self.DATA:
self.push('451 Internal confusion')
return
# Remove extraneous carriage returns and de-transparency according
# to RFC 821, Section 4.5.2.
data = []
for text in line.split('\r\n'):
if text and text[0] == '.':
data.append(text[1:])
else:
data.append(text)
self.__data = NEWLINE.join(data)
status = self.__server.process_message(self.__peer,
self.__mailfrom,
self.__rcpttos,
self.__data)
self.__rcpttos = []
self.__mailfrom = None
self.__state = self.COMMAND
self.set_terminator('\r\n')
if not status:
self.push('250 Ok')
else:
self.push(status)
# SMTP and ESMTP commands
def smtp_HELO(self, arg):
if not arg:
self.push('501 Syntax: HELO hostname')
return
if self.__greeting:
self.push('503 Duplicate HELO/EHLO')
else:
self.__greeting = arg
self.push('250 %s' % self.__fqdn)
def smtp_NOOP(self, arg):
if arg:
self.push('501 Syntax: NOOP')
else:
self.push('250 Ok')
def smtp_QUIT(self, arg):
# args is ignored
self.push('221 Bye')
self.close_when_done()
# factored
def __getaddr(self, keyword, arg):
address = None
keylen = len(keyword)
if arg[:keylen].upper() == keyword:
address = arg[keylen:].strip()
if not address:
pass
elif address[0] == '<' and address[-1] == '>' and address != '<>':
# Addresses can be in the form <person@dom.com> but watch out
# for null address, e.g. <>
address = address[1:-1]
return address
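    # Illustrative examples (not part of the original file):
    #   __getaddr('FROM:', 'FROM:<barry@python.org>') -> 'barry@python.org'
    #   __getaddr('FROM:', 'FROM:<>')                 -> '<>' (null sender kept)
    #   __getaddr('FROM:', 'RCPT TO:<x@y>')           -> None (keyword mismatch)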
def smtp_MAIL(self, arg):
print >> DEBUGSTREAM, '===> MAIL', arg
address = self.__getaddr('FROM:', arg) if arg else None
if not address:
self.push('501 Syntax: MAIL FROM:<address>')
return
if self.__mailfrom:
self.push('503 Error: nested MAIL command')
return
self.__mailfrom = address
print >> DEBUGSTREAM, 'sender:', self.__mailfrom
self.push('250 Ok')
def smtp_RCPT(self, arg):
print >> DEBUGSTREAM, '===> RCPT', arg
if not self.__mailfrom:
self.push('503 Error: need MAIL command')
return
address = self.__getaddr('TO:', arg) if arg else None
if not address:
self.push('501 Syntax: RCPT TO: <address>')
return
self.__rcpttos.append(address)
print >> DEBUGSTREAM, 'recips:', self.__rcpttos
self.push('250 Ok')
def smtp_RSET(self, arg):
if arg:
self.push('501 Syntax: RSET')
return
# Resets the sender, recipients, and data, but not the greeting
self.__mailfrom = None
self.__rcpttos = []
self.__data = ''
self.__state = self.COMMAND
self.push('250 Ok')
def smtp_DATA(self, arg):
if not self.__rcpttos:
            self.push('503 Error: need RCPT command')
|
jmesteve/openerpseda
|
openerp/addons/l10n_es_payment_order/wizard/csb_19.py
|
Python
|
agpl-3.0
| 13,523
| 0.007848
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2006 ACYSOS S.L. (http://acysos.com) All Rights Reserved.
# Pedro Tarrafeta <pedro@acysos.com>
# Copyright (c) 2008 Pablo Rocandio. All Rights Reserved.
# Copyright (c) 2009 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Jordi Esteve <jesteve@zikzakmedia.com>
# Copyright (c) 2013 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
# $Id$
#
# Fixed for the standard TinyERP 4.2.0 installation: Zikzakmedia S.L. 2008
# Jordi Esteve <jesteve@zikzakmedia.com>
#
# Added remittance accounts and payment types. 2008
# Pablo Rocandio <salbet@gmail.com>
#
# Redone for the OpenERP 5.0.0 installation on top of account_payment_extension: Zikzakmedia S.L. 2009
# Jordi Esteve <jesteve@zikzakmedia.com>
#
# Added extra CSB 19 concepts: Acysos S.L. 2011
# Ignacio Ibeas <ignacio@acysos.com>
#
# Refactoring. Acysos S.L. (http://www.acysos.com) 2012
# Ignacio Ibeas <ignacio@acysos.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
from datetime import datetime
from tools.translate import _
from log import *
class csb_19(osv.osv):
_name = 'csb.19'
_auto = False
def _cabecera_presentador_19(self,cr,uid):
converter = self.pool.get('payment.converter.spain')
texto = '5180'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += datetime.today().strftime('%d%m%y')
texto += 6*' '
texto += converter.to_ascii(cr,uid,self.order.mode.nombre).ljust(40)
texto += 20*' '
cc = converter.digits_only(cr,uid,self.order.mode.bank_id.acc_number)
texto += cc[0:8]
texto += 66*' '
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Cabecera presentador 19', texto), True)
return texto
def _cabecera_ordenante_19(self,cr,uid, recibo=None):
converter = self.pool.get('payment.converter.spain')
texto = '5380'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += datetime.today().strftime('%d%m%y')
if self.order.date_prefered == 'due':
assert recibo
if recibo.get('date'):
date_cargo = datetime.strptime(recibo['date'],'%Y-%m-%d')
elif recibo.get('ml_maturity_date'):
date_cargo = datetime.strptime(recibo['ml_maturity_date'],'%Y-%m-%d')
else:
date_cargo = datetime.today()
elif self.order.date_prefered == 'now':
date_cargo = datetime.today()
else: # self.order.date_prefered == 'fixed'
if not self.order.date_scheduled:
raise Log(_('User error:\n\nFixed date of charge has not been defined.'), True)
date_cargo = datetime.strptime(self.order.date_scheduled,'%Y-%m-%d')
texto += date_cargo.strftime('%d%m%y')
texto += converter.to_ascii(cr,uid,self.order.mode.nombre).ljust(40)
cc = converter.digits_only(cr,uid,self.order.mode.bank_id.acc_number)
texto += cc[0:20]
texto += 8*' '
texto += '01'
texto += 64*' '
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Cabecera ordenante 19', texto), True)
return texto
def _individual_obligatorio_19(self,cr,uid, recibo):
converter = self.pool.get('payment.converter.spain')
texto = '5680'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += str(recibo['name'])[-12:].zfill(12)
nombre = converter.to_ascii(cr,uid,recibo['partner_id'].name)
texto += nombre[0:40].ljust(40)
ccc = recibo['bank_id'] and recibo['bank_id'].acc_number or ''
ccc = converter.digits_only(cr,uid,ccc)
texto += str(ccc)[0:20].zfill(20)
importe = int(round(abs(recibo['amount'])*100,0))
texto += str(importe).zfill(10)
        ###### Return reference (only valid when not grouping) ######
if len(recibo['ml_inv_ref']) == 1:
texto += str(recibo['ml_inv_ref'][0].id)[-16:].zfill(16)
else:
texto += 16*' '
######################################################################
concepto = ''
if recibo['communication']:
concepto = recibo['communication']
texto += converter.to_ascii(cr,uid,concepto)[0:48].ljust(48)
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Individual obligatorio 19', texto), True)
return texto
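    # Illustrative layout note (not part of the original file): every record is
    # a fixed-width 162-character line plus '\r\n', hence the len(texto) != 164
    # checks; e.g. an amount of 1234.56 is encoded above as
    # str(int(round(1234.56 * 100))).zfill(10) == '0000123456'.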
def _individual_opcional_19(self,cr,uid, recibo):
"""Para poner el segundo texto de comunicación (en lugar de nombre, domicilio y localidad opcional)"""
converter = self.pool.get('payment.converter.spain')
texto = '5686'
texto += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
texto += str(recibo['name'])[-12:].zfill(12)
texto += converter.to_ascii(cr,uid,recibo['communication2'])[0:115].ljust(115)
        texto += '00000' # Dummy postal code field
texto += 14*' '
texto += '\r\n'
if len(texto) != 164:
raise Log(_('Configuration error:\n\nThe line "%s" is not 162 characters long:\n%s') % ('Individual opcional 19', texto), True)
return texto
def _extra_opcional_19(self,cr,uid, recibo):
"""Para poner los 15 conceptos opcional de los registros 5681-5685 utilizando las lineas de facturación (Máximo 15 lineas)"""
converter = self.pool.get('payment.converter.spain')
res = {}
res['texto'] = ''
res['total_lines'] = 0
counter = 1
registry_counter = 1
length = 0
for invoice in recibo['ml_inv_ref']:
if invoice:
length += len(invoice.invoice_line)
for invoice in recibo['ml_inv_ref']:
if invoice:
for invoice_line in invoice.invoice_line:
if counter <= length:
if counter <= 15:
if (counter-1)%3 == 0:
res['texto'] += '568'+str(registry_counter)
res['texto'] += (self.order.mode.bank_id.partner_id.vat[2:] + self.order.mode.sufijo).zfill(12)
res['texto'] += str(recibo['name']).zfill(12)
price = ' %(#).2f ' % {'#' : invoice_line.price_subtotal}
res['texto'] += converter.to_ascii(cr,uid,invoice_line.name)[0:(40-len(price))].ljust(40-len(price))
res['texto'] += converter.to_ascii(cr,uid,price.replace('.',','))
if counter % 3 == 0:
res['texto'] += 14*' '+'\r\n'
res['total_lines'] += 1
if len(res
|
offlinehacker/flumotion
|
tests/checks.py
|
Python
|
gpl-2.0
| 5,092
| 0.002749
|
#!/usr/bin/env python
#
# gst-python
# Copyright (C) 2005 Andy Wingo <wingo@pobox.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
# A test more of gst-plugins than of gst-python.
import sys
import pygtk
pygtk.require('2.0')
import gtk
import gtk.gdk
import pango
import gobject
#import pygst
#pygst.require('0.10')
import gst
import debugslider
from twisted.internet import gtk2reactor
gtk2reactor.install(useGtk=False)
from twisted.internet import reactor
data = ("checkTVCard('/dev/video0')",
"checkTVCard('/dev/video1')",
"checkWebcam('/dev/video0')",
"checkWebcam('/dev/video1')",
"checkMixerTracks('alsasrc', 'hw:0')",
"checkMixerTracks('osssrc', '/dev/dsp')",
"check1394()")
def make_model():
from flumotion.worker.checks import video
m = gtk.ListStore(str, object)
for s in data:
i = m.append()
m.set_value(i, 0, s)
m.set_value(i, 1, eval('lambda: video.%s'%s, {'video': video}))
return m
class Window(gtk.Window):
def __init__(self):
gtk.Window.__init__(self, gtk.WINDOW_TOPLEVEL)
self.current_deferred = None
self.prepare_ui()
def prepare_ui(self):
self.set_default_size(300, 400)
self.set_title('Flumotion Check Checker')
self.set_type_hint(gtk.gdk.WINDOW_TYPE_HINT_DIALOG)
self.connect('delete-event', lambda *x: reactor.stop())
self.set_border_width(18)
b = gtk.VBox(False, 12)
b.show()
self.add(b)
l = gtk.Label()
l.set_markup('<big><b>Flumotion Check Checker</b></big>')
l.show()
b.pack_start(l, False, False, 6)
        l = gtk.Label('Choose a check to check.')
l.show()
b.pack_start(l, False, False, 0)
sw = gtk.ScrolledWindow()
sw.set_policy(gtk.POLICY_NEVER, gtk.POLICY_NEVER)
        sw.set_shadow_type(gtk.SHADOW_IN)
sw.show()
b.pack_start(sw, True, True, 6)
tv = gtk.TreeView(make_model())
tv.set_property('can-default', False)
r = gtk.CellRendererText()
r.set_property('xalign', 0.5)
c = gtk.TreeViewColumn('System', r, text=0)
tv.append_column(c)
tv.set_headers_visible(False)
tv.show()
sw.add(tv)
ds = debugslider.DebugSlider()
ds.show()
b.pack_start(ds, False, False, 0)
bb = gtk.HButtonBox()
bb.set_layout(gtk.BUTTONBOX_SPREAD)
bb.show()
b.pack_start(bb, False, False, 0)
bu = gtk.Button(stock=gtk.STOCK_EXECUTE)
bu.set_property('can-default', True)
bu.set_focus_on_click(False)
bu.show()
bb.pack_start(bu, True, False, 0)
bu.set_property('has-default', True)
self.button = bu
self.selection = tv.get_selection()
tv.connect('row-activated', lambda *x: self.run_check())
bu.connect('clicked', lambda *x: self.run_check())
def error(self, message, secondary=None):
m = gtk.MessageDialog(
self,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_ERROR,
gtk.BUTTONS_OK,
message)
if secondary:
m.format_secondary_text(secondary)
m.run()
m.destroy()
def run_check(self):
from twisted.internet.defer import Deferred
m, i = self.selection.get_selected()
if not i:
return
name, proc = m.get(i, 0, 1)
def callback(res, d):
if d != self.current_deferred:
print '(got successful old result: %s->%s)' % (name, res)
else:
print '%s successful: %s' % (name, res)
def errback(res, d):
if d != self.current_deferred:
print '(got failing old result: %s->%s)' % (name, res)
else:
print '%s failed, reason: %s' % (name, res)
print name
d = proc()
if isinstance(d, Deferred):
self.current_deferred = d
d.addCallback(callback, d)
d.addErrback(errback, d)
else:
            print 'Check %s returned immediately with result %s' % (name, d)
try:
from flumotion.common import errors
from flumotion.common import setup
w = Window()
w.show()
setup.setup()
reactor.run()
except KeyboardInterrupt:
print 'Interrupted'
|
justincely/classwork
|
UMD/AST630/HW2/hw2.py
|
Python
|
bsd-3-clause
| 4,890
| 0.008793
|
"""Functions and script for problems in HW2
For problem 3:
--------------
class oblate:
provides the mechanics for calulating the desired orbital values from an
initilized object of a given mass, radius, and moments.
function problem_3():
function to output results using the oblate class on given scenarios
"""
import matplotlib.pyplot as plt
import numpy as np
M = 1.99e33
G = 6.67e-8
c = 2.998e10
#-------------------------------------------------------------------------------
class oblate():
def __init__(self, mass, radius, moments=[]):
"""Initialize object
Parameters:
-----------
mass : float, int
mass of the planet in any units
radius : float, int
                radius as a factor of the planetary radius
            moments : list, optional
list of the moment coefficients
"""
self.mass = mass
self.radius = radius
self.moments = moments
@property
def n(self):
"""Calculate the body's orbital frequency
"""
const = np.sqrt(G * self.mass / (self.radius**3))
coeffs = [((3.0 / 2.0) * (1.0 / self.radius)**2),
(-1 * (15.0 / 8.0) * (1.0 / self.radius)**4),
((35.0 / 16.0) * (1.0 / self.radius)**6)]
terms = 1
for j, c in zip(self.moments, coeffs):
terms += (j * c)
return np.sqrt(const * terms)
@property
def k(self):
"""Calculate the body's epicyclic frequency
"""
const = np.sqrt(G * self.mass / (self.radius**3))
coeffs = [(-1 * (3.0 / 2.0) * (1.0 / self.radius)**2),
((45.0 / 8.0) * (1.0 / self.radius)**4),
(-1 * (175.0 / 16.0) * (1.0 / self.radius)**6)]
terms = 1
for j, c in zip(self.moments, coeffs):
terms += (j * c)
return np.sqrt(const * terms)
@property
def mu(self):
"""Calculate the body's vertical frequency
"""
return np.sqrt(2 * (self.n**2) - (self.k**2))
@property
def apse_precess(self):
"""Calculate the precession of the periapse longitude
"""
return self.n - self.k
@property
def node_regress(self):
"""Calculate the regression of the nodes of the equitorial orbits
"""
return self.n - self.mu
@property
def period(self):
"""Calculate the orbital period
"""
return 1.0 / self.n
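# Illustrative usage (mirroring problem_3 below; not part of the original file):
#   saturn = oblate(568.46e27, 1.3, [1.63e-2])
#   saturn.period         # 1 / n
#   saturn.apse_precess   # n - k, periapse precession rate
#   saturn.node_regress   # n - mu, nodal regression rate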
#-------------------------------------------------------------------------------
def problem_3():
"""Print output for problem 3 of HW4
"""
for moments in [[], [1.63e-2], [1.63e-2, -9e-4, 1e-4]]:
for radius in [1.3, 3]:
saturn = oblate(568.46e27, radius, moments)
if len(moments):
print "Using moments of: {}".format(moments)
else:
print "Using no moments"
print "and radius of: {}".format(radius)
print "the orbital period is: {}".format(saturn.period)
print " apse precession rate: {}".format(saturn.apse_precess)
print " node regression rate: {}".format(saturn.node_regress)
print
#-------------------------------------------------------------------------------
def precession(a, e):
    numerator = 3. * ((G * M) ** (3/2.))
    denom = (a ** (5/2.)) * (1-e**2) * (c**2)
    w = numerator / float(denom)
print w
# per second to per year
w *= (60. * 60. * 24. * 365.)
# from rad to arcsec
w *= 206265
return w
#-------------------------------------------------------------------------------
def ldot(radius, msat, rplan):
ldot = (3 / 4.) * (.14 / 86)
ldot *= (G * (msat**2) * rplan**(5))
ldot /= (radius ** 6)
return ldot
#-------------------------------------------------------------------------------
def problem_4():
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.grid()
r_mars = 3.3899e8
m_mars = .64185e27
radii = np.linspace(0, 10 * r_mars)
ax.plot(radii / r_mars,
ldot(radii, 1, r_mars),
lw=3)
ax.axvline(x=9.38e+08 / r_mars,
ls = '--',
color='red',
lw=2,
label='Radius Phobos')
ax.axvline(x=23.4e+08 / r_mars,
ls = '-',
lw=2,
color='red',
label='Radius Deimos')
ax.set_yscale('log')
ax.set_xlabel('Mars Radii')
ax.set_ylabel('$\dot L$')
ax.set_title('$\dot L$ vs Radii for orbit around Mars')
ax.legend(shadow=True, numpoints=1, loc='upper right')
plt.savefig('mars_momentum.pdf')
#-------------------------------------------------------------------------------
if __name__ == "__main__":
problem_3()
|
mlperf/training_results_v0.6
|
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/python/tvm/_ffi/_ctypes/node.py
|
Python
|
apache-2.0
| 2,777
| 0.00144
|
# pylint: disable=invalid-name, protected-access
# pylint: disable=no-member, missing-docstring, not-callable
from __future__ import absolute_import
import ctypes
from ..base import _LIB, check_call, c_str
from ..node_generic import _set_class_node_base
from .types import TVMValue, TypeCode
from .types import RETURN_SWITCH, C_TO_PY_ARG_SWITCH, _wrap_arg_func
NodeHandle = ctypes.c_void_p
__init_by_constructor__ = None
"""Maps node type to its constructor"""
NODE_TYPE = {}
def _register_node(index, cls):
"""register node class"""
NODE_TYPE[index] = cls
def _return_node(x):
"""Return node function"""
handle = x.v_handle
if not isinstance(handle, NodeHandle):
handle = NodeHandle(handle)
tindex = ctypes.c_int()
check_call(_LIB.TVMNodeGetTypeIndex(handle, ctypes.byref(tindex)))
cls = NODE_TYPE.get(tindex.value, NodeBase)
# Avoid calling __init__ of cls, instead directly call __new__
# This allows child class to implement their own __init__
node = cls.__new__(cls)
node.handle = handle
return node
RETURN_SWITCH[TypeCode.NODE_HANDLE] = _return_node
C_TO_PY_ARG_SWITCH[TypeCode.NODE_HANDLE] = _wrap_arg_func(
_return_node, TypeCode.NODE_HANDLE)
class NodeBase(object):
__slots__ = ["handle"]
# pylint: disable=no-member
def __del__(self):
if _LIB is not None:
check_call(_LIB.TVMNodeFree(self.handle))
def __getattr__(self, name):
ret_val = TVMValue()
ret_type_code = ctypes.c_int()
ret_success = ctypes.c_int()
check_call(_LIB.TVMNodeGetAttr(
self.handle, c_str(name),
ctypes.byref(ret_val),
ctypes.byref(ret_type_code),
ctypes.byref(ret_success)))
if not ret_success.value:
raise AttributeError(
"'%s' object has no attribute '%s'" % (str(type(self)), name))
return RETURN_SWITCH[ret_type_code.value](ret_val)
def __init_handle_by_constructor__(self, fconstructor, *args):
"""Initialize the handle by calling constructor function.
Parameters
----------
fconstructor : Function
Constructor function.
args: list of objects
The arguments to the constructor
Note
----
        We have a special calling convention to call constructor functions.
So the return handle is directly set into the Node object
instead of creating a new Node.
"""
        # assign handle first to avoid error raising
self.handle = None
handle = __init_by_constructor__(fconstructor, args)
if not isinstance(handle, NodeHandle):
handle = NodeHandle(handle)
self.handle = handle
_set_class_node_base(NodeBase)
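# Registration sketch (illustrative only; the type index below is
# hypothetical -- real indices come from the TVM C++ runtime):
#
# class MyNode(NodeBase):
#     pass
#
# _register_node(17, MyNode)
# # _return_node will now construct MyNode (via __new__, skipping __init__)
# # whenever the runtime hands back a node with type index 17.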
|
PatSunter/SimpleGTFSCreator
|
route_segs.py
|
Python
|
lgpl-3.0
| 52,470
| 0.003774
|
"""A module for handling and accessing both the in-memory, and on-disk,
representation of a set of routes as a set of segments. Where each segment
specifies its start and end stop ids, and other data (see
topology_shapefile_data_model.py for more."""
import sys
import csv
import re
import operator
import itertools
import misc_utils
import topology_shapefile_data_model as tp_model
########
# Basic route name handling
def get_route_order_key_from_name(route_def):
rname = route_def.short_name
if rname:
# Courtesy http://stackoverflow.com/questions/4289331/python-extract-numbers-from-a-string
try:
order_key = int(re.findall(r'\d+', rname)[0])
except IndexError:
order_key = rname
else:
order_key = route_def.long_name
return order_key
def get_route_names_sorted(route_names):
# Get an ordered list of route names so we can write in name order,
keyfunc = None
if len(route_names[0]) <= 3:
# Dropping the 'R' for route, for short route names, and sort
# by integer version of remaining string
keyfunc = lambda s: int(s[1:])
else:
# Just sort by the full route name string.
keyfunc = lambda s: s
rnames_sorted = sorted(route_names, key=keyfunc)
return rnames_sorted
########
# Definition of Route_Def and Seg_Reference lightweight classes and basic
# manipulation of them.
class Route_Def:
def __init__(self, route_id, short_name, long_name, dir_names,
ordered_seg_ids, gtfs_origin_id = None):
self.id = route_id
self.gtfs_origin_id = gtfs_origin_id
self.short_name = short_name
self.long_name = long_name
self.dir_names = dir_names
self.ordered_seg_ids = ordered_seg_ids
class Seg_Reference:
"""A small lightweight class for using as an in-memory storage of
key segment topology information, and reference to actual segment
feature in a shapefile layer.
This is designed to save cost of reading actual
shapefile frequently, e.g. for algorithms that need to search and/or
add to segments list a lot."""
def __init__(self, seg_id, first_stop_id, second_stop_id,
route_dist_on_seg=None, routes=None):
self.seg_id = seg_id # Segment ID
self.first_id = first_stop_id
self.second_id = second_stop_id
self.route_dist_on_seg = route_dist_on_seg
if routes is None:
self.routes = []
else:
self.routes = routes
self.seg_ii = None # Index into segments layer shapefile -
class Route_Ext_Info:
"""Class for holding relevant info about extended routes."""
def __init__(self, ext_id, ext_name, ext_type,
exist_r_s_name, exist_r_l_name,
exist_r_connect_stop_gtfs_id, exist_r_first_stop_gtfs_id,
upd_r_short_name, upd_r_long_name, upd_dir_name):
self.ext_id = ext_id
self.ext_name = ext_name
self.ext_type = ext_type
self.exist_r_short_name = exist_r_s_name
self.exist_r_long_name = exist_r_l_name
self.exist_r_connect_stop_gtfs_id = exist_r_connect_stop_gtfs_id
self.exist_r_first_stop_gtfs_id = exist_r_first_stop_gtfs_id
self.upd_r_short_name = upd_r_short_name
self.upd_r_long_name = upd_r_long_name
self.upd_dir_name = upd_dir_name
assert ext_type in tp_model.ROUTE_EXT_ALL_TYPES
assert self.exist_r_connect_stop_gtfs_id is not None
if ext_type == tp_model.ROUTE_EXT_TYPE_NEW:
assert self.exist_r_first_stop_gtfs_id is not None
assert upd_dir_name
return
def get_print_name(route_def):
print_name = misc_utils.get_route_print_name(
route_def.short_name, route_def.long_name)
return print_name
def add_route_to_seg_ref(seg_ref, route_id):
if route_id not in seg_ref.routes:
seg_ref.routes.append(route_id)
return
def seg_has_stops(seg_ref, stop_id_1, stop_id_2):
if seg_ref.first_id == stop_id_1 and \
seg_ref.second_id == stop_id_2 \
or seg_ref.first_id == stop_id_2 and \
seg_ref.second_id == stop_id_1:
return True
return False
def get_seg_dist_km(seg_ref):
    if seg_ref.route_dist_on_seg is not None:
return seg_ref.route_dist_on_seg / tp_model.ROUTE_DIST_RATIO_TO_KM
else:
print "Warning:- asked for distance of a seg_ref with ID %d, but "\
"route distance hasn't yet been read or calculated for this "\
"seg_ref." % seg_ref.seg_id
return None
def get_other_stop_id(seg_ref, stop_id):
if stop_id == seg_ref.first_id:
return seg_ref.second_id
else:
assert stop_id == seg_ref.second_id
return seg_ref.first_id
#####################
# Basic manipulations on a list of seg_refs or route_defs
def get_seg_ref_with_id(seg_id, seg_refs):
for seg_ref in seg_refs:
if seg_id == seg_ref.seg_id:
return seg_ref
return None
def build_seg_refs_lookup_table(seg_refs):
seg_refs_lookup_table = {}
for seg_ref in seg_refs:
seg_refs_lookup_table[seg_ref.seg_id] = seg_ref
return seg_refs_lookup_table
def find_seg_ref_matching_stops(all_seg_refs, stop_id_1, stop_id_2):
matched_seg_ref = None
for seg_ref in all_seg_refs:
if seg_has_stops(seg_ref, stop_id_1, stop_id_2):
matched_seg_ref = seg_ref
break
return matched_seg_ref
def add_update_seg_ref(start_stop_id, end_stop_id, route_id,
route_dist_on_seg, all_seg_refs, seg_refs_this_route,
possible_route_duplicates=False):
"""Add a new segment to the two pre-existing lists all_seg_refs, and
seg_refs_this_route. If segment already exists, update its route list."""
seg_id = None
new_status = False
seg_ref_to_return = None
matched_seg_ref = find_seg_ref_matching_stops(all_seg_refs, start_stop_id,
end_stop_id)
if matched_seg_ref:
new_status = False
#print "While adding, matched a segment! Seg id = %s, existing "\
# "routes = %s, new route = '%s'" %\
# (matched_seg_ref.seg_id\
# matched_seg_ref.routes,\
# route_id)
add_route_to_seg_ref(matched_seg_ref, route_id)
seg_ref_to_return = matched_seg_ref
if possible_route_duplicates:
# Adding a new defensive case:- don't want to add a segment twice to
# the same route.
matched_in_route = find_seg_ref_matching_stops(seg_refs_this_route,
start_stop_id, end_stop_id)
            if not matched_in_route:
                seg_refs_this_route.append(seg_ref_to_return)
        else:
            seg_refs_this_route.append(seg_ref_to_return)
    else:
        new_status = True
        # +1 since we want to start counter at 1
        seg_id = len(all_seg_refs)+1
        new_seg_ref = Seg_Reference(seg_id, start_stop_id, end_stop_id,
            route_dist_on_seg, routes = [route_id])
        # It's a new segment, so append to the list of all segments.
        all_seg_refs.append(new_seg_ref)
        seg_ref_to_return = new_seg_ref
        seg_refs_this_route.append(seg_ref_to_return)
return seg_ref_to_return, new_status
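# Usage sketch (hypothetical stop/route ids and distance): adding the same
# stop pair again, in either direction, updates the existing segment's route
# list rather than creating a duplicate.
#
# all_segs, r1_segs, r2_segs = [], [], []
# seg, new = add_update_seg_ref(101, 102, 'R1', 1500, all_segs, r1_segs)
# assert new is True and seg.seg_id == 1
# seg, new = add_update_seg_ref(102, 101, 'R2', 1500, all_segs, r2_segs)
# assert new is False and seg.routes == ['R1', 'R2']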
def route_defs_match_statuses(route_def, route_def2):
match_statuses = []
if route_def.id is not None and route_def2.id is not None:
test = route_def.id == route_def2.id
match_statuses.append(test)
if route_def.short_name and route_def2.short_name:
test = route_def.short_name == route_def2.short_name
match_statuses.append(test)
if route_def.long_name and route_def2.long_name:
test = route_def.long_name == route_def2.long_name
match_statuses.append(test)
match_status = False
# Make sure there is at least one attribute matching, and all match.
if len(match_statuses) >= 1 and False not in match_statuses:
match_status = True
return match_status
def get_matching_route_defs(route_defs, search_route_def):
matching_route_defs = []
for rdef in route_defs:
if route_defs_match_statuses(rdef, s
|
cbcunc/primer
|
primer/__main__.py
|
Python
|
gpl-2.0
| 1,218
| 0
|
#! /usr/bin/env python
"""
A __main__ namespace for the primer package.
"""
from __future__ import print_function
import sys
import argparse
from time import time
from primer import primes
def main(argv):
"""Call primes when the package is run as a script."""
parser = argparse.ArgumentParser(
prog='primes',
description='Display the first N primes.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('number',
metavar='N',
nargs='?',
default='100',
help='Number of primes to generate')
parser.add_argument('-t',
action='store_true',
help='Display elapsed time')
args = parser.parse_args(argv)
try:
number = int(args.number, 0)
if args.t:
start = time()
print(" ".join([str(n) for n in primes((number))]))
if args.t:
print()
print("Elapsed time is {} seconds.".format(time() - start))
except Exception as e:
parser.print_help()
print()
print(e)
if __name__ == '__main__':
main(sys.argv[1:])
|
JulienMcJay/eclock
|
windows/kivy/examples/canvas/repeat_texture.py
|
Python
|
gpl-2.0
| 827
| 0.001209
|
'''
Demonstrate repeating textures
==============================
This was a test to fix an issue with repeating texture and window reloading.
'''
from kivy.app import App
from kivy.uix.image import Image
from kivy.properties import ObjectProperty
from kivy.lang import Builder
kv = '''
FloatLayout:
    canvas.before:
Color:
rgb: 1, 1, 1
Rectangle:
pos: self.pos
size: self.size
texture: app.texture
Label:
        text: '{} (try to resize the window)'.format(root.size)
'''
class RepeatTexture(App):
texture = ObjectProperty()
def build(self):
self.texture = Image(source='mtexture1.png').texture
self.texture.wrap = 'repeat'
self.texture.uvsize = (8, 8)
return Builder.load_string(kv)
RepeatTexture().run()
|
marcdm/bleach
|
bleach/sanitizer.py
|
Python
|
bsd-3-clause
| 6,574
| 0.00502
|
from __future__ import unicode_literals
import re
from xml.sax.saxutils import escape, unescape
from html5lib.constants import tokenTypes
from html5lib.sanitizer import HTMLSanitizerMixin
from html5lib.tokenizer import HTMLTokenizer
PROTOS = HTMLSanitizerMixin.acceptable_protocols
PROTOS.remove('feed')
class BleachSanitizerMixin(HTMLSanitizerMixin):
"""Mixin to replace sanitize_token() and sanitize_css()."""
allowed_svg_properties = []
# TODO: When the next html5lib version comes out, nuke this.
attr_val_is_uri = HTMLSanitizerMixin.attr_val_is_uri + ['poster']
def sanitize_token(self, token):
"""Sanitize a token either by HTML-encoding or dropping.
Unlike HTMLSanitizerMixin.sanitize_token, allowed_attributes can be
a dict of {'tag': ['attribute', 'pairs'], 'tag': callable}.
        Here callable is a function with two arguments: attribute name
        and value. It should return True or False.
        Also gives the option to strip tags instead of encoding.
"""
if (getattr(self, 'wildcard_attributes', None) is None and
isinstance(self.allowed_attributes, dict)):
self.wildcard_attributes = self.allowed_attributes.get('*', [])
if token['type'] in (tokenTypes['StartTag'], tokenTypes['EndTag'],
tokenTypes['EmptyTag']):
if token['name'] in self.allowed_elements:
if 'data' in token:
if isinstance(self.allowed_attributes, dict):
allowed_attributes = self.allowed_attributes.get(
token['name'], [])
if not callable(allowed_attributes):
allowed_attributes += self.wildcard_attributes
else:
allowed_attributes = self.allowed_attributes
attrs = dict([(name, val) for name, val in
token['data'][::-1]
if (allowed_attributes(name, val)
if callable(allowed_attributes)
else name in allowed_attributes)])
for attr in self.attr_val_is_uri:
                        if attr not in attrs:
continue
val_unescaped = re.sub("[`\000-\040\177-\240\s]+", '',
unescape(attrs[attr])).lower()
# Remove replacement characters from unescaped
# characters.
val_unescaped = val_unescaped.replace("\ufffd", "")
if (re.match(r'^[a-z0-9][-+.a-z0-9]*:', val_unescaped)
and (val_unescaped.split(':')[0] not in
self.allowed_protocols)):
del attrs[attr]
for attr in self.svg_attr_val_allows_ref:
if attr in attrs:
attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
' ',
unescape(attrs[attr]))
if (token['name'] in self.svg_allow_local_href and
'xlink:href' in attrs and
re.search(r'^\s*[^#\s].*', attrs['xlink:href'])):
del attrs['xlink:href']
if 'style' in attrs:
attrs['style'] = self.sanitize_css(attrs['style'])
token['data'] = [(name, val) for name, val in
attrs.items()]
return token
elif self.strip_disallowed_elements:
pass
else:
if token['type'] == tokenTypes['EndTag']:
token['data'] = '</{0!s}>'.format(token['name'])
elif token['data']:
attrs = ''.join([' {0!s}="{1!s}"'.format(k, escape(v)) for k, v in
token['data']])
token['data'] = '<{0!s}{1!s}>'.format(token['name'], attrs)
else:
token['data'] = '<{0!s}>'.format(token['name'])
if token['selfClosing']:
token['data'] = token['data'][:-1] + '/>'
token['type'] = tokenTypes['Characters']
del token["name"]
return token
elif token['type'] == tokenTypes['Comment']:
if not self.strip_html_comments:
return token
else:
return token
def sanitize_css(self, style):
"""HTMLSanitizerMixin.sanitize_css replacement.
HTMLSanitizerMixin.sanitize_css always whitelists background-*,
border-*, margin-*, and padding-*. We only whitelist what's in
the whitelist.
"""
# disallow urls
style = re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
# gauntlet
# TODO: Make sure this does what it's meant to - I *think* it wants to
# validate style attribute contents.
parts = style.split(';')
gauntlet = re.compile("""^([-/:,#%.'"\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'\s*"""
"""|"[\s\w]+"|\([\d,%\.\s]+\))*$""")
for part in parts:
if not gauntlet.match(part):
return ''
if not re.match("^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
return ''
clean = []
for prop, value in re.findall('([-\w]+)\s*:\s*([^:;]*)', style):
if not value:
continue
if prop.lower() in self.allowed_css_properties:
clean.append(prop + ': ' + value + ';')
elif prop.lower() in self.allowed_svg_properties:
clean.append(prop + ': ' + value + ';')
return ' '.join(clean)
class BleachSanitizer(HTMLTokenizer, BleachSanitizerMixin):
    def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
lowercaseElementName=True, lowercaseAttrName=True, **kwargs):
        HTMLTokenizer.__init__(self, stream, encoding, parseMeta, useChardet,
lowercaseElementName, lowercaseAttrName,
**kwargs)
def __iter__(self):
for token in HTMLTokenizer.__iter__(self):
token = self.sanitize_token(token)
if token:
yield token
|
transientskp/tkp
|
tkp/accessors/lofaraccessor.py
|
Python
|
bsd-2-clause
| 886
| 0
|
from tkp.accessors.dataaccessor import RequiredAttributesMetaclass
class LofarAccessor(object):
    """
    Additional metadata required for processing LOFAR images through QC
    checks.
    Attributes:
        antenna_set (string): Antenna set in use during observation.
            String; 'LBA_INNER', 'LBA_OUTER', 'LBA_SPARSE', 'LBA' or 'HBA'
        ncore(int): Number of core stations in use during observation.
        nremote(int): Number of remote stations in use during observation.
        nintl(int): Number of international stations in use during observation.
        subbandwidth(float): Width of a subband in Hz.
        subbands(int): Number of subbands.
    """
    __metaclass__ = RequiredAttributesMetaclass
_required_attributes = [
'antenna_set',
'ncore',
'nremote',
'nintl',
'subbandwidth',
'subbands',
]
|
kemaswill/keras
|
keras/preprocessing/image.py
|
Python
|
mit
| 25,315
| 0.001659
|
'''Fairly basic set of tools for real-time data augmentation on image data.
Can easily be extended to include new transformations,
new preprocessing methods, etc...
'''
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import re
from scipy import linalg
import scipy.ndimage as ndi
from six.moves import range
import os
import threading
from .. import backend as K
def random_rotation(x, rg, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
theta = np.pi / 180 * np.random.uniform(-rg, rg)
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(rotation_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_shift(x, wrg, hrg, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
h, w = x.shape[row_index], x.shape[col_index]
tx = np.random.uniform(-hrg, hrg) * h
ty = np.random.uniform(-wrg, wrg) * w
translation_matrix = np.array([[1, 0, tx],
[0, 1, ty],
[0, 0, 1]])
transform_matrix = translation_matrix # no need to do offset
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_shear(x, intensity, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
shear = np.random.uniform(-intensity, intensity)
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(shear_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_zoom(x, zoom_range, row_index=1, col_index=2, channel_index=0,
fill_mode='nearest', cval=0.):
if len(zoom_range) != 2:
raise Exception('zoom_range should be a tuple or list of two floats. '
'Received arg: ', zoom_range)
if zoom_range[0] == 1 and zoom_range[1] == 1:
zx, zy = 1, 1
else:
zx, zy = np.random.uniform(zoom_range[0], zoom_range[1], 2)
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
h, w = x.shape[row_index], x.shape[col_index]
transform_matrix = transform_matrix_offset_center(zoom_matrix, h, w)
x = apply_transform(x, transform_matrix, channel_index, fill_mode, cval)
return x
def random_barrel_transform(x, intensity):
# TODO
pass
def random_channel_shift(x, intensity, channel_index=0):
x = np.rollaxis(x, channel_index, 0)
min_x, max_x = np.min(x), np.max(x)
channel_images = [np.clip(x_channel + np.random.uniform(-intensity, intensity), min_x, max_x)
for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
def transform_matrix_offset_center(matrix, x, y):
o_x = float(x) / 2 + 0.5
o_y = float(y) / 2 + 0.5
offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])
reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])
transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix)
return transform_matrix
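# Note on the centering above: an affine transform M defined about the array
# origin is conjugated so that it acts about the image centre instead:
#     T = Offset . M . Reset
# Reset translates the centre to the origin, M is applied there, and Offset
# translates the result back. apply_transform below consumes the 3x3 output.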
def apply_transform(x, transform_matrix, channel_index=0, fill_mode='nearest', cval=0.):
x = np.rollaxis(x, channel_index, 0)
final_affine_matrix = transform_matrix[:2, :2]
final_offset = transform_matrix[:2, 2]
channel_images = [ndi.interpolation.affine_transform(x_channel, final_affine_matrix,
final_offset, order=0, mode=fill_mode, cval=cval) for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
def flip_axis(x, axis):
x = np.asarray(x).swapaxes(axis, 0)
x = x[::-1, ...]
x = x.swapaxes(0, axis)
return x
def array_to_img(x, dim_ordering='default', scale=True):
from PIL import Image
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
if dim_ordering == 'th':
x = x.transpose(1, 2, 0)
if scale:
x += max(-np.min(x), 0)
x_max = np.max(x)
if x_max != 0:
x /= x_max
x *= 255
if x.shape[2] == 3:
# RGB
return Image.fromarray(x.astype('uint8'), 'RGB')
elif x.shape[2] == 1:
# grayscale
return Image.fromarray(x[:, :, 0].astype('uint8'), 'L')
else:
raise Exception('Unsupported channel number: ', x.shape[2])
def img_to_array(img, dim_ordering='default'):
if dim_ordering == 'default':
dim_ordering = K.image_dim_ordering()
if dim_ordering not in ['th', 'tf']:
raise Exception('Unknown dim_ordering: ', dim_ordering)
# image has dim_ordering (height, width, channel)
x = np.asarray(img, dtype='float32')
if len(x.shape) == 3:
if dim_ordering == 'th':
x = x.transpose(2, 0, 1)
elif len(x.shape) == 2:
if dim_ordering == 'th':
x = x.reshape((1, x.shape[0], x.shape[1]))
else:
x = x.reshape((x.shape[0], x.shape[1], 1))
else:
raise Exception('Unsupported image shape: ', x.shape)
return x
def load_img(path, grayscale=False, target_size=None):
from PIL import Image
img = Image.open(path)
if grayscale:
img = img.convert('L')
    else:  # Ensure 3 channels even when the loaded image is grayscale
img = img.convert('RGB')
if target_size:
img = img.resize((target_size[1], target_size[0]))
return img
def list_pictures(directory, ext='jpg|jpeg|bmp|png'):
return [os.path.join(directory, f) for f in os.listdir(directory)
if os.path.isfile(os.path.join(directory, f)) and re.match('([\w]+\.(?:' + ext + '))', f)]
class ImageDataGenerator(object):
'''Generate minibatches with
real-time data augmentation.
# Arguments
featurewise_center: set input mean to 0 over the dataset.
samplewise_center: set each sample mean to 0.
featurewise_std_normalization: divide inputs by std of the dataset.
samplewise_std_normalization: divide each input by its std.
zca_whitening: apply ZCA whitening.
rotation_range: degrees (0 to 180).
width_shift_range: fraction of total width.
height_shift_range: fraction of total height.
shear_range: shear intensity (shear angle in radians).
zoom_range: amount of zoom. if scalar z, zoom will be randomly picked
in the range [1-z, 1+z]. A sequence of two can be passed instead
to select this range.
        channel_shift_range: shift range for each channel.
fill_mode: points outside the boundaries are filled according to the
given mode ('constant', 'nearest', 'reflect' or 'wrap'). Default
is 'nearest'.
cval: value used for points outside the boundaries when fill_mode is
'constant'. Default is 0.
        horizontal_flip: whether to randomly flip images horizontally.
vertical_flip: whether to randomly flip images vertically.
rescale: rescaling factor. If None or 0, no rescaling is applied,
otherwise we multiply the data by the value provided (before applying
any other transformation).
dim_ordering: 'th' or 'tf'. In 'th' mode, the channels dimension
(the depth) is at index 1, in 'tf' mode it is at index 3.
It defaults to the `image_dim_ordering` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "th".
'''
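    # Usage sketch (illustrative; assumes 4D image arrays in the configured
    # dim_ordering):
    #
    #   datagen = ImageDataGenerator(rotation_range=20,
    #                                width_shift_range=0.1,
    #                                horizontal_flip=True)
    #   datagen.fit(x_train)  # only needed for featurewise/ZCA statistics
    #   for x_batch, y_batch in datagen.flow(x_train, y_train, batch_size=32):
    #       break  # each iteration yields a freshly augmented batch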
def __init__(self,
featurewise_center=False,
samplewise_center=False,
featurewis
|
henryiii/rootpy
|
rootpy/tree/chain.py
|
Python
|
gpl-3.0
| 10,795
| 0.000093
|
# Copyright 2012 the rootpy developers
# distributed under the terms of the GNU General Public License
from __future__ import absolute_import
import multiprocessing
import time
from .. import log; log = log[__name__]
from .. import QROOT
from ..io import root_open, DoesNotExist
from ..utils.extras import humanize_bytes
from ..context import preserve_current_directory
from ..plotting.graph import _GraphBase
from ..extern.six import string_types
from .filtering import EventFilterList
__all__ = [
'TreeChain',
'TreeQueue',
]
class BaseTreeChain(object):
def __init__(self, name,
treebuffer=None,
branches=None,
ignore_branches=None,
events=-1,
onfilechange=None,
read_branches_on_demand=False,
cache=False,
# 30 MB cache by default
cache_size=30000000,
learn_entries=10,
always_read=None,
ignore_unsupported=False,
filters=None):
self._name = name
self._buffer = treebuffer
self._branches = branches
self._ignore_branches = ignore_branches
self._tree = None
self._file = None
self._events = events
self._total_events = 0
self._ignore_unsupported = ignore_unsupported
self._initialized = False
if filters is None:
self._filters = EventFilterList([])
else:
self._filters = filters
if onfilechange is None:
onfilechange = []
self._filechange_hooks = onfilechange
self._read_branches_on_demand = read_branches_on_demand
self._use_cache = cache
self._cache_size = cache_size
self._learn_entries = learn_entries
self.weight = 1.
self.userdata = {}
if not self._rollover():
raise RuntimeError("unable to initialize TreeChain")
if always_read is None:
self._always_read = []
elif isinstance(always_read, string_types):
if '*' in always_read:
always_read = self._tree.glob(always_read)
else:
always_read = [always_read]
self.always_read(always_read)
else:
branches = []
for branch in always_read:
if '*' in branch:
branches += self._tree.glob(branch)
else:
branches.append(branch)
self.always_read(branches)
def __nonzero__(self):
return len(self) > 0
__bool__ = __nonzero__
def _next_file(self):
"""
Override in subclasses
"""
return None
def always_read(self, branches):
self._always_read = branches
self._tree.always_read(branches)
def reset(self):
if self._tree is not None:
self._tree = None
if self._file is not None:
self._file.Close()
self._file = None
def Draw(self, *args, **kwargs):
"""
Loop over subfiles, draw each, and sum the output into a single
histogram.
"""
self.reset()
output = None
while self._rollover():
if output is None:
# Make our own copy of the drawn histogram
output = self._tree.Draw(*args, **kwargs)
if output is not None:
output = output.Clone()
# Make it memory resident (histograms)
if hasattr(output, 'SetDirectory'):
output.SetDirectory(0)
else:
newoutput = self._tree.Draw(*args, **kwargs)
if newoutput is not None:
if isinstance(output, _GraphBase):
output.Append(newoutput)
else: # histogram
output += newoutput
return output
draw = Draw
def __getattr__(self, attr):
try:
return getattr(self._tree, attr)
except AttributeError:
raise AttributeError("{0} instance has no attribute '{1}'".format(
self.__class__.__name__, attr))
def __getitem__(self, item):
return self._tree.__getitem__(item)
def __contains__(self, branch):
return self._tree.__contains__(branch)
def __iter__(self):
passed_events = 0
self.reset()
while self._rollover():
entries = 0
total_entries = float(self._tree.GetEntries())
t1 = time.time()
t2 = t1
for entry in self._tree:
entries += 1
self.userdata = {}
if self._filters(entry):
yield entry
passed_events += 1
if self._events == passed_events:
break
if time.time() - t2 > 60:
entry_rate = int(entries / (time.time() - t1))
log.info(
"{0:d} entr{1} per second. "
"{2:.0f}% done current tree.".format(
entry_rate,
'ies' if entry_rate != 1 else 'y',
100 * entries / total_entries))
t2 = time.time()
if self._events == passed_events:
break
log.info("{0:d} entries per second".format(
int(entries / (time.time() - t1))))
log.info("read {0:d} bytes in {1:d} transactions".format(
self._file.GetBytesRead(),
self._file.GetReadCalls()))
self._total_events += entries
self._filters.finalize()
def _rollover(self):
filename = self._next_file()
if filename is None:
return False
log.info("current file: {0}".format(filename))
try:
with preserve_current_directory():
if self._file is not None:
self._file.Close()
self._file = root_open(filename)
except IOError:
self._file = None
log.warning("could not open file {0} (skipping)".format(filename))
return self._rollover()
try:
self._tree = self._file.Get(self._name)
except DoesNotExist:
log.warning(
"tree {0} does not e
|
xist in file {1} (skipping)".format(
self._name, filename))
return self._rollover()
if len(self._tree.GetListOfBranches()) == 0:
log.warning("tree with no branches in file {0} (skipping)".format(
filename))
return self._rollover()
if self._branches is not None:
self._tree.activate(self._branches, exclusive=True)
        if self._ignore_branches is not None:
self._tree.deactivate(self._ignore_branches, exclusive=False)
if self._buffer is None:
self._tree.create_buffer(self._ignore_unsupported)
self._buffer = self._tree._buffer
else:
self._tree.set_buffer(
self._buffer,
ignore_missing=True,
transfer_objects=True)
self._buffer = self._tree._buffer
if self._use_cache:
# enable TTreeCache for this tree
log.info(
"enabling a {0} TTreeCache for the current tree "
"({1:d} learning entries)".format(
humanize_bytes(self._cache_size), self._learn_entries))
self._tree.SetCacheSize(self._cache_size)
self._tree.SetCacheLearnEntries(self._learn_entries)
self._tree.read_branches_on_demand = self._read_branches_on_demand
self._tree.always_read(self._always_read)
self.weight = self._tree.GetWeight()
for target, args in self._filechange_hooks:
# run any user-defined functions
target(*args, name=self._name, file=self._file, tree=self._tree)
return True
class Tre
|
scripnichenko/nova
|
nova/tests/unit/api/openstack/test_common.py
|
Python
|
apache-2.0
| 26,175
| 0.00042
|
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suites for 'common' code used throughout the OpenStack HTTP API.
"""
import mock
import six
from testtools import matchers
import webob
import webob.exc
import webob.multidict
from nova.api.openstack import common
from nova.compute import task_states
from nova.compute import vm_states
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import utils
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
class LimiterTest(test.NoDBTestCase):
"""Unit tests for the `nova.api.openstack.common.limited` method which
takes in a list of items and, depending on the 'offset' and 'limit' GET
    params, returns a subset or complete set of the given items.
"""
def setUp(self):
"""Run before each test."""
super(LimiterTest, self).setUp()
self.tiny = range(1)
self.small = range(10)
self.medium = range(1000)
self.large = range(10000)
def test_limiter_offset_zero(self):
# Test offset key works with 0.
req = webob.Request.blank('/?offset=0')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_offset_medium(self):
# Test offset key works with a medium sized number.
req = webob.Request.blank('/?offset=10')
self.assertEqual(common.limited(self.tiny, req), [])
self.assertEqual(common.limited(self.small, req), self.small[10:])
self.assertEqual(common.limited(self.medium, req), self.medium[10:])
self.assertEqual(common.limited(self.large, req), self.large[10:1010])
def test_limiter_offset_over_max(self):
# Test offset key works with a number over 1000 (max_limit).
req = webob.Request.blank('/?offset=1001')
self.assertEqual(common.limited(self.tiny, req), [])
self.assertEqual(common.limited(self.small, req), [])
self.assertEqual(common.limited(self.medium, req), [])
self.assertEqual(
common.limited(self.large, req), self.large[1001:2001])
def test_limiter_offset_blank(self):
# Test offset key works with a blank offset.
req = webob.Request.blank('/?offset=')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_offset_bad(self):
# Test offset key works with a BAD offset.
req = webob.Request.blank(u'/?offset=\u0020aa')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_nothing(self):
# Test request with no offset or limit.
req = webob.Request.blank('/')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_zero(self):
# Test limit of zero.
req = webob.Request.blank('/?limit=0')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_medium(self):
# Test limit of 10.
req = webob.Request.blank('/?limit=10')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium[:10])
self.assertEqual(common.limited(self.large, req), self.large[:10])
def test_limiter_limit_over_max(self):
# Test limit of 3000.
req = webob.Request.blank('/?limit=3000')
self.assertEqual(common.limited(self.tiny, req), self.tiny)
self.assertEqual(common.limited(self.small, req), self.small)
self.assertEqual(common.limited(self.medium, req), self.medium)
self.assertEqual(common.limited(self.large, req), self.large[:1000])
def test_limiter_limit_and_offset(self):
# Test request with both limit and offset.
items = range(2000)
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(common.limited(items, req), items[1:4])
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(common.limited(items, req), items[3:1003])
req = webob.Request.blank('/?offset=3&limit=1500')
self.assertEqual(common.limited(items, req), items[3:1003])
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual(common.limited(items, req), [])
def test_limiter_custom_max_limit(self):
# Test a max_limit other than 1000.
items = range(2000)
req = webob.Request.blank('/?offset=1&limit=3')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[1:4])
req = webob.Request.blank('/?offset=3&limit=0')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[3:])
req = webob.Request.blank('/?offset=3&limit=2500')
self.assertEqual(
common.limited(items, req, max_limit=2000), items[3:])
req = webob.Request.blank('/?offset=3000&limit=10')
self.assertEqual(common.limited(items, req, max_limit=2000), [])
def test_limiter_negative_limit(self):
# Test a negative limit.
req = webob.Request.blank('/?limit=-3000')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
def test_limiter_negative_offset(self):
# Test a negative offset.
req = webob.Request.blank('/?offset=-30')
self.assertRaises(
webob.exc.HTTPBadRequest, common.limited, self.tiny, req)
class SortParamUtilsTest(test.NoDBTestCase):
def test_get_sort_params_defaults(self):
'''Verifies the default sort key and direction.'''
sort_keys, sort_dirs = common.get_sort_params({})
self.assertEqual(['created_at'], sort_keys)
self.assertEqual(['desc'], sort_dirs)
def test_get_sort_params_override_defaults(self):
        '''Verifies that the defaults can be overridden.'''
sort_keys, sort_dirs = common.get_sort_params({}, default_key='key1',
default_dir='dir1')
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
sort_keys, sort_dirs = common.get_sort_params({}, default_key=None,
default_dir=None)
self.assertEqual([], sort_keys)
self.assertEqual([], sort_dirs)
def test_get_sort_params_single_value(self):
'''Verifies a single sort key and direction.'''
params = webob.multidict.MultiDict()
params.add('sort_key', 'key1')
params.add('sort_dir', 'dir1')
sort_keys, sort_dirs = common.get_sort_params(params)
self.assertEqual(['key1'], sort_keys)
self.assertEqual(['dir1'], sort_dirs)
def test_get_sort_params_single_with_default(self):
|
pianomania/infoGAN-pytorch
|
trainer.py
|
Python
|
mit
| 4,627
| 0.007348
|
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
import torch.autograd as autograd
import torchvision.datasets as dset
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
from torchvision.utils import save_image
import numpy as np
class log_gaussian:
def __call__(self, x, mu, var):
logli = -0.5*(var.mul(2*np.pi)+1e-6).log() - \
(x-mu).pow(2).div(var.mul(2.0)+1e-6)
return logli.sum(1).mean().mul(-1)
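# The callable above evaluates the negative mean Gaussian log-likelihood,
#   -E[log N(x; mu, var)] = E[0.5*log(2*pi*var) + (x - mu)^2 / (2*var)],
# used in train() as the reconstruction loss for the continuous latent codes.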
class Trainer:
def __init__(self, G, FE, D, Q):
self.G = G
self.FE = FE
self.D = D
self.Q = Q
self.batch_size = 100
def _noise_sample(self, dis_c, con_c, noise, bs):
idx = np.random.randint(10, size=bs)
c = np.zeros((bs, 10))
c[range(bs),idx] = 1.0
dis_c.data.copy_(torch.Tensor(c))
con_c.data.uniform_(-1.0, 1.0)
noise.data.uniform_(-1.0, 1.0)
z = torch.cat([noise, dis_c, con_c], 1).view(-1, 74, 1, 1)
return z, idx
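    # Latent layout sketch: z concatenates 62 uniform noise dims, a 10-way
    # one-hot categorical code, and 2 continuous codes in [-1, 1] -- 74 dims
    # in total, reshaped to (-1, 74, 1, 1) for the convolutional generator.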
def train(self):
real_x = torch.FloatTensor(self.batch_size, 1, 28, 28).cuda()
label = torch.FloatTensor(self.batch_size, 1).cuda()
dis_c = torch.FloatTensor(self.batch_size, 10).cuda()
con_c = torch.FloatTensor(self.batch_size, 2).cuda()
noise = torch.FloatTensor(self.batch_size, 62).cuda()
real_x = Variable(real_x)
label = Variable(label, requires_grad=False)
dis_c = Variable(dis_c)
con_c = Variable(con_c)
noise = Variable(noise)
criterionD = nn.BCELoss().cuda()
criterionQ_dis = nn.CrossEntropyLoss().cuda()
criterionQ_con = log_gaussian()
optimD = optim.Adam([{'params':self.FE.parameters()}, {'params':self.D.parameters()}], lr=0.0002, betas=(0.5, 0.99))
        optimG = optim.Adam([{'params':self.G.parameters()}, {'params':self.Q.parameters()}], lr=0.001, betas=(0.5, 0.99))
dataset = dset.MNIST('./dataset', transform=transforms.ToTensor(), download=True)
dataloader = DataLoader(dataset, batch_size=self.batch_size, shuffle=True, num_workers=1)
# fixed random variables
c = np.linspace(-1, 1, 10).reshape(1, -1)
c = np.repeat(c, 10, 0).reshape(-1, 1)
c1 = np.hstack([c, np.zeros_like(c)])
c2 = np.hstack([np.zeros_like(c), c])
idx = np.arange(10).repeat(10)
one_hot = np.zeros((100, 10))
one_hot[range(100), idx] = 1
fix_noise = torch.Tensor(100, 62).uniform_(-1, 1)
for epoch in range(100):
for num_iters, batch_data in enumerate(dataloader, 0):
# real part
optimD.zero_grad()
x, _ = batch_data
bs = x.size(0)
real_x.data.resize_(x.size())
label.data.resize_(bs, 1)
dis_c.data.resize_(bs, 10)
con_c.data.resize_(bs, 2)
noise.data.resize_(bs, 62)
real_x.data.copy_(x)
fe_out1 = self.FE(real_x)
probs_real = self.D(fe_out1)
label.data.fill_(1)
loss_real = criterionD(probs_real, label)
loss_real.backward()
# fake part
z, idx = self._noise_sample(dis_c, con_c, noise, bs)
fake_x = self.G(z)
fe_out2 = self.FE(fake_x.detach())
probs_fake = self.D(fe_out2)
label.data.fill_(0)
loss_fake = criterionD(probs_fake, label)
loss_fake.backward()
D_loss = loss_real + loss_fake
optimD.step()
# G and Q part
optimG.zero_grad()
fe_out = self.FE(fake_x)
probs_fake = self.D(fe_out)
label.data.fill_(1.0)
reconstruct_loss = criterionD(probs_fake, label)
q_logits, q_mu, q_var = self.Q(fe_out)
class_ = torch.LongTensor(idx).cuda()
target = Variable(class_)
dis_loss = criterionQ_dis(q_logits, target)
con_loss = criterionQ_con(con_c, q_mu, q_var)*0.1
G_loss = reconstruct_loss + dis_loss + con_loss
G_loss.backward()
optimG.step()
if num_iters % 100 == 0:
print('Epoch/Iter:{0}/{1}, Dloss: {2}, Gloss: {3}'.format(
epoch, num_iters, D_loss.data.cpu().numpy(),
G_loss.data.cpu().numpy())
)
noise.data.copy_(fix_noise)
dis_c.data.copy_(torch.Tensor(one_hot))
con_c.data.copy_(torch.from_numpy(c1))
z = torch.cat([noise, dis_c, con_c], 1).view(-1, 74, 1, 1)
x_save = self.G(z)
save_image(x_save.data, './tmp/c1.png', nrow=10)
con_c.data.copy_(torch.from_numpy(c2))
z = torch.cat([noise, dis_c, con_c], 1).view(-1, 74, 1, 1)
x_save = self.G(z)
save_image(x_save.data, './tmp/c2.png', nrow=10)
|
nkgilley/home-assistant
|
homeassistant/components/mqtt/__init__.py
|
Python
|
apache-2.0
| 44,531
| 0.001011
|
"""Support for MQTT message handling."""
import asyncio
from functools import partial, wraps
import inspect
from itertools import groupby
import json
import logging
from operator import attrgetter
import os
import ssl
from typing import Any, Callable, List, Optional, Union
import attr
import certifi
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import websocket_api
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_DEVICE,
CONF_NAME,
CONF_PASSWORD,
CONF_PAYLOAD,
CONF_PORT,
CONF_PROTOCOL,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import Event, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, Unauthorized
from homeassistant.helpers import config_validation as cv, event, template
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceDataType
from homeassistant.loader import bind_hass
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.logging import catch_log_exception
# Loading the config flow file will register the flow
from . import config_flow # noqa: F401 pylint: disable=unused-import
from . import debug_info, discovery
from .const import (
ATTR_DISCOVERY_HASH,
ATTR_DISCOVERY_TOPIC,
ATTR_PAYLOAD,
ATTR_QOS,
ATTR_RETAIN,
    ATTR_TOPIC,
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_DISCOVERY,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
CONF_WILL_MESSAGE,
DEFAULT_DISCOVERY,
DEFAULT_QOS,
DEFAULT_RETAIN,
MQTT_CONNECTED,
MQTT_DISCONNECTED,
PROTOCOL_311,
)
from .debug_info import log_messages
from .discovery import MQTT_DISCOVERY_UPDATED, clear_discovery_hash, set_discovery_hash
from .models import Message, MessageCallbackType, PublishPayloadType
from .subscription import async_subscribe_topics, async_unsubscribe_topics
from .util import _VALID_QOS_SCHEMA, valid_publish_topic, valid_subscribe_topic
_LOGGER = logging.getLogger(__name__)
DOMAIN = "mqtt"
DATA_MQTT = "mqtt"
DATA_MQTT_CONFIG = "mqtt_config"
SERVICE_PUBLISH = "publish"
SERVICE_DUMP = "dump"
CONF_DISCOVERY_PREFIX = "discovery_prefix"
CONF_KEEPALIVE = "keepalive"
CONF_CERTIFICATE = "certificate"
CONF_CLIENT_KEY = "client_key"
CONF_CLIENT_CERT = "client_cert"
CONF_TLS_INSECURE = "tls_insecure"
CONF_TLS_VERSION = "tls_version"
CONF_COMMAND_TOPIC = "command_topic"
CONF_AVAILABILITY_TOPIC = "availability_topic"
CONF_PAYLOAD_AVAILABLE = "payload_available"
CONF_PAYLOAD_NOT_AVAILABLE = "payload_not_available"
CONF_JSON_ATTRS_TOPIC = "json_attributes_topic"
CONF_JSON_ATTRS_TEMPLATE = "json_attributes_template"
CONF_UNIQUE_ID = "unique_id"
CONF_IDENTIFIERS = "identifiers"
CONF_CONNECTIONS = "connections"
CONF_MANUFACTURER = "manufacturer"
CONF_MODEL = "model"
CONF_SW_VERSION = "sw_version"
CONF_VIA_DEVICE = "via_device"
CONF_DEPRECATED_VIA_HUB = "via_hub"
PROTOCOL_31 = "3.1"
DEFAULT_PORT = 1883
DEFAULT_KEEPALIVE = 60
DEFAULT_PROTOCOL = PROTOCOL_311
DEFAULT_DISCOVERY_PREFIX = "homeassistant"
DEFAULT_TLS_PROTOCOL = "auto"
DEFAULT_PAYLOAD_AVAILABLE = "online"
DEFAULT_PAYLOAD_NOT_AVAILABLE = "offline"
ATTR_PAYLOAD_TEMPLATE = "payload_template"
MAX_RECONNECT_WAIT = 300 # seconds
CONNECTION_SUCCESS = "connection_success"
CONNECTION_FAILED = "connection_failed"
CONNECTION_FAILED_RECOVERABLE = "connection_failed_recoverable"
def validate_device_has_at_least_one_identifier(value: ConfigType) -> ConfigType:
"""Validate that a device info entry has at least one identifying value."""
if not value.get(CONF_IDENTIFIERS) and not value.get(CONF_CONNECTIONS):
raise vol.Invalid(
"Device must have at least one identifying value in "
"'identifiers' and/or 'connections'"
)
return value
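# Validation sketch (illustrative): an entry with only a name is rejected,
# while a single identifier or connection suffices.
#
# validate_device_has_at_least_one_identifier(
#     {CONF_IDENTIFIERS: ["aabbccddeeff"]}
# )  # returns the config unchanged
# validate_device_has_at_least_one_identifier({CONF_NAME: "kitchen"})  # raises vol.Invalid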
CLIENT_KEY_AUTH_MSG = (
"client_key and client_cert must both be present in "
"the MQTT broker configuration"
)
MQTT_WILL_BIRTH_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TOPIC): valid_publish_topic,
vol.Required(ATTR_PAYLOAD, CONF_PAYLOAD): cv.string,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
)
def embedded_broker_deprecated(value):
"""Warn user that embedded MQTT broker is deprecated."""
    _LOGGER.warning(
        "The embedded MQTT broker has been deprecated and will stop working "
        "after June 5th, 2019. Use an external broker instead. For "
        "instructions, see https://www.home-assistant.io/docs/mqtt/broker"
    )
return value
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.deprecated(CONF_TLS_VERSION, invalidation_version="0.115"),
vol.Schema(
{
vol.Optional(CONF_CLIENT_ID): cv.string,
vol.Optional(CONF_KEEPALIVE, default=DEFAULT_KEEPALIVE): vol.All(
vol.Coerce(int), vol.Range(min=15)
),
vol.Optional(CONF_BROKER): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_CERTIFICATE): vol.Any("auto", cv.isfile),
vol.Inclusive(
CONF_CLIENT_KEY, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Inclusive(
CONF_CLIENT_CERT, "client_key_auth", msg=CLIENT_KEY_AUTH_MSG
): cv.isfile,
vol.Optional(CONF_TLS_INSECURE): cv.boolean,
vol.Optional(
CONF_TLS_VERSION, default=DEFAULT_TLS_PROTOCOL
): vol.Any("auto", "1.0", "1.1", "1.2"),
vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.All(
cv.string, vol.In([PROTOCOL_31, PROTOCOL_311])
),
vol.Optional(CONF_WILL_MESSAGE): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(CONF_BIRTH_MESSAGE): MQTT_WILL_BIRTH_SCHEMA,
vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean,
# discovery_prefix must be a valid publish topic because if no
# state topic is specified, it will be created with the given prefix.
vol.Optional(
CONF_DISCOVERY_PREFIX, default=DEFAULT_DISCOVERY_PREFIX
): valid_publish_topic,
}
),
)
},
extra=vol.ALLOW_EXTRA,
)
SCHEMA_BASE = {vol.Optional(CONF_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA}
MQTT_AVAILABILITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_AVAILABILITY_TOPIC): valid_subscribe_topic,
vol.Optional(
CONF_PAYLOAD_AVAILABLE, default=DEFAULT_PAYLOAD_AVAILABLE
): cv.string,
vol.Optional(
CONF_PAYLOAD_NOT_AVAILABLE, default=DEFAULT_PAYLOAD_NOT_AVAILABLE
): cv.string,
}
)
MQTT_ENTITY_DEVICE_INFO_SCHEMA = vol.All(
cv.deprecated(CONF_DEPRECATED_VIA_HUB, CONF_VIA_DEVICE),
vol.Schema(
{
vol.Optional(CONF_IDENTIFIERS, default=list): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_CONNECTIONS, default=list): vol.All(
cv.ensure_list, [vol.All(vol.Length(2), [cv.string])]
),
vol.Optional(CONF_MANUFACTURER): cv.string,
vol.Optional(CONF_MODEL): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_SW_VERSION): cv.string,
vol.Optional(CONF_VIA_DEVICE): cv.string,
}
),
validate_device_has_at_least_one_identifier,
)
MQTT_JSON_ATTRS_SCHEMA = vol.Schema(
{
vol.O
|
mufid/berkilau
|
ws/CSUIBotClass2014/util/plotter2.py
|
Python
|
mit
| 1,816
| 0.010463
|
import matplotlib.pyplot as pl
import numpy as np
import math
from matplotlib.collections import LineCollection
from matplotlib.colors import colorConverter
def plot(X, m, x_star, t, z_t):
fig = pl.figure(figsize=(10,10))
# Draw the grid first
ax = pl.axes()
ax.set_xlim(-4,20)
ax.set_ylim(-4,20)
ax.xaxis.set_major_locator(pl.MultipleLocator(5.0))
    ax.xaxis.set_minor_locator(pl.MultipleLocator(1.0))
    ax.yaxis.set_major_locator(pl.MultipleLocator(5.0))
ax.yaxis.set_minor_locator(pl.MultipleLocator(1.0))
ax.grid(which='major', axis='x', linewidth=0.75, linestyle='-', color='0.75')
ax.grid(which='minor', axis='x', linewidth=0.25, linestyle='-', color='0.75')
ax.grid(which='major', axis='y', linewidth=0.75, linestyle='-', color='0.75')
ax.grid(which='minor', axis='y', linewidth=0.25, linestyle='-', color='0.75')
# Draw map
for y, row in enumerate(m):
for x, cell in enumerate(row):
if (cell == 'W'):
rect = pl.Rectangle((x,y), 1, 1, fill=True,color='#cacaca')
ax.add_patch(rect)
# Draw the robot and its direction
x,y,theta = x_star['x'], x_star['y'], x_star['theta']
dx = 1 * math.cos(theta)
dy = 1 * math.sin(theta)
ax.arrow(x,y,dx,dy, head_width=.4, head_length=0.5, length_includes_head=True)
circle = pl.Circle((x, y), radius=0.35, fc='y')
ax.add_patch(circle)
# Draw information
directions = 'n nw w sw s se e ne'.split()
title_arr = []
#print z_t
for direction in directions:
#print z_t[direction]
title_arr.append("%s: %4.2f" % (direction, z_t[direction]))
ax.set_title('; '.join(title_arr))
#print X
xs = [xx[0]['x'] for xx in X]
ys = [xx[0]['y'] for xx in X]
pl.scatter(xs, ys)
return fig
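# Usage sketch (hypothetical map and state; shapes follow the code above):
# m is a grid of characters with 'W' marking walls, x_star carries the robot
# pose, z_t maps eight compass directions to readings, and X holds
# (particle, weight) pairs.
#
# m = ["WWWW", "W..W", "WWWW"]
# x_star = {'x': 1.5, 'y': 1.5, 'theta': 0.0}
# z_t = dict.fromkeys('n nw w sw s se e ne'.split(), 1.0)
# X = [({'x': 1.5, 'y': 1.5, 'theta': 0.0}, 1.0)]
# fig = plot(X, m, x_star, 0, z_t)
# fig.savefig('frame0.png')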
|
kimjinyong/i2nsf-framework
|
Hackathon-105/jetconf/tests/test_yangson.py
|
Python
|
apache-2.0
| 1,077
| 0.001857
|
import json

from yangson.datamodel import DataModel
from yangson.instance import InstanceRoute
module_dir = "../yang-data/"
yang_library_file = "../yang-data/yang-library-data.json"
with open(yang_library_file) as ylfile:
yl = ylfile.read()
dm = DataModel(yl, [module_dir])
with open("data.json", "rt") as fp:
json_data = dm.from_raw(json.load(fp))
handler_sn = dm.get_data_node("/dns-server:dns-server-state/zone")
handler_generated = [
{
'domain': 'example.com',
'class': 'IN',
'server-role': 'master',
'serial': 2010111201
}
]
cooked_val = handler_sn.from_raw(handler_generated)
ii_str_abs = "/dns-server:dns-server-state/zone=example.com/class"
ii_abs = dm.parse_resource_id(ii_str_abs)
print("Absolute II: {}".format(ii_abs))
ii_rel = InstanceRoute(ii_abs[2:])
print("Relative II (hardcoded for now): {}".format(ii_rel))
handler_n = handler_sn.orphan_instance(cooked_val)
n_desired = handler_n.goto(ii_rel)
# crashes here
print(n_desired.value)
n = handler_n[0]
print(n.value)
for i in n:
print(i)
print(type(i))
|
brain-research/mirage-rl-qprop
|
examples/nop_cartpole.py
|
Python
|
mit
| 665
| 0.001504
|
from rllab.algos.nop import NOP
from rllab.baselines.zero_baseline import ZeroBaseline
from rllab.envs.box2d.cartpole_env import CartpoleEnv
from rllab.envs.normalized_env import normalize
from rllab.policies.uniform_control_policy import UniformControlPolicy
env = normalize(CartpoleEnv())
policy = UniformControlPolicy(
env_spec=env.spec,
    # The neural network policy should have two hidden layers, each
    # with 32 hidden units.
)
baseline = ZeroBaseline(env_spec=env.spec)
algo = NOP(
env=env,
policy=policy,
baseline=baseline,
batch_size=4000,
max_path_length=100,
n_itr=40,
discount=0.99,
step_size=0.01,
)
algo.train()
|
cloudconductor/cloud_conductor_gui
|
gui_app/api_models/blueprint_histories.py
|
Python
|
apache-2.0
| 762
| 0
|
from ..utils import ApiUtil
from ..utils.ApiUtil import Url
class BlueprintHistories:
def __init__(self, code, auth_token, blueprint_id):
self.code = code
self.auth_token = auth_token
self.blueprint_id = blueprint_id
url = Url.blueprintHistoriesList(self.blueprint_id, Url.url)
data = {
            'auth_token': self.auth_token,
}
self.blueprint_histories = ApiUtil.requestGet(url, self.code, data)
def get_blueprint_history(self, id):
for blueprint_history in self:
if blueprint_history['id'] == id:
return blueprint_history
def __iter__(self):
for blueprint_history in self.blueprint_histories:
yield(blueprint_history)
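# Usage sketch (hypothetical credentials and ids):
#
# histories = BlueprintHistories('my-code', 'secret-token', 42)
# for history in histories:
#     print(history['id'])
# latest = histories.get_blueprint_history(7)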
|
SeiryuZ/HemeWeb
|
src/jobs/__init__.py
|
Python
|
lgpl-3.0
| 43
| 0
|
default_app_config = 'jobs.apps.JobConfig'
|
prataprc/eazytext
|
eazytext/extension/ttlpygment.py
|
Python
|
gpl-3.0
| 3,677
| 0.032363
|
# This file is subject to the terms and conditions defined in
# file 'LICENSE', which is part of this source code package.
# Copyright (c) 2009 SKR Farms (P) LTD.
# -*- coding: utf-8 -*-
from pygments.lexer import RegexLexer
from pygments.token import *
try :
from tayra.lexer import TTLLexer
except :
TTLLexer = None
if TTLLexer :
class TemplateLexer( RegexLexer ):
name = 'ttl'
aliases = ['tayra-template', 'tayratemplate', 'ttl']
filenames = ['*.ttl']
comm1 = [
( TTLLexer.escseq, Punctuation ),
]
tokens = {
'root': comm1 + [
( TTLLexer.commentline, Comment ),
( TTLLexer.statement, Generic.Strong ),
( TTLLexer.pass_, Generic.Strong ),
( TTLLexer.emptyspace, Text ),
( TTLLexer.indent, Text ),
( TTLLexer.nl, Text ),
( TTLLexer.spchars, Text ),
( TTLLexer.text, Text ),
# States
( TTLLexer.commentopen, Comment, 'comment' ),
( TTLLexer.filteropen, Operator, 'filter' ),
( TTLLexer.openexprs, Operator, 'exprs' ),
( TTLLexer.tagopen, Keyword, 'tag' ),
# Directives
( TTLLexer.doctype, Generic.Declaration ),
( TTLLexer.charset, Generic.Declaration ),
( TTLLexer.body, Generic.Declaration ),
( TTLLexer.importas, Generic.Declaration ),
( TTLLexer.inherit, Generic.Declaration ),
( TTLLexer.implement, Generic.Declaration ),
( TTLLexer.use, Generic.Declaration ),
# Blocks
( TTLLexer.interface, Name.Function ),
( TTLLexer.function, Name.Function ),
( TTLLexer.if_, Keyword ),
( TTLLexer.elif_, Keyword ),
( TTLLexer.else_, Keyword ),
( TTLLexer.for_, Keyword ),
( TTLLexer.while_, Keyword ),
],
'tag' : comm1 + [
( TTLLexer.squote, Operator ),
( TTLLexer.dquote, Operator ),
( TTLLexer.equal, Operator ),
( TTLLexer.nl, Text ),
( TTLLexer.space, Text ),
( TTLLexer.atom, Keyword.Type ),
( TTLLexer.tag_text, Text ),
( TTLLexer.tag_spchars, Text ),
# Open new state
( TTLLexer.openexprs, Operator, 'exprs' ),
( TTLLexer.openbrace, Operator, 'style' ),
( TTLLexer.tagend, Keyword, '#pop' ),
( TTLLexer.tagclose, Keyword, '#pop' ),
],
'exprs': comm1 + [
( TTLLexer.string, String ),
( TTLLexer.closebrace, Operator, '#pop' ),
( TTLLexer.nl, Text ),
( TTLLexer.text, Text ),
],
'style': comm1 + [
( TTLLexer.string, String ),
( TTLLexer.nl, Text ),
( TTLLexer.style_text, Text ),
( TTLLexer.style_spchars, Text ),
( TTLLexer.openexprs, Operator, 'exprs' ),
( TTLLexer.closebrace, Operator, '#pop' ),
],
'comment': [
( TTLLexer.commenttext, Comment ),
( TTLLexer.commentclose, Comment, '#pop' ),
],
'filter': [
( TTLLexer.filtertext, Text ),
( TTLLexer.filterclose, Operator, '#pop' ),
],
}
else :
class TemplateLexer( RegexLexer ): pass
|
bbirand/python-driver
|
tests/integration/standard/test_prepared_statements.py
|
Python
|
apache-2.0
| 13,413
| 0.001342
|
# Copyright 2013-2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.integration import use_singledc, PROTOCOL_VERSION
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from cassandra import InvalidRequest
from cassandra.cluster import Cluster
from cassandra.query import PreparedStatement, UNSET_VALUE
def setup_module():
use_singledc()
class PreparedStatementTests(unittest.TestCase):
def test_basic(self):
"""
Test basic PreparedStatement usage
"""
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
session.execute(
"""
CREATE KEYSPACE preparedtests
WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}
""")
session.set_keyspace("preparedtests")
session.execute(
"""
CREATE TABLE cf0 (
a text,
b text,
c text,
PRIMARY KEY (a, b)
)
""")
prepared = session.prepare(
"""
INSERT INTO cf0 (a, b, c) VALUES (?, ?, ?)
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind(('a', 'b', 'c'))
session.execute(bound)
prepared = session.prepare(
"""
SELECT * FROM cf0 WHERE a=?
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind(('a'))
results = session.execute(bound)
self.assertEqual(results, [('a', 'b', 'c')])
# test with new dict binding
prepared = session.prepare(
"""
INSERT INTO cf0 (a, b, c) VALUES (?, ?, ?)
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind({
'a': 'x',
'b': 'y',
'c': 'z'
})
session.execute(bound)
prepared = session.prepare(
"""
SELECT * FROM cf0 WHERE a=?
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind({'a': 'x'})
results = session.execute(bound)
self.assertEqual(results, [('x', 'y', 'z')])
cluster.shutdown()
def test_missing_primary_key(self):
"""
Ensure an InvalidRequest is thrown
when prepared statements are missing the primary key
"""
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
prepared = session.prepare(
"""
INSERT INTO test3rf.test (v) VALUES (?)
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind((1,))
self.assertRaises(InvalidRequest, session.execute, bound)
cluster.shutdown()
def test_missing_primary_key_dicts(self):
"""
Ensure an InvalidRequest is thrown
when prepared statements are missing the primary key
with dict bindings
"""
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
prepared = session.prepare(
"""
            INSERT INTO test3rf.test (v) VALUES (?)
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind({'v': 1})
        self.assertRaises(InvalidRequest, session.execute, bound)
cluster.shutdown()
def test_too_many_bind_values(self):
"""
Ensure a ValueError is thrown when attempting to bind too many variables
"""
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
prepared = session.prepare(
"""
INSERT INTO test3rf.test (v) VALUES (?)
""")
self.assertIsInstance(prepared, PreparedStatement)
self.assertRaises(ValueError, prepared.bind, (1, 2))
cluster.shutdown()
def test_too_many_bind_values_dicts(self):
"""
Ensure an error is thrown when attempting to bind the wrong values
with dict bindings
"""
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
prepared = session.prepare(
"""
INSERT INTO test3rf.test (k, v) VALUES (?, ?)
""")
self.assertIsInstance(prepared, PreparedStatement)
# too many values
self.assertRaises(ValueError, prepared.bind, {'k': 1, 'v': 2, 'v2': 3})
# right number, but one does not belong
if PROTOCOL_VERSION < 4:
# pre v4, the driver bails with key error when 'v' is found missing
self.assertRaises(KeyError, prepared.bind, {'k': 1, 'v2': 3})
else:
# post v4, the driver uses UNSET_VALUE for 'v' and bails when 'v2' is unbound
self.assertRaises(ValueError, prepared.bind, {'k': 1, 'v2': 3})
# also catch too few variables with dicts
self.assertIsInstance(prepared, PreparedStatement)
if PROTOCOL_VERSION < 4:
self.assertRaises(KeyError, prepared.bind, {})
else:
# post v4, the driver attempts to use UNSET_VALUE for unspecified keys
self.assertRaises(ValueError, prepared.bind, {})
cluster.shutdown()
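    # Illustrative sketch (not part of the original tests): on protocol v4+,
    # keys omitted from a dict bind are implicitly bound as UNSET_VALUE, e.g.
    # for a statement prepared as "INSERT INTO test3rf.test (k, v) VALUES (?, ?)":
    #   bound = prepared.bind({'k': 1})   # 'v' is implicitly UNSET_VALUE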
def test_none_values(self):
"""
Ensure binding None is handled correctly
"""
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
prepared = session.prepare(
"""
INSERT INTO test3rf.test (k, v) VALUES (?, ?)
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind((1, None))
session.execute(bound)
prepared = session.prepare(
"""
SELECT * FROM test3rf.test WHERE k=?
""")
self.assertIsInstance(prepared, PreparedStatement)
bound = prepared.bind((1,))
results = session.execute(bound)
self.assertEqual(results[0].v, None)
cluster.shutdown()
def test_unset_values(self):
"""
Test to validate that UNSET_VALUEs are bound, and have the expected effect
Prepare a statement and insert all values. Then follow with execute excluding
parameters. Verify that the original values are unaffected.
@since 2.6.0
@jira_ticket PYTHON-317
        @expected_result UNSET_VALUE is implicitly added to bind parameters, and properly encoded, leaving unset values unaffected.
@test_category prepared_statements:binding
"""
if PROTOCOL_VERSION < 4:
raise unittest.SkipTest("Binding UNSET values is not supported in protocol version < 4")
cluster = Cluster(protocol_version=PROTOCOL_VERSION)
session = cluster.connect()
# table with at least two values so one can be used as a marker
session.execute("CREATE TABLE IF NOT EXISTS test1rf.test_unset_values (k int PRIMARY KEY, v0 int, v1 int)")
insert = session.prepare("INSERT INTO test1rf.test_unset_values (k, v0, v1) VALUES (?, ?, ?)")
select = session.prepare("SELECT * FROM test1rf.test_unset_values WHERE k=?")
bind_expected = [
# initial condition
((0, 0, 0), (0, 0, 0)),
# unset implicit
((0, 1,), (0, 1, 0)),
({'k': 0, 'v0': 2}, (0, 2, 0)),
({'k': 0, '
|
bgris/ODL_bgris
|
lib/python3.5/site-packages/pylint/pyreverse/diagrams.py
|
Python
|
gpl-3.0
| 8,634
| 0.001042
|
# Copyright (c) 2004-2016 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
"""diagram objects
"""
import astroid
from pylint.pyreverse.utils import is_interface, FilterMixIn
from pylint.checkers.utils import decorated_with_property
class Figure(object):
"""base class for counter handling"""
class Relationship(Figure):
"""a relation ship from an object in the diagram to another
"""
def __init__(self, from_object, to_object, relation_type, name=None):
Figure.__init__(self)
self.from_object = from_object
self.to_object = to_object
self.type = relation_type
self.name = name
class DiagramEntity(Figure):
"""a diagram object, i.e. a label associated to an astroid node
"""
def __init__(self, title='No name', node=None):
Figure.__init__(self)
self.title = title
self.node = node
class ClassDiagram(Figure, FilterMixIn):
"""main class diagram handling
"""
TYPE = 'class'
def __init__(self, title, mode):
FilterMixIn.__init__(self, mode)
Figure.__init__(self)
self.title = title
self.objects = []
self.relationships = {}
self._nodes = {}
self.depends = []
def get_relationships(self, role):
# sorted to get predictable (hence testable) results
return sorted(self.relationships.get(role, ()),
key=lambda x: (x.from_object.fig_id, x.to_object.fig_id))
def add_relationship(self, from_object, to_object,
relation_type, name=None):
"""create a relation ship
"""
rel = Relationship(from_object, to_object, relation_type, name)
self.relationships.setdefault(relation_type, []).append(rel)
def get_relationship(self, from_object, relation_type):
"""return a relation ship or None
"""
for rel in self.relationships.get(relation_type, ()):
if rel.from_object is from_object:
return rel
raise KeyError(relation_type)
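    # Illustrative usage (not in the original): a missing relationship surfaces
    # as KeyError, so callers would typically write:
    #   try:
    #       rel = diagram.get_relationship(obj, 'specialization')
    #   except KeyError:
    #       rel = None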
def get_attrs(self, node):
"""return visible attributes, possibly with class name"""
attrs = []
properties = [
(n, m) for n, m in node.items()
if isinstance(m, astroid.FunctionDef)
and decorated_with_property(m)
]
for node_name, ass_nodes in list(node.instance_attrs_type.items()) + \
list(node.locals_type.items()) + properties:
if not self.show_attr(node_name):
continue
names = self.class_names(ass_nodes)
if names:
node_name = "%s : %s" % (node_name, ", ".join(names))
attrs.append(node_name)
return sorted(attrs)
def get_methods(self, node):
"""return visible methods"""
methods = [
m for m in node.values()
if isinstance(m, astroid.FunctionDef)
and not decorated_with_property(m)
and self.show_attr(m.name)
]
return sorted(methods, key=lambda n: n.name)
def add_object(self, title, node):
"""create a diagram object
"""
assert node not in self._nodes
ent = DiagramEntity(title, node)
self._nodes[node] = ent
self.objects.append(ent)
def class_names(self, nodes):
"""return class names if needed in diagram"""
names = []
for ass_node in nodes:
if isinstance(ass_node, astroid.Instance):
ass_node = ass_node._proxied
if isinstance(ass_node, astroid.ClassDef) \
and hasattr(ass_node, "name") and not self.has_node(ass_node):
if ass_node.name not in names:
ass_name = ass_node.name
names.append(ass_name)
return names
def nodes(self):
"""return the list of underlying nodes
"""
return self._nodes.keys()
def has_node(self, node):
"""return true if the given node is included in the diagram
"""
return node in self._nodes
def object_from_node(self, node):
"""return the diagram object mapped to node
"""
return self._nodes[node]
def classes(self):
"""return all class nodes in the diagram"""
return [o for o in self.objects if isinstance(o.node, astroid.ClassDef)]
def classe(self, name):
"""return a class by its name, raise KeyError if not found
"""
for klass in self.classes():
if klass.node.name == name:
return klass
raise KeyError(name)
def extract_relationships(self):
"""extract relation ships between nodes in the diagram
"""
for obj in self.classes():
node = obj.node
obj.attrs = self.get_attrs(node)
obj.methods = self.get_methods(node)
# shape
if is_interface(node):
obj.shape = 'interface'
else:
obj.shape = 'class'
# inheritance link
for par_node in node.ancestors(recurs=False):
try:
par_obj = self.object_from_node(par_node)
self.add_relationship(obj, par_obj, 'specialization')
except KeyError:
continue
# implements link
for impl_node in node.implements:
try:
impl_obj = self.object_from_node(impl_node)
self.add_relationship(obj, impl_obj, 'implements')
except KeyError:
                    continue
# associations link
for name, values in list(node.instance_attrs_type.items()) + \
list(node.locals_type.items()):
for value in values:
if value is astroid.YES:
continue
if isinstance(value, astroid.Instance):
value = value._proxied
try:
                        ass_obj = self.object_from_node(value)
self.add_relationship(ass_obj, obj, 'association', name)
except KeyError:
continue
class PackageDiagram(ClassDiagram):
"""package diagram handling
"""
TYPE = 'package'
def modules(self):
"""return all module nodes in the diagram"""
return [o for o in self.objects if isinstance(o.node, astroid.Module)]
def module(self, name):
"""return a module by its name, raise KeyError if not found
"""
for mod in self.modules():
if mod.node.name == name:
return mod
raise KeyError(name)
def get_module(self, name, node):
"""return a module by its name, looking also for relative imports;
raise KeyError if not found
"""
for mod in self.modules():
mod_name = mod.node.name
if mod_name == name:
return mod
#search for fullname of relative import modules
package = node.root().name
if mod_name == "%s.%s" % (package, name):
return mod
if mod_name == "%s.%s" % (package.rsplit('.', 1)[0], name):
return mod
raise KeyError(name)
def add_from_depend(self, node, from_module):
"""add dependencies created by from-imports
"""
mod_name = node.root().name
obj = self.module(mod_name)
if from_module not in obj.node.depends:
obj.node.depends.append(from_module)
def extract_relationships(self):
"""extract relation ships between nodes in the diagram
"""
ClassDiagram.extract_relationships(self)
for obj in self.classes():
# ownership
try:
mod = self.object_from_node(obj.node.root())
|
SUSE/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2016_09_01/models/application_gateway_sku.py
|
Python
|
mit
| 1,696
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewaySku(Model):
"""SKU of an application gateway.
:param name: Name of an application gateway SKU. Possible values are:
'Standard_Small', 'Standard_Medium', 'Standard_Large', 'WAF_Medium', and
'WAF_Large'. Possible values include: 'Standard_Small', 'Standard_Medium',
'Standard_Large', 'WAF_Medium', 'WAF_Large'
:type name: str or :class:`ApplicationGatewaySkuName
<azure.mgmt.network.v2016_09_01.models.ApplicationGatewaySkuName>`
:param tier: Tier of an application gateway. Possible values are:
'Standard' and 'WAF'. Possible values include: 'Standard', 'WAF'
:type tier: str or :class:`ApplicationGatewayTier
     <azure.mgmt.network.v2016_09_01.models.ApplicationGatewayTier>`
:param capacity: Capacity (instance count) of an application gateway.
:type capacity: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
'capacity': {'key': 'capacity', 'type': 'int'},
}
def __init__(self, name=None, tier=None, capacity=None):
self.name = name
self.tier = tier
self.capacity = capacity
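# Illustrative usage (not part of the generated code):
#   sku = ApplicationGatewaySku(name='Standard_Small', tier='Standard', capacity=2)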
|
majestrate/i2p.socket
|
i2p/test/test_datatypes.py
|
Python
|
mit
| 8,496
| 0.003296
|
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import *
from unittest import TestCase
try:
from i2p.crypto import crypto
except ImportError:
crypto = None
from i2p import datatypes
if crypto is not None:
DSA_ELGAMAL_KEY_CERT = b'BQAEAAAAAA=='
DSA_ELGAMAL_KEY_CERT_PAYLOAD = b'AAAAAA=='
# stats.i2p
DEST_DSA_B64 = 'Okd5sN9hFWx-sr0HH8EFaxkeIMi6PC5eGTcjM1KB7uQ0ffCUJ2nVKzcsKZFHQc7pLONjOs2LmG5H-2SheVH504EfLZnoB7vxoamhOMENnDABkIRGGoRisc5AcJXQ759LraLRdiGSR0WTHQ0O1TU0hAz7vAv3SOaDp9OwNDr9u902qFzzTKjUTG5vMTayjTkLo2kOwi6NVchDeEj9M7mjj5ySgySbD48QpzBgcqw1R27oIoHQmjgbtbmV2sBL-2Tpyh3lRe1Vip0-K0Sf4D-Zv78MzSh8ibdxNcZACmZiVODpgMj2ejWJHxAEz41RsfBpazPV0d38Mfg4wzaS95R5hBBo6SdAM4h5vcZ5ESRiheLxJbW0vBpLRd4mNvtKOrcEtyCvtvsP3FpA-6IKVswyZpHgr3wn6ndDHiVCiLAQZws4MsIUE1nkfxKpKtAnFZtPrrB8eh7QO9CkH2JBhj7bG0ED6mV5~X5iqi52UpsZ8gnjZTgyG5pOF8RcFrk86kHxAAAA'
DEST_DSA_B32 = '7tbay5p4kzeekxvyvbf6v7eauazemsnnl2aoyqhg5jzpr5eke7tq.b32.i2p'
# tracker.thebland.i2p
DEST_ECDSA_P256_B64 = 'gzBtMSRcMD6b36PmPCQWZhh30fYm2Ww2r4tRSref4N2T4~cnXK3DjJOuJwao2jRK4bZwX2Rkyjw849xrFMqaR3SdPe3-K61B~Kr9Uo1KLdm3~oahOWFmCaIlipPs-i3jdTT~721YUcYB09n4PGrDq5KZSOOBlLZKulJficO58QRUlDpva4OCCRrX9EUCoAavOciKpvKtnGwl6AiPFu8WnmEeGQ861vjdirjfkHWNp3gj9IjGuxJNcgyHi51BWYZM6il~LJTcbA4zuZn~qudHIx9uzUtO-t08yzSRrmfVwVVVru6-~BBX0ipADi9UGZjyB-PJEKKjizUPxSp2OCmiOlQ2iXpKs2j8yfjHJbn-eWKpIh4jfpNigy6AbDfzFivkvm8lt8CleYf-p3~SHdqIL0iEaacxi5BAU4Baj5yS818kPQP4hEEMMtq4WnKjl4IW64swXSg1wlVBTiKDJzzQGK20jySBuPxhEbd6sfAeirzn585g5EqeV8DLqsMfe5pZBQAEAAEAAA=='
DEST_ECDSA_P256_B32 = 's5ikrdyjwbcgxmqetxb3nyheizftms7euacuub2hic7defkh3xhq.b32.i2p'
def assert_KeyCert_DSA_ElGamal(cert):
assert len(cert.data) == 4
assert cert.sigtype == crypto.SigType.DSA_SHA1
assert cert.enctype == crypto.EncType.ELGAMAL_2048
assert len(cert.extra_sigkey_data) == 0
assert len(cert.extra_enckey_data) == 0
class TestKeyCertificate(TestCase):
def test_create_from_keys(self):
cert = datatypes.KeyCertificate(crypto.DSAKey(),
crypto.ElGamalKey())
assert_KeyCert_DSA_ElGamal(cert)
def test_parse(self):
cert = datatypes.KeyCertificate(raw=DSA_ELGAMAL_KEY_CERT, b64=True)
assert_KeyCert_DSA_ElGamal(cert)
def test_create_and_serialize(self):
cert = datatypes.KeyCertificate(data=DSA_ELGAMAL_KEY_CERT_PAYLOAD, b64=True)
assert_KeyCert_DSA_ElGamal(cert)
assert cert.serialize(True) == DSA_ELGAMAL_KEY_CERT
class TestDestination(TestCase):
def test_generate_default(self):
dest = datatypes.Destination()
assert dest.enckey.key_type == crypto.EncType.ELGAMAL_2048
assert dest.sigkey.key_type == crypto.SigType.DSA_SHA1
assert dest.cert.type == datatypes.CertificateType.NULL
dest2 = datatypes.Destination()
assert dest2.enckey.key.y != dest.enckey.key.y
assert dest2.sigkey.key.y != dest.sigkey.key.y
def test_generate_specify_types(self):
dest = datatypes.Destination(crypto.EncType.ELGAMAL_2048, crypto.SigType.DSA_SHA1)
assert dest.enckey.key_type == crypto.EncType.ELGAMAL_2048
assert dest.sigkey.key_type == crypto.SigType.DSA_SHA1
assert dest.cert.type == datatypes.CertificateType.NULL
dest = datatypes.Destination(sigkey=crypto.SigType.ECDSA_SHA256_P256)
self._assert_keycert(dest, crypto.EncType.ELGAMAL_2048,
crypto.SigType.ECDSA_SHA256_P256)
#dest = datatypes.Destination(crypto.EncType.EC_P256)
#self._assert_keycert(dest, crypto.EncType.EC_P256,
# crypto.SigType.DSA_SHA1)
#dest = datatypes.Destination(crypto.EncType.EC_P256,
# crypto.SigType.ECDSA_SHA256_P256)
#self._assert_keycert(dest, crypto.EncType.EC_P256,
# crypto.SigType.ECDSA_SHA256_P256)
def test_generate_from_keycert(self):
        keycert = datatypes.KeyCertificate(crypto.DSAKey(),
crypto.ElGamalKey())
dest = datatypes.Destination(cert=keycert)
assert dest.enckey.key_type == crypto.EncType.ELGAMAL_2048
assert dest.sigkey.key_type == crypto.SigType.DSA_SHA1
assert dest.cert.type == datatypes.CertificateType.NULL
keycert = datatypes.KeyCertificate(crypto.ECDSA256Key(),
crypto.ElGamalKey())
dest = datatypes.Destination(cert=keycert)
self._assert_keycert(dest, crypto.EncType.ELGAMAL_2048,
crypto.SigType.ECDSA_SHA256_P256)
def _assert_keycert(self, dest, enctype, sigtype):
assert dest.enckey.key_type == enctype
assert dest.sigkey.key_type == sigtype
assert dest.cert.type == datatypes.CertificateType.KEY
assert dest.cert.sigtype == sigtype
assert dest.cert.enctype == enctype
def TODO_test_parse_eeppriv(self):
with open(testkey, 'rb') as rf:
dest = datatypes.Destination(raw=rf)
def test_parse_b64(self):
self._test_parse_b64(DEST_DSA_B64, datatypes.CertificateType.NULL, 0)
self._test_parse_b64(DEST_ECDSA_P256_B64, datatypes.CertificateType.KEY, 4)
def _test_parse_b64(self, b64, cert_type, data_len):
dest = datatypes.Destination(raw=b64, b64=True)
assert dest.cert.type == cert_type
assert len(dest.cert.data) == data_len
def test_serialize_nullcert(self):
dest = datatypes.Destination(crypto.ElGamalKey(), crypto.DSAKey())
assert dest.cert.type == datatypes.CertificateType.NULL
data = dest.serialize()
dest2 = datatypes.Destination(raw=data)
assert dest2.enckey.key.y == dest.enckey.key.y
assert dest2.sigkey.key.y == dest.sigkey.key.y
assert dest2.cert.type == dest.cert.type
assert dest2.padding == dest.padding
def test_serialize_keycert(self):
dest = datatypes.Destination(crypto.ElGamalKey(), crypto.ECDSA256Key())
assert dest.cert.type == datatypes.CertificateType.KEY
data = dest.serialize()
dest2 = datatypes.Destination(raw=data)
assert dest2.enckey.key.y == dest.enckey.key.y
assert dest2.sigkey.key.get_pubkey() == dest.sigkey.key.get_pubkey()
assert dest2.cert.type == dest.cert.type
assert dest2.padding == dest.padding
def test_base64(self):
self._test_base64(DEST_DSA_B64)
self._test_base64(DEST_ECDSA_P256_B64)
def _test_base64(self, b64):
dest = datatypes.Destination(raw=b64, b64=True)
assert dest.base64() == b64
def test_base32(self):
self._test_base32(DEST_DSA_B64, DEST_DSA_B32)
self._test_base32(DEST_ECDSA_P256_B64, DEST_ECDSA_P256_B32)
def _test_base32(self, b64, b32):
dest = datatypes.Destination(raw=b64, b64=True)
assert dest.base32() == b32
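    # Illustrative check (not in the original): the b32 address is derived from
    # the binary destination, so parsing the b64 form reproduces it:
    #   datatypes.Destination(raw=DEST_DSA_B64, b64=True).base32() == DEST_DSA_B32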
class TestLeaseSet(TestCase):
def test_serialize(self):
dest = datatypes.Destination(crypto.ElGamalKey(), crypto.DSAKey(), datatypes.Certificate())
lease = datatypes.Lease(b'f'*32, 1, datatypes.Date(1))
ls = datatypes.LeaseSet(dest=dest, ls_enckey=crypto.ElGamalKey(), ls_sigkey=crypto.DSAKey(), leases=[lease])
data = ls.serialize()
dest.verify(data[:-40], data[-40:])
def test_parse(self):
dest = datatypes.Destination(crypto.ElGamalKey(), crypto.DSAKey(), datatypes.Certificate())
lease = datatypes.Lease(b'f'*32, 1, datatypes.Date(1))
ls = datatypes.LeaseSet(dest=dest, ls_enckey=crypto.ElGamalKey(), ls_sigkey=crypto.DSAKey(), l
|
ology/NLTK-Study
|
Defs.py
|
Python
|
artistic-2.0
| 1,971
| 0.006088
|
#!/usr/local/bin/python
# -- coding: utf-8 --
#
# Subroutine defs inspired by http://nltk.org/book/
# -- gene at ology dot net not dot com
#
# Import functionality.
from __future__ import division
import nltk
def generate_model(cfdist, word, num=15):
for i in range(num):
print word,
word = cfdist[word].max() # XXX max() renders loops
def lexical_diversity(text):
word_count = len(text)
vocab_size = len(set(text))
lexical_diversity = word_count / vocab_size
return lexical_diversity
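# Illustrative usage (assumes an NLTK Text object such as nltk.book.text1):
#   lexical_diversity(text1)  # total token count divided by vocabulary size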
def content_fraction(text, stop):
content = [w for w in text if w.isalpha() and w.lower() not in stop]
return len(content) / len(text)
def token_percent(token, text):
return 100 * text.count(token) / len(text)
def avg_word_len(text):
    v = set([w.lower() for w in text if w.isalpha()])
t = sum([len(w) for w in v])
return t / len(v)
def unusual_words(text):
text_vocab = set(w.lower() for w in text if w.isalpha())
english_vocab = set(w.lower() for w in nltk.corpus.words.words())
unusual = text_vocab.difference(english_vocab)
return sorted(unusual)
def cond_freq_dist(text, target1, target2):
cfd = nltk.ConditionalFreqDist(
(target, fileid[:4]) # word-target, address-year
for fileid in text.fileids() # inagural address
for w in text.words(fileid) # all words in the address
for target in [target1, target2] # for each word
if w.lower().startswith(target)) # ...that is lower, etc.
cfd.plot()
# ConditionalFreqDist for words in the given languages.
def udhr_cond_freq_dist(udhr, languages):
cfd = nltk.ConditionalFreqDist(
(lang, len(word))
for file in udhr.fileids()
for lang in languages if lang in file
for word in udhr.words(file))
cfd.plot()
# Return the syllable stress list.
def stress(pron):
return [char for phone in pron for char in phone if char.isdigit()]
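# Illustrative usage (hedged; assumes the CMU Pronouncing Dictionary corpus):
#   entries = nltk.corpus.cmudict.entries()
#   [w for w, pron in entries if stress(pron) == ['0', '1', '0', '2']]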
|
guorendong/iridium-browser-ubuntu
|
tools/telemetry/telemetry/util/find_dependencies_unittest.py
|
Python
|
bsd-3-clause
| 2,648
| 0.007931
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import unittest
import zipfile
from telemetry.util import cloud_storage
from telemetry.util import find_dependencies
class FindDependenciesTest(unittest.TestCase):
@unittest.skipUnless(
cloud_storage.SupportsProdaccess(
os.path.realpath(cloud_storage.FindGsutil())),
'Could not find a depot_tools installation with gsutil.')
def testGsutil(self):
parser = optparse.OptionParser()
find_dependencies.FindDependenciesCommand.AddCommandLineArgs(parser)
options, _ = parser.parse_args([])
try:
temp_dir = tempfile.mkdtemp()
zip_path = os.path.join(temp_dir, 'gsutil.zip')
options.zip = zip_path
      find_dependencies.ZipDependencies([], set(), options)
if platform.system() == 'Windows':
with zipfile.ZipFile(zip_path, 'r') as zip_file:
zip_file.extractall(temp_dir)
else:
        # Use unzip instead of Python zipfile to preserve file permissions.
with open(os.devnull, 'w') as dev_null:
subprocess.call(['unzip', zip_path], cwd=temp_dir, stdout=dev_null)
third_party_path = os.path.join(temp_dir, 'telemetry', 'src', 'tools',
'telemetry', 'third_party')
# __init__.py is in Chromium src, but we didn't include any repo files.
open(os.path.join(third_party_path, '__init__.py'), 'a').close()
gsutil_path = os.path.join(third_party_path, 'gsutil', 'gsutil')
self.assertTrue(os.access(gsutil_path, os.X_OK))
with open(os.devnull, 'w') as dev_null:
# gsutil with no args should print usage and exit with exit code 0.
gsutil_command = [sys.executable, gsutil_path]
self.assertEqual(subprocess.call(gsutil_command, stdout=dev_null), 0)
# gsutil config should wait for the user and not exit with exit code 1.
#gsutil_command = [sys.executable, gsutil_path, 'config',
# '-o', os.path.join(temp_dir, 'config_file')]
#gsutil_process = subprocess.Popen(gsutil_command, stdout=dev_null)
#try:
# util.WaitFor(gsutil_process.poll, timeout=0.5)
# self.assertEqual(gsutil_process.returncode, 0,
# msg='gsutil config failed.')
#except exceptions.TimeoutException:
# gsutil_process.terminate()
# gsutil_process.wait()
finally:
shutil.rmtree(temp_dir)
|
Pseudonick47/sherlock
|
backend/config.py
|
Python
|
gpl-3.0
| 428
| 0.007009
|
import os
dir = os.path.dirname(os.path.realpath(__file__))
class BaseConfig(object):
SECRET_KEY = "SO_SECURE"
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = True
class TestConfig(object):
SECRET_KEY = "SO_SECURE"
TESTING = True
DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + dir + '/test.db'
SQLALCHEMY_TRACK_MODIFICATIONS = True
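# Illustrative usage (not part of the original file; assumes a Flask app object):
#   app.config.from_object('config.BaseConfig')   # or config.TestConfig in tests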
|
Code4SA/pa-hotness
|
instance/config.py
|
Python
|
apache-2.0
| 76
| 0.013158
|
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///../instance/pa-hotness.db'
|
rohitranjan1991/home-assistant
|
homeassistant/components/template/__init__.py
|
Python
|
mit
| 5,547
| 0.000721
|
"""The template component."""
from __future__ import annotations
import asyncio
from collections.abc import Callable
import logging
from homeassistant import config as conf_util
from homeassistant.const import (
CONF_UNIQUE_ID,
EVENT_HOMEASSISTANT_START,
SERVICE_RELOAD,
)
from homeassistant.core import CoreState, Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
discovery,
trigger as trigger_helper,
update_coordinator,
)
from homeassistant.helpers.reload import async_reload_integration_platforms
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_integration
from .const import CONF_TRIGGER, DOMAIN, PLATFORMS
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the template integration."""
if DOMAIN in config:
await _process_config(hass, config)
async def _reload_config(call: Event | ServiceCall) -> None:
"""Reload top-level + platforms."""
try:
unprocessed_conf = await conf_util.async_hass_config_yaml(hass)
except HomeAssistantError as err:
_LOGGER.error(err)
return
conf = await conf_util.async_process_component_config(
hass, unprocessed_conf, await async_get_integration(hass, DOMAIN)
)
if conf is None:
return
await async_reload_integration_platforms(hass, DOMAIN, PLATFORMS)
if DOMAIN in conf:
await _process_config(hass, conf)
hass.bus.async_fire(f"event_{DOMAIN}_reloaded", context=call.context)
hass.helpers.service.async_register_admin_service(
DOMAIN, SERVICE_RELOAD, _reload_config
)
return True
async def _process_config(hass: HomeAssistant, hass_config: ConfigType) -> None:
"""Process config."""
coordinators: list[TriggerUpdateCoordinator] | None = hass.data.pop(DOMAIN, None)
# Remove old ones
if coordinators:
for coordinator in coordinators:
coordinator.async_remove()
async def init_coordinator(hass, conf_section):
coordinator = TriggerUpdateCoordinator(hass, conf_section)
await coordinator.async_setup(hass_config)
return coordinator
coordinator_tasks = []
for conf_section in hass_config[DOMAIN]:
if CONF_TRIGGER in conf_section:
coordinator_tasks.append(init_coordinator(hass, conf_section))
continue
for platform_domain in PLATFORMS:
if platform_domain in conf_section:
hass.async_create_task(
discovery.async_load_platform(
hass,
platform_domain,
DOMAIN,
{
"unique_id": conf_section.get(CONF_UNIQUE_ID),
"entities": conf_section[platform_domain],
},
hass_config,
)
)
if coordinator_tasks:
hass.data[DOMAIN] = await asyncio.gather(*coordinator_tasks)
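# Illustrative configuration consumed by _process_config (hedged sketch, not
# taken from the source tree):
#
#   template:
#     - trigger:
#         - platform: time_pattern
#           minutes: "/5"
#       sensor:
#         - name: "example"
#           state: "{{ now().minute }}"
#
# Sections containing a trigger key become TriggerUpdateCoordinators; plain
# platform sections are forwarded to discovery.async_load_platform above.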
class TriggerUpdateCoordinator(update_coordinator.DataUpdateCoordinator):
"""Class to handle incoming data."""
REMOVE_TRIGGER = object()
def __init__(self, hass, config):
"""Instantiate trigger data."""
super().__init__(hass, _LOGGER, name="Trigger Update Coordinator")
self.config = config
self._unsub_start: Callable[[], None] | None = None
        self._unsub_trigger: Callable[[], None] | None = None
@property
def unique_id(self) -> str | None:
"""Return unique ID for the entity."""
return self.config.get("unique_id")
@callback
def async_remove(self):
"""Signal that the entities need to remove themselves."""
if self._unsub_start:
            self._unsub_start()
if self._unsub_trigger:
self._unsub_trigger()
async def async_setup(self, hass_config: ConfigType) -> None:
"""Set up the trigger and create entities."""
if self.hass.state == CoreState.running:
await self._attach_triggers()
else:
self._unsub_start = self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, self._attach_triggers
)
for platform_domain in PLATFORMS:
if platform_domain in self.config:
self.hass.async_create_task(
discovery.async_load_platform(
self.hass,
platform_domain,
DOMAIN,
{"coordinator": self, "entities": self.config[platform_domain]},
hass_config,
)
)
async def _attach_triggers(self, start_event=None) -> None:
"""Attach the triggers."""
if start_event is not None:
self._unsub_start = None
self._unsub_trigger = await trigger_helper.async_initialize_triggers(
self.hass,
self.config[CONF_TRIGGER],
self._handle_triggered,
DOMAIN,
self.name,
self.logger.log,
start_event is not None,
)
@callback
def _handle_triggered(self, run_variables, context=None):
self.async_set_updated_data(
{"run_variables": run_variables, "context": context}
)
|
FormAlchemy/formalchemy
|
formalchemy/templates.py
|
Python
|
mit
| 4,638
| 0.004743
|
# -*- coding: utf-8 -*-
import os
import sys
from formalchemy.i18n import get_translator
from formalchemy import helpers
from formalchemy.helpers import literal
from tempita import Template as _TempitaTemplate
class TempitaTemplate(_TempitaTemplate):
default_encoding = None
try:
from mako.lookup import TemplateLookup
from mako.template import Template as MakoTemplate
from mako.exceptions import TopLevelLookupException
HAS_MAKO = True
except ImportError:
HAS_MAKO = False
try:
from genshi.template import TemplateLoader as GenshiTemplateLoader
HAS_GENSHI = True
except ImportError:
HAS_GENSHI = False
MAKO_TEMPLATES = os.path.join(
os.path.dirname(__file__),
'paster_templates','pylons_fa','+package+','templates', 'forms')
class TemplateEngine(object):
"""Base class for templates engines
"""
directories = []
extension = None
_templates = ['fieldset', 'fieldset_readonly',
'grid', 'grid_readonly']
def __init__(self, **kw):
self.templates = {}
if 'extension' in kw:
self.extension = kw.pop('extension')
if 'directories' in kw:
            self.directories = list(kw.pop('directories'))
for name in self._templates:
self.templates[name] = self.get_template(name, **kw)
    def get_template(self, name, **kw):
"""return the template object for `name`. Likely to be overridden by engines"""
return None
def get_filename(self, name):
"""return the filename for template `name`"""
for dirname in self.directories + [os.path.dirname(__file__)]:
filename = os.path.join(dirname, '%s.%s' % (name, self.extension))
if os.path.isfile(filename):
return filename
def render(self, template_name, **kwargs):
"""render the template. Must be overridden by engines"""
raise NotImplementedError("You need to implement %s.render." % self.__class__.__name__)
def _update_args(cls, kw):
kw['F_'] = get_translator(lang=kw.get('lang', None),
request=kw.get('request', None))
kw['html'] = helpers
return kw
_update_args = classmethod(_update_args)
def __call__(self, template_name, **kw):
"""update kw to extend the namespace with some FA's utils then call `render`"""
self._update_args(kw)
return self.render(template_name, **kw)
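    # Illustrative usage (not in the original; `fs` is a hypothetical FieldSet):
    #   engine = TempitaEngine(directories=['my/templates'])
    #   html = engine('fieldset', fieldset=fs)   # renders fieldset.tmpl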
class TempitaEngine(TemplateEngine):
"""Template engine for tempita. File extension is `.tmpl`.
"""
extension = 'tmpl'
def get_template(self, name, **kw):
filename = self.get_filename(name)
kw['encoding'] = 'utf-8'
if filename:
return TempitaTemplate.from_filename(filename, **kw)
def render(self, template_name, **kwargs):
template = self.templates.get(template_name, None)
return literal(template.substitute(**kwargs))
class MakoEngine(TemplateEngine):
"""Template engine for mako. File extension is `.mako`.
"""
extension = 'mako'
_lookup = None
def get_template(self, name, **kw):
if self._lookup is None:
self._lookup = TemplateLookup(directories=self.directories, **kw)
try:
return self._lookup.get_template('%s.%s' % (name, self.extension))
except TopLevelLookupException:
filename = os.path.join(MAKO_TEMPLATES, '%s.mako_tmpl' % name)
if os.path.isfile(filename):
template = TempitaTemplate.from_filename(filename, encoding="utf-8")
return MakoTemplate(template.substitute(template_engine='mako'), **kw)
def render(self, template_name, **kwargs):
template = self.templates.get(template_name, None)
return literal(template.render_unicode(**kwargs))
class GenshiEngine(TemplateEngine):
"""Template engine for genshi. File extension is `.html`.
"""
extension = 'html'
def get_template(self, name, **kw):
filename = self.get_filename(name)
if filename:
loader = GenshiTemplateLoader(os.path.dirname(filename), **kw)
return loader.load(os.path.basename(filename))
def render(self, template_name, **kwargs):
template = self.templates.get(template_name, None)
return literal(template.generate(**kwargs).render('html', doctype=None))
if HAS_MAKO:
default_engine = MakoEngine(input_encoding='utf-8', output_encoding='utf-8')
engines = dict(mako=default_engine, tempita=TempitaEngine())
else:
default_engine = TempitaEngine()
engines = dict(tempita=TempitaEngine())
|
GhostshipSoftware/avaloria
|
src/tests/test_utils_logger.py
|
Python
|
bsd-3-clause
| 979
| 0.011236
|
import unittest
class TestLogTrace(unittest.TestCase):
def test_log_trace(self):
# self.assertEqual(expected, log_trace(errmsg))
assert True # TODO: implement your test here
class TestLogErrmsg(unittest.TestCase):
def test_log_errmsg(self):
# self.assertEqual(expected, log_errmsg(errmsg))
assert True # TODO: implement your test here
class TestLogWarnmsg(unittest.TestCase):
def test_log_warnmsg(self):
# self.assertEqual(expected, log_warnmsg(warnmsg))
assert True # TODO: implement your test here
class TestLogInfomsg(unittest.TestCase):
def test_log_infomsg(self):
        # self.assertEqual(expected, log_infomsg(infomsg))
assert True # TODO: implement your test here
class TestLogDepmsg(unittest.TestCase):
def test_log_depmsg(self):
# self.assertEqual(expected, log_depmsg(depmsg))
assert True # TODO: implement your test here
if __name__ == '__main__':
unittest.main()
|
irinabov/debian-qpid-dispatch
|
tests/router_engine_test.py
|
Python
|
apache-2.0
| 27,299
| 0.009194
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import mock # Mock definitions for tests.
sys.path.append(os.path.join(os.environ["SOURCE_DIR"], "python"))
from qpid_dispatch_internal.router.engine import HelloProtocol, PathEngine, NodeTracker
from qpid_dispatch_internal.router.data import LinkState, MessageHELLO, ProtocolVersion
from qpid_dispatch.management.entity import EntityBase
from system_test import main_module
from system_test import unittest
class Adapter(object):
def __init__(self, domain):
self._domain = domain
def log(self, level, text):
print("Adapter.log(%d): domain=%s, text=%s" % (level, self._domain, text))
def send(self, dest, opcode, body):
print("Adapter.send: domain=%s, dest=%s, opcode=%s, body=%s" % (self._domain, dest, opcode, body))
def remote_bind(self, subject, peer):
print("Adapter.remote_bind: subject=%s, peer=%s" % (subject, peer))
def remote_unbind(self, subject, peer):
print("Adapter.remote_unbind: subject=%s, peer=%s" % (subject, peer))
def node_updated(self, address, reachable, neighbor, link_bit, router_bit):
print("Adapter.node_updated: address=%s, reachable=%r, neighbor=%r, link_bit=%d, router_bit=%d" % \
(address, reachable, neighbor, link_bit, router_bit))
class DataTest(unittest.TestCase):
def test_link_state(self):
ls = LinkState(None, 'R1', 1, {'R2':1, 'R3':1})
self.assertEqual(ls.id, 'R1')
self.assertEqual(ls.ls_seq, 1)
self.assertEqual(ls.peers, {'R2':1, 'R3':1})
ls.bump_sequence()
self.assertEqual(ls.id, 'R1')
self.assertEqual(ls.ls_seq, 2)
self.assertEqual(ls.peers, {'R2':1, 'R3':1})
result = ls.add_peer('R4', 5)
self.assertTrue(result)
self.assertEqual(ls.peers, {'R2':1, 'R3':1, 'R4':5})
result = ls.add_peer('R2', 1)
self.assertFalse(result)
self.assertEqual(ls.peers, {'R2':1, 'R3':1, 'R4':5})
result = ls.del_peer('R3')
self.assertTrue(result)
self.assertEqual(ls.peers, {'R2':1, 'R4':5})
result = ls.del_peer('R5')
self.assertFalse(result)
self.assertEqual(ls.peers, {'R2':1, 'R4':5})
encoded = ls.to_dict()
new_ls = LinkState(encoded)
self.assertEqual(new_ls.id, 'R1')
self.assertEqual(new_ls.ls_seq, 2)
self.assertEqual(new_ls.peers, {'R2':1, 'R4':5})
def test_hello_message(self):
msg1 = MessageHELLO(None, 'R1', ['R2', 'R3', 'R4'])
self.assertEqual(msg1.get_opcode(), "HELLO")
self.assertEqual(msg1.id, 'R1')
self.assertEqual(msg1.seen_peers, ['R2', 'R3', 'R4'])
encoded = msg1.to_dict()
msg2 = MessageHELLO(encoded)
self.assertEqual(msg2.get_opcode(), "HELLO")
self.assertEqual(msg2.id, 'R1')
self.assertEqual(msg2.seen_peers, ['R2', 'R3', 'R4'])
self.assertTrue(msg2.is_seen('R3'))
self.assertFalse(msg2.is_seen('R9'))
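    # Illustrative (not in the original): HELLO messages survive a dict
    # round-trip, so a router can rebuild a peer's view, e.g.:
    #   MessageHELLO(MessageHELLO(None, 'R9', ['R1']).to_dict()).is_seen('R1')  # True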
class NodeTrackerTest(unittest.TestCase):
def log(self, level, text):
pass
def add_neighbor_router(self, address, router_bit, link_bit):
self.address = address
self.router_bit = router_bit
self.link_bit = link_bit
self.calls += 1
def del_neighbor_router(self, router_id, router_bit):
self.address = None
self.router_bit = router_bit
self.link_bit = None
self.calls += 1
def add_remote_router(self, address, router_bit):
self.address = address
self.router_bit = router_bit
self.link_bit = None
self.calls += 1
def del_remote_router(self, router_id, router_bit):
self.address = None
self.router_bit = router_bit
self.link_bit = None
self.calls += 1
def reset(self):
self.address = None
self.router_bit = None
self.link_bit = None
self.calls = 0
class NeighborTest(unittest.TestCase):
def log(self, level, text):
pass
def log_hello(self, level, text):
pass
def send(self, dest, msg):
self.sent.append((dest, msg))
def neighbor_refresh(self, node_id, ProtocolVersion, instance, link_id, cost, now):
self.neighbors[node_id] = (instance, link_id, cost, now)
def setUp(self):
self.sent = []
self.neighbors = {}
self.id = "R1"
self.instance = 0
# Fake configuration
self.config = EntityBase({
'helloIntervalSeconds' : 1.0,
'helloMaxAgeSeconds' : 3.0,
'raIntervalSeconds' : 30.0,
'remoteLsMaxAgeSeconds' : 60.0 })
self.neighbors = {}
def test_hello_sent(self):
self.sent = []
self.local_link_state = None
self.engine = HelloProtocol(self, self)
self.engine.tick(1.0)
self.assertEqual(len(self.sent), 1)
dest, msg = self.sent.pop(0)
self.assertEqual(dest, "amqp:/_local/qdhello")
self.assertEqual(msg.get_opcode(), "HELLO")
self.assertEqual(msg.id, self.id)
self.assertEqual(msg.seen_peers, [])
self.assertEqual(self.local_link_state, None)
def test_sees_peer(self):
self.sent = []
self.neighbors = {}
self.engine = HelloProtocol(self, self)
self.engine.handle_hello(MessageHELLO(None, 'R2', []), 2.0, 0, 1)
self.engine.tick(5.0)
self.assertEqual(len(self.sent), 1)
dest, msg = self.sent.pop(0)
self.assertEqual(msg.seen_peers, ['R2'])
def test_establish_peer(self):
self.sent = []
self.neighbors = {}
self.engine = HelloProtocol(self, self)
self.engine.handle_hello(MessageHELLO(None, 'R2', ['R1']), 0.5, 0, 1)
self.engine.tick(1.0)
self.engine.tick(2.0)
self.engine.tick(3.0)
self.assertEqual(len(self.neighbors), 1)
self.assertEqual(list(self.neighbors.keys()), ['R2'])
def test_establish_multiple_peers(self):
self.sent = []
self.neighbors = {}
self.engine = HelloProtocol(self, self)
self.engine.handle_hello(MessageHELLO(None, 'R2', ['R1']), 0.5, 0, 1)
self.engine.tick(1.0)
self.engine.handle_hello(MessageHELLO(None, 'R3', ['R1', 'R2']), 1.5, 0, 1)
self.engine.tick(2.0)
self.engine.handle_hello(MessageHELLO(None, 'R4', ['R1']), 2.5, 0, 1)
self.engine.handle_hello(MessageHELLO(None, 'R5', ['R2']), 2.5, 0, 1)
self.engine.handle_hello(MessageHELLO(None, 'R6', ['R1']), 2.5, 0, 1)
self.engine.tick(3.0)
keys = [k for k in self.neighbors.keys()]
keys.sort()
self.assertEqual(keys, ['R2', 'R3', 'R4', 'R6'])
class PathTest(unittest.TestCase):
def setUp(self):
self.id = 'R1'
self.engine = PathEngine(self)
def log(self, level, text):
pass
def test_topology1(self):
"""
+====+ +----+ +----+
| R1 |------| R2 |------| R3 |
+====+ +----+ +----+
"""
collection = { 'R1': LinkState(None, 'R1', 1, {'R2':1}),
'R2': LinkState(None, 'R2', 1, {'R1
|
arsenetar/dupeguru
|
qt/exclude_list_dialog.py
|
Python
|
gpl-3.0
| 7,359
| 0.002582
|
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import re
from PyQt5.QtCore import Qt, pyqtSlot
from PyQt5.QtWidgets import (
QPushButton,
QLineEdit,
QVBoxLayout,
QGridLayout,
QDialog,
QTableView,
QAbstractItemView,
QSpacerItem,
QSizePolicy,
QHeaderView,
)
from .exclude_list_table import ExcludeListTable
from core.exclude import AlreadyThereException
from hscommon.trans import trget
tr = trget("ui")
class ExcludeListDialog(QDialog):
def __init__(self, app, parent, model, **kwargs):
flags = Qt.CustomizeWindowHint | Qt.WindowTitleHint | Qt.WindowSystemMenuHint
super().__init__(parent, flags, **kwargs)
self.app = app
self.specific_actions = frozenset()
self._setupUI()
self.model = model # ExcludeListDialogCore
self.model.view = self
self.table = ExcludeListTable(app, view=self.tableView) # Qt ExcludeListTable
self._row_matched = False # test if at least one row matched our test string
self._input_styled = False
self.buttonAdd.clicked.connect(self.addStringFromLineEdit)
self.buttonRemove.clicked.connect(self.removeSelected)
self.buttonRestore.clicked.connect(self.restoreDefaults)
self.buttonClose.clicked.connect(self.accept)
self.buttonHelp.clicked.connect(self.display_help_message)
self.buttonTestString.clicked.connect(self.onTestStringButtonClicked)
self.inputLine.textEdited.connect(self.reset_input_style)
self.testLine.textEdited.connect(self.reset_input_style)
self.testLine.textEdited.connect(self.reset_table_style)
def _setupUI(self):
layout = QVBoxLayout(self)
gridlayout = QGridLayout()
self.buttonAdd = QPushButton(tr("Add"))
self.buttonRemove = QPushButton(tr("Remove Selected"))
self.buttonRestore = QPushButton(tr("Restore defaults"))
self.buttonTestString = QPushButton(tr("Test string"))
self.buttonClose = QPushButton(tr("Close"))
self.buttonHelp = QPushButton(tr("Help"))
self.inputLine = QLineEdit()
self.testLine = QLineEdit()
self.tableView = QTableView()
triggers = (
QAbstractItemView.DoubleClicked | QAbstractItemView.EditKeyPressed | QAbstractItemView.SelectedClicked
)
self.tableView.setEditTriggers(triggers)
self.tableView.setSelectionMode(QTableView.ExtendedSelection)
self.tableView.setSelectionBehavior(QTableView.SelectRows)
self.tableView.setShowGrid(False)
vheader = self.tableView.verticalHeader()
vheader.setSectionsMovable(True)
vheader.setVisible(False)
hheader = self.tableView.horizontalHeader()
hheader.setSectionsMovable(False)
hheader.setSectionResizeMode(QHeaderView.Fixed)
hheader.setStretchLastSection(True)
hheader.setHighlightSections(False)
hheader.setVisible(True)
gridlayout.addWidget(self.inputLine, 0, 0)
gridlayout.addWidget(self.buttonAdd, 0, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonRemove, 1, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonRestore, 2, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonHelp, 3, 1, Qt.AlignLeft)
gridlayout.addWidget(self.buttonClose, 4, 1)
gridlayout.addWidget(self.tableView, 1, 0, 6, 1)
gridlayout.addItem(QSpacerItem(0, 0, QSizePolicy.Minimum, QSizePolicy.Expanding), 4, 1)
gridlayout.addWidget(self.buttonTestString, 6, 1)
gridlayout.addWidget(self.testLine, 6, 0)
layout.addLayout(gridlayout)
self.inputLine.setPlaceholderText(tr("Type a python regular expression here..."))
self.inputLine.setFocus()
self.testLine.setPlaceholderText(tr("Type a file system path or filename here..."))
self.testLine.setClearButtonEnabled(True)
# --- model --> view
def show(self):
super().show()
self.inputLine.setFocus()
@pyqtSlot()
def addStringFromLineEdit(self):
text = self.inputLine.text()
if not text:
return
try:
self.model.add(text)
except AlreadyThereException:
self.app.show_message("Expression already in the list.")
return
except Exception as e:
self.app.show_message(f"Expression is invalid: {e}")
return
self.inputLine.clear()
def removeSelected(self):
self.model.remove_selected()
def restoreDefaults(self):
self.model.restore_defaults()
def onTestStringButtonClicked(self):
input_text = self.testLine.text()
if not input_text:
self.reset_input_style()
return
# If at least one row matched, we know whether table is highlighted or not
self._row_matched = self.model.test_string(input_text)
self.table.refresh()
# Test the string currently in the input text box as well
input_regex = self.inputLine.text()
if not input_regex:
self.reset_input_style()
return
compiled = None
try:
compiled = re.compile(input_regex)
except re.error:
self.reset_input_style()
return
if self.model.is_match(input_text, compiled):
            self.inputLine.setStyleSheet("background-color: rgb(10, 200, 10);")
            self._input_styled = True
else:
self.reset_input_style()
def reset_input_style(self):
"""Reset regex input line background"""
if self._input_styled:
self.inputLine.setStyleSheet(self.styleSheet())
self._input_styled = False
def reset_table_style(self):
if self._row_matched:
self._row_matched = False
self.model.reset_rows_highlight()
self.table.refresh()
def display_help_message(self):
self.app.show_message(
tr(
"""\
These (case sensitive) python regular expressions will filter out files during scans.<br>\
Directories will also have their <strong>default state</strong> set to Excluded \
in the Directories tab if their name happens to match one of the selected regular expressions.<br>\
For each file collected, two tests are performed to determine whether or not to completely ignore it:<br>\
<li>1. Regular expressions with no path separator in them will be compared to the file name only.</li>
<li>2. Regular expressions with at least one path separator in them will be compared to the full path to the file.</li><br>
Example: if you want to filter out .PNG files from the "My Pictures" directory only:<br>\
<code>.*My\\sPictures\\\\.*\\.png</code><br><br>\
You can test the regular expression with the "test string" button after pasting a fake path in the test field:<br>\
<code>C:\\\\User\\My Pictures\\test.png</code><br><br>
Matching regular expressions will be highlighted.<br>\
If there is at least one highlight, the path or filename tested will be ignored during scans.<br><br>\
Directories and files starting with a period '.' are filtered out by default.<br><br>"""
)
)
|
five3/wireless-testing-platform
|
wtp/controller/QueueController.py
|
Python
|
gpl-2.0
| 1,894
| 0.00528
|
# -*- coding: utf-8 -*-
'''
Created on May 15, 2015
@author: chenchen9
'''
import json
import lazyxml
import tornado.web
from manager.TestcaseManager import TestcaseManager
class QueueController(tornado.web.RequestHandler):
def get(self):
queue_size = TestcaseManager().get_size()
queue_list = TestcaseManager().get_dump_list()
dicts = {'queue_size':queue_size, 'queue_list':queue_list}
# print dicts
if self.get_argument('rel', None):
api_type = self.get_argument('rel', 'xml')
pretty = self.get_argument('pretty', 'true').lower() == 'true'
if api_type == 'json':
if pretty:
devices_info = json.dumps(dicts, indent=4)
else:
devices_info = json.dumps(dicts)
elif api_type == 'xml':
if pretty:
devices_info = lazyxml.dumps(dicts, root='device_list', cdata=False, indent=' ')
else:
devices_info = lazyxml.dumps(dicts, root='device_list', cdata=False)
else:
raise Exception('unsupported argument: ' + api_type)
if pretty:
self.render('msg.html', msg=devices_info)
else:
self.write(devices_info)
else:
self.render('queue.html', info=dicts)
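    # Illustrative requests handled by get() above (hedged; the exact URL
    # depends on the application's routing table):
    #   GET /queue?rel=json&pretty=true  -> pretty-printed JSON dump
    #   GET /queue                       -> rendered queue.html page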
def post(self):
action = self.get_argument('action', None)
if action=='empty':
            TestcaseManager().empty()
elif action=='remove':
uuid = self.get_argument('uuid', None)
if uuid:
TestcaseManager().remove_by_uuid(uuid)
parent_uuid = self.get_argument('parent_uuid', None)
if parent_uuid:
TestcaseManager().remove_by_parent_uuid(parent_uuid)
self.write({'errorCode':0, 'msg':'success'})
|
weigj/django-multidb
|
django/contrib/gis/geos/base.py
|
Python
|
bsd-3-clause
| 21,337
| 0.003749
|
"""
This module contains the 'base' GEOSGeometry object -- all GEOS Geometries
inherit from this object.
"""
# Python, ctypes and types dependencies.
import re
from ctypes import addressof, byref, c_double, c_size_t
from types import UnicodeType
# GEOS-related dependencies.
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.libgeos import GEOM_PTR
# All other functions in this module come from the ctypes
# prototypes module -- which handles all interaction with
# the underlying GEOS library.
from django.contrib.gis.geos.prototypes import *
# Trying to import GDAL libraries, if available. Have to place in
# try/except since this package may be used outside GeoDjango.
try:
from django.contrib.gis.gdal import OGRGeometry, SpatialReference, GEOJSON
from django.contrib.gis.gdal.geometries import json_regex
HAS_GDAL = True
except:
HAS_GDAL, GEOJSON = False, False
# Regular expression for recognizing HEXEWKB and WKT. A prophylactic measure
# to prevent potentially malicious input from reaching the underlying C
# library. Not a substitute for good web security programming practices.
hex_regex = re.compile(r'^[0-9A-F]+$', re.I)
wkt_regex = re.compile(r'^(SRID=(?P<srid>\d+);)?(?P<wkt>(POINT|LINESTRING|LINEARRING|POLYGON|MULTIPOINT|MULTILINESTRING|MULTIPOLYGON|GEOMETRYCOLLECTION)[ACEGIMLONPSRUTY\d,\.\-\(\) ]+)$', re.I)
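# Illustrative matches (not in the original): wkt_regex accepts an optional
# EWKT-style SRID prefix, e.g.
#   wkt_regex.match('SRID=4326;POINT(5 23)').group('srid')  # -> '4326'
#   bool(hex_regex.match('0101000000'))                     # -> True (HEXEWKB)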
class GEOSGeometry(object):
"A class that, generally, encapsulates a GEOS geometry."
# Initially, the geometry pointer is NULL
_ptr = None
#### Python 'magic' routines ####
def __init__(self, geo_input, srid=None):
"""
The base constructor for GEOS geometry objects, and may take the
following inputs:
* string: WKT
* string: HEXEWKB (a PostGIS-specific canonical form)
* buffer: WKB
The `srid` keyword is used to specify the Source Reference Identifier
(SRID) number for this Geometry. If not set, the SRID will be None.
"""
if isinstance(geo_input, basestring):
if isinstance(geo_input, UnicodeType):
# Encoding to ASCII, WKT or HEXEWKB doesn't need any more.
geo_input = geo_input.encode('ascii')
wkt_m = wkt_regex.match(geo_input)
if wkt_m:
# Handling WKT input.
if wkt_m.group('srid'): srid = int(wkt_m.group('srid'))
g = from_wkt(wkt_m.group('wkt'))
elif hex_regex.match(geo_input):
# Handling HEXEWKB input.
g = from_hex(geo_input, len(geo_input))
elif GEOJSON and json_regex.match(geo_input):
# Handling GeoJSON input.
wkb_input = str(OGRGeometry(geo_input).wkb)
g = from_wkb(wkb_input, len(wkb_input))
else:
                raise ValueError('String or unicode input unrecognized as WKT, EWKT, or HEXEWKB.')
elif isinstance(geo_input, GEOM_PTR):
        # When the input is a pointer to a geometry (GEOM_PTR).
g = geo_input
elif isinstance(geo_input, buffer):
# When the input is a buffer (WKB).
wkb_input = str(geo_input)
g = from_wkb(wkb_input, len(wkb_input))
else:
# Invalid geometry type.
raise TypeError('Improper geometry input type: %s' % str(type(geo_input)))
if bool(g):
# Setting the pointer object with a valid pointer.
self._ptr = g
else:
raise GEOSException('Could not initialize GEOS Geometry with given input.')
# Post-initialization setup.
self._post_init(srid)
def _post_init(self, srid):
"Helper routine for performing post-initialization setup."
# Setting the SRID, if given.
if srid and isinstance(srid, int): self.srid = srid
# Setting the class type (e.g., Point, Polygon, etc.)
self.__class__ = GEOS_CLASSES[self.geom_typeid]
# Setting the coordinate sequence for the geometry (will be None on
# geometries that do not have coordinate sequences)
self._set_cs()
@property
def ptr(self):
"""
Property for controlling access to the GEOS geometry pointer. Using
this raises an exception when the pointer is NULL, thus preventing
the C library from attempting to access an invalid memory location.
"""
if self._ptr:
return self._ptr
else:
raise GEOSException('NULL GEOS pointer encountered; was this geometry modified?')
def __del__(self):
"""
Destroys this Geometry; in other words, frees the memory used by the
GEOS C++ object.
"""
if self._ptr: destroy_geom(self._ptr)
def __copy__(self):
"""
Returns a clone because the copy of a GEOSGeometry may contain an
invalid pointer location if the original is garbage collected.
"""
return self.clone()
def __deepcopy__(self, memodict):
"""
The `deepcopy` routine is used by the `Node` class of django.utils.tree;
thus, the protocol routine needs to be implemented to return correct
copies (clones) of these GEOS objects, which use C pointers.
"""
return self.clone()
def __str__(self):
"WKT is used for the string representation."
return self.wkt
def __repr__(self):
"Short-hand representation because WKT may be very large."
return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr)))
# Pickling support
def __getstate__(self):
# The pickled state is simply a tuple of the WKB (in string form)
# and the SRID.
return str(self.wkb), self.srid
def __setstate__(self, state):
# Instantiating from the tuple state that was pickled.
wkb, srid = state
ptr = from_wkb(wkb, len(wkb))
if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.')
self._ptr = ptr
self._post_init(srid)
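    # Illustrative (not in the original): pickling round-trips through WKB plus
    # SRID, so for a valid geometry `geom`:
    #   import pickle
    #   pickle.loads(pickle.dumps(geom)) == geom  # -> True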
# Comparison operators
def __eq__(self, other):
"""
Equivalence testing, a Geometry may be compared with another Geometry
or a WKT representation.
"""
if isinstance(other, basestring):
return self.wkt == other
elif isinstance(other, GEOSGeometry):
return self.equals_exact(other)
else:
return False
def __ne__(self, other):
"The not equals operator."
return not (self == other)
### Geometry set-like operations ###
# Thanks to Sean Gillies for inspiration:
# http://lists.gispython.org/pipermail/community/2007-July/001034.html
# g = g1 | g2
def __or__(self, other):
"Returns the union of this Geometry and the other."
return self.union(other)
# g = g1 & g2
def __and__(self, other):
"Returns the intersection of this Geometry and the other."
return self.intersection(other)
# g = g1 - g2
def __sub__(self, other):
"Return the difference this Geometry and the other."
return self.difference(other)
# g = g1 ^ g2
def __xor__(self, other):
"Return the symmetric difference of this Geometry and the other."
return self.sym_difference(other)
#### Coordinate Sequence Routines ####
@property
def has_cs(self):
"Returns True if this Geometry has a coordinate sequence, False if not."
# Only these geometries are allowed to have coordinate sequences.
if isinstance(self, (Point, LineString, LinearRing)):
return True
else:
return False
def _set_cs(self):
"Sets the coordinate sequence for this Geometry."
if self.has_cs:
self._cs = GEOSCoordSeq(get_cs(self.ptr), self.hasz)
else:
self._cs = None
@property
def coord_seq(self):
"Returns a clone of the coordinate sequence for this Geo
|
ajaybhatia/archlinux-dotfiles
|
Scripts/pythonscripts/subprocess_extensions.py
|
Python
|
mit
| 760
| 0.007895
|
#! /usr/bin/env python
"""
Get status and output of any command, capable of handling unicode.
"""
import subprocess
import codecs
from tempfiles import *
tmp = TempFiles()
def getstatusoutput(cmd):
logname = tmp.getTempFileName()
log = codecs.open(logname, mode="w", encoding="utf-8", errors="replace", buffering=0)
    popen = subprocess.Popen(cmd, shell=True, stdout=log, stderr=subprocess.STDOUT, universal_newlines=True)
status = popen.wait()
log.close()
log = codecs.open(logname, mode="r", encoding="utf-8", errors="replace")
output = log.read()
log.close()
tmp.remove(logname)
return status, output
def getstatus(cmd):
return getstatusoutput(cmd)[0]
def getoutput(cmd):
return getstatusoutput(cmd)[1]
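# Illustrative usage (not part of the original file):
#   status, output = getstatusoutput(u'ls -la')
#   if status != 0:
#       print output   # unicode-safe captured output (Python 2 module)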
|
sqs/freequery
|
freequery/index/job.py
|
Python
|
agpl-3.0
| 1,947
| 0.002054
|
from disco.core import Disco, result_iterator
from disco.settings import DiscoSettings
from disco.func import chain_reader
from discodex.objects import DataSet
from freequery.document import docparse
from freequery.document.docset import Docset
from freequery.index.tf_idf import TfIdf
class IndexJob(object):
def __init__(self, spec, discodex,
disco_addr="disco://localhost", profile=False):
# TODO(sqs): refactoring potential with PagerankJob
self.spec = spec
self.discodex = discodex
self.docset = Docset(spec.docset_name)
self.disco = Disco(DiscoSettings()['DISCO_MASTER'])
self.nr_partitions = 8
self.profile = profile
def start(self):
results = self.__run_job(self.__index_job())
self.__run_discodex_index(results)
def __run_job(self, job):
results = job.wait()
if self.profile:
self.__profile_job(job)
return results
def __index_job(self):
return self.disco.new_job(
name="index_tfidf",
input=['tag://' + self.docset.ddfs_tag],
map_reader=docparse,
map=TfIdf.map,
reduce=TfIdf.reduce,
sort=True,
partitions=self.nr_partitions,
partition=TfIdf.partition,
merge_partitions=False,
profile=self.profile,
params=dict(doc_count=self.docset.doc_count))
def __run_discodex_index(self, results):
opts = {
'parser': 'disco.func.chain_reader',
'demuxer': 'freequery.index.tf_idf.TfIdf_demux',
'nr_ichunks': 1, # TODO(sqs): after disco#181 fixed, increase this
        }
ds = DataSet(input=results, options=opts)
origname = self.discodex.index(ds)
self.disco.wait(origname) # origname is also the disco job name
self.discodex.clone(origname, self.spec.invindex_name)
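# Illustrative usage (hedged; assumes a configured discodex client and an index
# spec object):
#   IndexJob(spec, discodex_client).start()   # TF-IDF disco job, then discodex index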
|
zfergus2/Wrapping-Textures
|
tests/energy_tests/energy_test.py
|
Python
|
mit
| 7,271
| 0.006602
|
"""
Testing frame work for Wrapping Textures
Written by Zachary Ferguson
"""
from __future__ import print_function
import scipy.sparse
import scipy.sparse.linalg
import numpy
import pdb
from recordclass import recordclass
import includes
import bilerp_energy as energy
import seam_gradient
from seam_intervals import compute_seam_intervals
from util import print_progress, lerp_UV, surrounding_pixels, \
globalEdge_to_local, print_clear_line
from dirichlet_old import gen_symmetric_grid_laplacian2 as gen_dirichlet
from test_util import *
Energy_Test = recordclass("Wrapping_Texture_Test", ("diagram", "width",
"height", "edgePairs", "expected_intervals", "genSymTex", "createLSQ"))
def test_texture_energy(gen_texture, A, expected, test_count):
""" Test arbitrary textures energy value. """
total = 0.0
for i in range(test_count):
        print_progress(i/float(test_count))
x = gen_texture()
total += abs(x.T.dot(A.dot(x)).toarray())
print_clear_line()
print("\tExpected total: %.3f\n\tActual total: %.3f" %
(expected, total))
display_results(test_equal_f(float(total), 0.0), format_str = "\t%s\n")
def test_uniform_texture(my_test, A, test_count = 1000):
"""
Test if the energy over a uniform texture is zero.
    Parameters are an Energy_Test and the coefficient matrix, A.
Optionally provide a number of iterations for the test.
"""
print("Testing %d uniform textures (x^T*A*x):\n" % test_count)
N = my_test.width * my_test.height
test_texture_energy(lambda: (scipy.sparse.csc_matrix(numpy.ones((N, 1))) *
numpy.random.rand()), A, 0.0, test_count = test_count)
def test_symmetric_texture(my_test, A, test_count = 1000):
"""
Test if the energy over a symmetric texture is zero.
    Parameters are an Energy_Test and the coefficient matrix, A.
Optionally provide a number of iterations for the test.
"""
print("Testing %d symmetric textures:\n" % test_count)
test_texture_energy(lambda: scipy.sparse.csc_matrix(my_test.genSymTex()).T,
A, 0.0, test_count = test_count)
def test_bilerp(my_test, interval, test_count = 1000):
""" Test interpolation using A, B, and C. """
print("Testing %d bilinear interpolations:\n" % test_count)
((uv0, uv1), (uv1p, uv0p)) = my_test.edgePairs[0]
N = my_test.width * my_test.height
a, b = interval[0], interval[1]
mid_uv = lerp_UV((a+b)/2., uv0, uv1)
mid_uv_p = lerp_UV((a+b)/2., uv0p, uv1p)
(p00, p01, p10, p11) = surrounding_pixels(mid_uv, my_test.width,
my_test.height, as_index = True)
(p00p, p01p, p10p, p11p) = surrounding_pixels(mid_uv_p, my_test.width,
my_test.height, as_index = True)
luv0, luv1 = globalEdge_to_local(uv0, uv1, p00, my_test.width,
my_test.height)
luv0p, luv1p = globalEdge_to_local(uv0p, uv1p, p00p, my_test.width,
my_test.height)
A, B, C = energy.bilerp_coeffMats(luv0, luv1, p00, p01, p10, p11, N)
Ap, Bp, Cp = energy.bilerp_coeffMats(luv0p, luv1p, p00p, p01p, p10p, p11p,
N)
diff = 0
# pdb.set_trace()
for i in range(test_count):
print_progress(i/float(test_count))
x = scipy.sparse.csc_matrix(my_test.genSymTex()).T
t = numpy.random.rand()
i1 = (A*x)*t**2 + (B*x)*t + C*x
i2 = (Ap*x)*t**2 + (Bp*x)*t + Cp*x
diff += abs(i1-i2)
print_clear_line()
print("\tExpected difference: %.3f\n\tActual difference: %.3f" %
(0.0, float(diff)))
display_results(test_equal_f(float(diff), 0.0), format_str = "\t%s\n")
def test_solve(my_test, A, SG, test_count = 1000):
""" Test if the constrained values is solved correctly. """
print("Solving %d constrained textures:\n" % test_count)
N = my_test.width * my_test.height
total = 0
print_info = False
for i in range(test_count):
# print_progress(i/float(test_count))
texture = scipy.sparse.csc_matrix(my_test.genSymTex().reshape((N, -1)))
C, d = my_test.createLSQ(texture)
# E = gen_dirichlet(my_test.height, my_test.width)
lsqW, gradW, diriW = 1.0e6, 1.0e-3, 1.0e-6
quad = 2 * A + lsqW * C.T.dot(C) + gradW * SG # + diriW * E
lin = lsqW * C.T.dot(d)
if(print_info):
print("(Quad - SG).det = %g" % numpy.linalg.det(
(quad - gradW * SG).toarray()))
print("(Quad - SG).SVD[1] = \n%s\n" % numpy.linalg.svd(
(quad - gradW * SG).toarray())[1])
print("Quad.det = %g" % numpy.linalg.det(quad.toarray()))
print("Quad.SVD[1] = \n%s\n" % numpy.linalg.svd(quad.toarray())[1])
print_info = False
        singular_values = numpy.linalg.svd(quad.toarray())[1]  # SVD returns singular values, not eigenvalues
        product = reduce(lambda x, y: x * y, singular_values)
print("Product: %g\n" % product)
x = scipy.sparse.linalg.spsolve(quad, lin)
total += float(abs(x.T.dot(A.toarray()).dot(x)))
print_clear_line()
print("\tExpected total: %.3f\n\tActual total: %.3f" %
(0.0, total))
display_results(test_equal_f(float(total), 0.0), format_str = "\t%s\n")
#########################
# Test wrapping texture #
#########################
def test_energy(my_test, test_count = 1000):
""" Test wrapping texture. """
print(my_test.diagram)
# Calculate values needed
N = my_test.width * my_test.height
################################
# Test the interval detection. #
################################
intervals = None
if my_test.expected_intervals is not None:
print("Testing interval detection:\n")
print("\tExpected intervals: %s" % str(my_test.expected_intervals))
intervals = compute_seam_intervals(my_test.edgePairs,
my_test.width, my_test.height)
print("\tActual intervals: %s" % intervals)
display_results(test_equal(my_test.expected_intervals, intervals),
format_str = "\t%s\n")
###################################################################
# Loop over the interval and create a total energy coeff. matrix. #
###################################################################
A = energy.E_total(my_test.edgePairs, my_test.width, my_test.height, 1,
numpy.ones((len(my_test.edgePairs)))).Q
# TODO: Fix this up
# SG = seam_gradient.E_total(my_test.edgePairs, my_test.width,
# my_test.height, 1, numpy.ones((len(my_test.edgePairs)))).Q
SG = scipy.sparse.csc_matrix((N, N))
#########################################
# Test interpolation using A, B, and C. #
#########################################
if intervals is None:
intervals = compute_seam_intervals(my_test.edgePairs,
my_test.width, my_test.height)
test_bilerp(my_test, intervals[0], test_count)
#########################
# Test for zero energy. #
#########################
# Uniform Texture
test_uniform_texture(my_test, A, test_count)
# Symmetric texture
test_symmetric_texture(my_test, A, test_count)
########################
# Constrain the values #
########################
test_solve(my_test, A, SG, test_count)
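# A minimal usage sketch; every value below is illustrative, and the
# genSymTex/createLSQ callables must be supplied by the test harness.
#   my_test = Energy_Test(diagram="<ascii diagram>", width=4, height=4,
#       edgePairs=edge_pairs, expected_intervals=None,
#       genSymTex=gen_symmetric_texture, createLSQ=build_lsq_constraints)
#   test_energy(my_test, test_count=100)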
|
MingfeiPan/leetcode
|
math/942.py
|
Python
|
apache-2.0
| 381
| 0.002625
|
class Solution:
def diStringMatch(self, S: 'str') -> 'List[int]':
ret = []
left = 0
right = len(S)
for i in range(0, len(S)):
            if S[i] == 'I':
ret.append(left)
left += 1
else:
ret.append(right)
right -= 1
ret.append(left)
        return ret
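# A minimal usage sketch; the input string is illustrative.
if __name__ == "__main__":
    print(Solution().diStringMatch("IDID"))  # -> [0, 4, 1, 3, 2]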
|
uber-common/deck.gl
|
bindings/pydeck/pydeck/bindings/layer.py
|
Python
|
mit
| 6,775
| 0.001919
|
import uuid
import numpy as np
from ..data_utils import is_pandas_df, has_geo_interface, records_from_geo_interface
from .json_tools import JSONMixin, camel_and_lower
from pydeck.types import Image
from pydeck.exceptions import BinaryTransportException
TYPE_IDENTIFIER = "@@type"
FUNCTION_IDENTIFIER = "@@="
QUOTE_CHARS = {"'", '"', "`"}
class Layer(JSONMixin):
def __init__(self, type, data=None, id=None, use_binary_transport=None, **kwargs):
"""Configures a deck.gl layer for rendering on a map. Parameters passed
here will be specific to the particular deck.gl layer that you are choosing to use.
Please see the deck.gl
`Layer catalog <https://deck.gl/docs/api-reference/layers>`_
to determine the particular parameters of your layer. You are highly encouraged to look
at the examples in the pydeck documentation.
Parameters
==========
type : str
Type of layer to render, e.g., `HexagonLayer`
id : str, default None
Unique name for layer
data : str or list of dict of {str: Any} or pandas.DataFrame, default None
Either a URL of data to load in or an array of data
use_binary_transport : bool, default None
            Boolean indicating whether the layer data should be transferred in binary format
**kwargs
Any of the parameters passable to a deck.gl layer.
Examples
========
For example, here is a HexagonLayer which reads data from a URL.
>>> import pydeck
>>> # 2014 location of car accidents in the UK
>>> UK_ACCIDENTS_DATA = ('https://raw.githubusercontent.com/uber-common/'
>>> 'deck.gl-data/master/examples/3d-heatmap/heatmap-data.csv')
>>> # Define a layer to display on a map
>>> layer = pydeck.Layer(
>>> 'HexagonLayer',
>>> UK_ACCIDENTS_DATA,
>>> get_position=['lng', 'lat'],
>>> auto_highlight=True,
>>> elevation_scale=50,
>>> pickable=True,
>>> elevation_range=[0, 3000],
>>> extruded=True,
>>> coverage=1)
Alternately, input can be a pandas.DataFrame:
        >>> import pydeck
        >>> import pandas as pd
        >>> df = pd.read_csv(UK_ACCIDENTS_DATA)
>>> layer = pydeck.Layer(
>>> 'HexagonLayer',
>>> df,
>>> get_position=['lng', 'lat'],
>>> auto_highlight=True,
>>> elevation_scale=50,
>>> pickable=True,
>>> elevation_range=[0, 3000],
>>> extruded=True,
>>> coverage=1)
"""
self.type = type
self.id = id or str(uuid.uuid4())
# Add any other kwargs to the JSON output
self._kwargs = kwargs.copy()
if kwargs:
for k, v in kwargs.items():
# We assume strings and arrays of strings are identifiers
# ["lng", "lat"] would be converted to '[lng, lat]'
# TODO given that data here is usually a list of records,
# we could probably check that the identifier is in the row
# Errors on case like get_position='-', however
                if isinstance(v, str) and v and v[0] in QUOTE_CHARS and v[0] == v[-1]:
# Skip quoted strings
kwargs[k] = v.replace(v[0], "")
elif isinstance(v, str) and Image.validate(v):
# Have pydeck convert local images to strings and/or apply extra quotes
kwargs[k] = Image(v)
elif isinstance(v, str):
# Have @deck.gl/json treat strings values as functions
kwargs[k] = FUNCTION_IDENTIFIER + v
elif isinstance(v, list) and v != [] and isinstance(v[0], str):
# Allows the user to pass lists e.g. to specify coordinates
array_as_str = ""
for i, identifier in enumerate(v):
if i == len(v) - 1:
array_as_str += "{}".format(identifier)
else:
array_as_str += "{}, ".format(identifier)
kwargs[k] = "{}[{}]".format(FUNCTION_IDENTIFIER, array_as_str)
self.__dict__.update(kwargs)
self._data = None
self.use_binary_transport = use_binary_transport
self._binary_data = None
self.data = data
@property
def data(self):
return self._data
@data.setter
def data(self, data_set):
"""Make the data attribute a list no matter the input type, unless
        use_binary_transport is specified, in which case we circumvent
serializing the data to JSON
"""
if self.use_binary_transport:
self._binary_data = self._prepare_binary_data(data_set)
elif is_pandas_df(data_set):
self._data = data_set.to_dict(orient="records")
elif has_geo_interface(data_set):
self._data = records_from_geo_interface(data_set)
else:
self._data = data_set
def get_binary_data(self):
if not self.use_binary_transport:
raise BinaryTransportException("Layer must be flagged with `use_binary_transport=True`")
return self._binary_data
def _prepare_binary_data(self, data_set):
# Binary format conversion gives a sizable speedup but requires
# slightly stricter standards for data input
if not is_pandas_df(data_set):
raise BinaryTransportException("Layer data must be a `pandas.DataFrame` type")
layer_accessors = self._kwargs
inverted_accessor_map = {v: k for k, v in layer_accessors.items() if type(v) not in [list, dict, set]}
binary_transmission = []
# Loop through data columns and convert them to numpy arrays
for column in data_set.columns:
# np.stack will take data arrays and conveniently extract the shape
np_data = np.stack(data_set[column].to_numpy())
            # Get rid of the accessor so it doesn't appear in the JSON output
del self.__dict__[inverted_accessor_map[column]]
binary_transmission.append(
{
"layer_id": self.id,
"column_name": column,
"accessor": camel_and_lower(inverted_accessor_map[column]),
"np_data": np_data,
}
)
return binary_transmission
@property
def type(self):
return getattr(self, TYPE_IDENTIFIER)
@type.setter
def type(self, type_name):
self.__setattr__(TYPE_IDENTIFIER, type_name)
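# A minimal usage sketch, run only on direct execution; the layer type,
# accessor names and coordinates below are illustrative.
if __name__ == "__main__":
    demo = Layer(
        "ScatterplotLayer",
        data=[{"position": [-122.45, 37.8], "size": 100}],
        get_position="position",
        get_radius="size",
    )
    print(demo.type, demo.data)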
|
wizzat/wizzat.py
|
wizzat/dbtable.py
|
Python
|
mit
| 13,346
| 0.011389
|
from __future__ import (absolute_import, division, print_function, unicode_literals)
from builtins import *
from future.utils import with_metaclass
import copy
import types
import wizzat.decorators
from wizzat.pghelper import *
from wizzat.util import set_defaults
__all__ = [
'DBTable',
'DBTableError',
'DBTableConfigError',
'DBTableImmutableFieldError',
]
class DBTableError(Exception): pass
class DBTableConfigError(DBTableError): pass
class DBTableImmutableFieldError(DBTableError): pass
class DBTableMeta(type):
def __init__(cls, name, bases, dct):
super(DBTableMeta, cls).__init__(name, bases, dct)
if 'table_name' not in dct or not isinstance(dct['table_name'], str):
raise DBTableConfigError("table_name is required, and should be a string")
if 'fields' not in dct or not isinstance(dct['fields'], (list, tuple)):
raise DBTableConfigError("fields is required, and should be a list or tuple")
if 'id_field' in dct:
if not isinstance(dct['id_field'], (type(None), str)):
raise DBTableConfigError('id_field is not required, but should be a string or None')
else:
cls.id_field = None
if 'key_fields' in dct:
if not isinstance(dct['key_fields'], (list, tuple)):
raise DBTableConfigError('key_fields is not required, but should be a list of strings or None')
for field in dct['key_fields']:
if not isinstance(field, str):
raise DBTableConfigError('key_fields is not required, but should be a list of strings or None')
else:
cls.key_fields = []
if dct.get('id_field') and dct['id_field'] not in dct['fields']:
raise DBTableConfigError('id field {} not in fields'.format(dct['id_field']))
for field in dct.get('key_fields', []):
if field not in dct['fields']:
raise DBTableConfigError('key field {} not in fields'.format(field))
if dct.get('memoize'):
cls.id_cache = wizzat.decorators.create_cache_obj(
max_size = dct.get('memoize_size', 0),
max_bytes = dct.get('memoize_bytes', 0),
)
cls.key_cache = wizzat.decorators.create_cache_obj(
max_size = dct.get('memoize_size', 0),
max_bytes = dct.get('memoize_bytes', 0),
)
cls._conn = None
cls.default_funcs = {}
for field in dct['fields']:
func_name = 'default_{}'.format(field)
if func_name in dct:
cls.default_funcs[field] = dct[func_name]
class DBTable(with_metaclass(DBTableMeta)):
"""
This is a micro-ORM for the purposes of not having dependencies on Django or SQLAlchemy.
    Philosophically, it supports only the object abstraction and very simple SQL generation.
It requires full knowledge of SQL.
Params:
table_name: string, the name of the table to query
id_field: string, the name of the id field (generally a surrogate key)
key_fields: list[string], the names of the key fields (generally primary or unique key)
fields: list[string], the names of all fields on the object
--
memoize: bool, caches objects from the database locally
memoize_size: int, maximum number of objects to cache from the database (LRU ejection)
        memoize_bytes: int, maximum total size of objects to cache from the database (LRU ejection).
Note that there are two caches, and while references are shared the
cache size here is not absolute.
default_{field}: func, define functions for default behaviors. These functions are executed
in order of definition in the fields array.
"""
memoize = False
table_name = ''
id_field = ''
key_fields = []
fields = []
def __init__(self, _is_in_db = False, **kwargs):
self.db_fields = kwargs if _is_in_db else {}
for field in self.fields:
if field in kwargs:
field_value = kwargs[field]
elif field in self.default_funcs:
field_value = self.default_funcs[field](self)
else:
field_value = None
setattr(self, field, copy.deepcopy(field_value))
self.on_init()
self.cache_obj(self)
def on_init(self):
pass
@classmethod
def check_key_cache(cls, key_fields):
if cls.memoize:
cache_key = tuple(key_fields)
return cls.key_cache.get(cache_key, None)
@classmethod
def check_id_cache(cls, id):
if cls.memoize:
return cls.id_cache.get(id, None)
@classmethod
def cache_obj(cls, obj):
if cls.memoize:
if obj and cls.id_field:
cache_key = getattr(obj, cls.id_field)
cls.id_cache[cache_key] = obj
if obj and cls.key_fields:
cache_key = tuple(getattr(obj, field) for field in cls.key_fields)
cls.key_cache[cache_key] = obj
@classmethod
def clear_cache(cls):
if cls.memoize:
cls.id_cache.clear()
cls.key_cache.clear()
@classmethod
def uncache_obj(cls, obj):
if cls.id_field:
cache_key = getattr(obj, cls.id_field)
cls.id_cache.pop(cache_key, None)
if cls.key_fields:
cache_key = tuple(getattr(obj, field) for field in cls.key_fields)
cls.key_cache.pop(cache_key, None)
@classmethod
def find_by_id(cls, id):
obj = cls.check_id_cache(id)
if obj:
return obj
return cls.find_one(**{ cls.id_field : id })
@classmethod
def find_by_key(cls, *keys):
obj = cls.check_key_cache(keys)
if obj:
return obj
return cls.find_one(**{ field : value for field,value in zip(cls.key_fields, keys) })
@classmethod
def find_one(cls, **kwargs):
found = list(cls.find_by(**kwargs))
if not found:
return None
assert len(found) == 1
return found[0]
@classmethod
def create(cls, *keys, **kwargs):
kwargs = set_defaults(kwargs, { field : value for field, value in zip(cls.key_fields, keys) })
return cls(**kwargs).update()
@classmethod
def find_or_create(cls, *args, **kwargs):
return cls.find_by_key(*args) or cls.create(*args, **kwargs)
@classmethod
def find_or_create_many(cls, *rows):
        return [ cls.find_or_create(*row) for row in rows ]
@classmethod
def find_by(cls, for_update = False, nowait = False, **kwargs):
"""
Returns rows which match all key/value pairs
Additionally, accepts for_update = True/False, nowait = True/False
"""
for_update = 'for update' if for_update else ''
nowait = 'nowait' if nowait else ''
sql = """
SELECT *
            FROM {table_name}
where {where_clause}
{for_update} {nowait}
""".format(
table_name = cls.table_name,
where_clause = sql_where_from_params(**kwargs),
for_update = for_update,
nowait = nowait,
)
return cls.find_by_sql(sql, **kwargs)
@classmethod
def find_by_sql(cls, sql, **bind_params):
for row in iter_results(cls.conn, sql, **bind_params):
yield cls(_is_in_db = True, **row)
def rowlock(self, nowait = False):
"""
Locks a row in the database for update. Requires a primary key.
"""
nowait = "nowait" if nowait else ""
if self.id_field:
fields = [ self.id_field ]
elif self.key_fields:
fields = self.key_fields
else:
fields = self.fields
filter_clause = ' and '.join([ '{0} = %(orig_{0})s'.format(field) for field in fields ])
bind_params = { 'orig_{}'.format(x) : self.db_fields[x] for x in fields }
        sql = """
            SELECT 1
            FROM {table_name}
            WHERE {filter_clause}
            FOR UPDATE {nowait}
        """.format(
            table_name    = self.table_name,
            filter_clause = filter_clause,
            nowait        = nowait,
        )
        # execute() is assumed to come from wizzat.pghelper (star-imported above)
        execute(self.conn, sql, **bind_params)
        return self
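# A minimal usage sketch; the table, columns and values are illustrative.
class ExampleUser(DBTable):
    table_name = 'users'
    id_field   = 'user_id'
    key_fields = [ 'email' ]
    fields     = [ 'user_id', 'email', 'name' ]
# With a live connection configured:
#   user = ExampleUser.find_or_create('someone@example.com', name='Someone')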
|
pytrainer/pytrainer
|
plugins/garmintools/garmintools.py
|
Python
|
gpl-2.0
| 4,768
| 0.027475
|
# -*- coding: utf-8 -*-
#Copyright (C) Fiz Vazquez vud1@sindominio.net
# Modified by dgranda
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Added to support python installations older than 2.6
from __future__ import with_statement
import logging
import os
from io import BytesIO
from lxml import etree
from pytrainer.lib.xmlUtils import XMLParser
from pytrainer.gui.dialogs import fileChooserDialog, guiFlush
from pytrainer.core.activity import Activity
from sqlalchemy.orm import exc
class garmintools():
def __init__(self, parent = None, validate=False):
self.parent = parent
self.pytrainer_main = parent.pytrainer_main
self.tmpdir = self.pytrainer_main.profile.tmpdir
self.data_path = os.path.dirname(__file__)
self.validate = validate
self.sport = self.getConfValue("Force_sport_to")
def getConfValue(self, confVar):
info = XMLParser(self.data_path+"/conf.xml")
code = info.getValue("pytrainer-plugin","plugincode")
plugindir = self.pytrainer_main.profile.plugindir
if not os.path.isfile(plugindir+"/"+code+"/conf.xml"):
value = None
else:
info = XMLParser(plugindir+"/"+code+"/conf.xml")
value = info.getValue("pytrainer-plugin",confVar)
return value
def run(self):
logging.debug(">>")
# able to select multiple files....
selectedFiles = fileChooserDialog(title="Choose a garmintools dump file (or files) to import", multiple=True).getFiles()
guiFlush()
importfiles = []
if not selectedFiles: #Nothing selected
return importfiles
for filename in selectedFiles:
if self.valid_input_file(filename):
                #Garmin dump files are not valid xml - need to wrap them in a root element
                #read file into a byte string
                with open(filename, 'rb') as f:
xmlString = f.read()
fileString = BytesIO(b"<root>" + xmlString + b"</root>")
#parse string as xml
tree = etree.parse(fileString)
if not self.inDatabase(tree):
sport = self.getSport(tree)
gpxfile = "%s/garmintools-%d.gpx" % (self.tmpdir, len(importfiles))
self.createGPXfile(gpxfile, tree)
importfiles.append((gpxfile, sport))
else:
logging.debug("%s already in database. Skipping import." % (filename,) )
else:
logging.info("File %s failed validation" % (filename))
logging.debug("<<")
return importfiles
def valid_input_file(self, filename):
""" Function to validate input file if requested"""
if not self.validate: #not asked to validate
logging.debug("Not validating %s" % (filename) )
return True
else:
print("Cannot validate garminto
|
ols dump files yet")
logging.debug("Cannot validate garmintools dump files yet")
return True
        '''xslfile = os.path.realpath(self.parent.parent.data_path)+ "/schemas/GarminTrainingCenterDatabase_v2.xsd"
from lib.xmlValidation import xmlValidator
validator = xmlValidator()
return validator.validateXSL(filename, xslfile)'''
def inDatabase(self, tree):
#comparing date and start time (sport may have been changed in DB after import)
time = self.detailsFromFile(tree)
try:
self.pytrainer_main.ddbb.session.query(Activity).filter(Activity.date_time_utc == time).one()
return True
except exc.NoResultFound:
return False
def getSport(self, tree):
        #return sport from file or override if present
if self.sport:
return self.sport
root = tree.getroot()
sportElement = root.find(".//run")
try:
sport = sportElement.get("sport")
sport = sport.capitalize()
except:
sport = "import"
return sport
def detailsFromFile(self, tree):
root = tree.getroot()
#Find first point
pointElement = root.find(".//point")
if pointElement is not None:
#Try to get time from point
time = pointElement.get("time")
print("#TODO first time is different from time used by gpsbabel and has locale embedded: " + time)
return time
return None
def createGPXfile(self, gpxfile, tree):
""" Function to transform a Garmintools dump file to a valid GPX+ file
"""
xslt_doc = etree.parse(self.data_path+"/translate.xsl")
transform = etree.XSLT(xslt_doc)
result_tree = transform(tree)
result_tree.write(gpxfile, xml_declaration=True, encoding='UTF-8')
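# A minimal usage sketch; `parent` stands in for the pytrainer plugin host.
#   plugin = garmintools(parent)
#   for gpxfile, sport in plugin.run():
#       print(gpxfile, sport)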
|
drinkertea/pywinauto
|
pywinauto/handleprops.py
|
Python
|
bsd-3-clause
| 14,767
| 0.002302
|
# GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Functions to retrieve properties from a window handle
These are implemented in a procedural way so as to be
useful to other modules with the least conceptual overhead
"""
import warnings
import win32process
import win32api
import win32con
import win32gui
import pywintypes
from ctypes import wintypes
from ctypes import WINFUNCTYPE
from ctypes import c_int
from ctypes import byref
from ctypes import sizeof
from ctypes import create_unicode_buffer
from . import win32functions
from . import win32defines
from . import win32structures
from .actionlogger import ActionLogger
#=========================================================================
def text(handle):
"""Return the text of the window"""
class_name = classname(handle)
if class_name == 'IME':
return 'Default IME'
if class_name == 'MSCTFIME UI':
return 'M'
if class_name is None:
return None
#length = win32functions.SendMessage(handle, win32defines.WM_GETTEXTLENGTH, 0, 0)
# XXX: there are some very rare cases when WM_GETTEXTLENGTH hangs!
# WM_GETTEXTLENGTH may hang even for notepad.exe main window!
c_length = win32structures.DWORD_PTR(0)
result = win32functions.SendMessageTimeout(
handle,
win32defines.WM_GETTEXTLENGTH,
0,
0,
win32defines.SMTO_ABORTIFHUNG,
500,
byref(c_length)
)
if result == 0:
ActionLogger().log('WARNING! Cannot retrieve text length for handle = ' + str(handle))
return None
else:
length = c_length.value
textval = ''
# In some rare cases, the length returned by WM_GETTEXTLENGTH is <0.
# Guard against this by checking it is >0 (==0 is not of interest):
if length > 0:
length += 1
buffer_ = create_unicode_buffer(length)
ret = win32functions.SendMessage(
handle, win32defines.WM_GETTEXT, length, byref(buffer_))
if ret:
textval = buffer_.value
return textval
#=========================================================================
def classname(handle):
"""Return the class name of the window"""
if handle is None:
return None
class_name = create_unicode_buffer(u"", 257)
win32functions.GetClassName(handle, class_name, 256)
return class_name.value
#=========================================================================
def parent(handle):
"""Return the handle of the parent of the window"""
return win32functions.GetParent(handle)
#=========================================================================
def style(handle):
"""Return the style of the window"""
return win32functions.GetWindowLong(handle, win32defines.GWL_STYLE)
#=========================================================================
def exstyle(handle):
"""Return the extended style of the window"""
return win32functions.GetWindowLong(handle, win32defines.GWL_EXSTYLE)
#=========================================================================
def controlid(handle):
"""Return the ID of the control"""
return win32functions.GetWindowLong(handle, win32defines.GWL_ID)
#=========================================================================
def userdata(handle):
"""Return the value of any user data associated with the window"""
return win32functions.GetWindowLong(handle, win32defines.GWL_USERDATA)
#=========================================================================
def contexthelpid(handle):
"""Return the context help id of the window"""
return win32functions.GetWindowContextHelpId(handle)
#=========================================================================
def iswindow(handle):
"""Return True if the handle is a window"""
return False if handle is None else bool(win32functions.IsWindow(handle))
#=========================================================================
def isvisible(handle):
"""Return True if the window is visible"""
return False if handle is None else bool(win32functions.IsWindowVisible(handle))
#=========================================================================
def isunicode(handle):
"""Return True if the window is a Unicode window"""
return False if handle is None else bool(win32functions.IsWindowUnicode(handle))
#=========================================================================
def isenabled(handle):
"""Return True if the window is enabled"""
return False if handle is None else bool(win32functions.IsWindowEnabled(handle))
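#=========================================================================
# A minimal usage sketch; the window title is illustrative and a matching
# top-level window must exist for anything to be printed.
def _example_dump_window_props(title=u"Untitled - Notepad"):
    try:
        handle = win32gui.FindWindow(None, title)
    except pywintypes.error:
        return
    if iswindow(handle):
        print("{0!r} {1!r} enabled={2}".format(
            text(handle), classname(handle), isenabled(handle)))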
#=========================================================================
def is64bitprocess(process_id):
"""Return True if the specified process is a 64-bit process on x64
Return False if it is only a 32-bit process running under Wow64.
Always return False for x86.
"""
from .sysinfo import is_x64_OS
is32 = True
if is_x64_OS():
phndl = win32api.OpenProcess(win32con.MAXIMUM_ALLOWED, 0, process_id)
if phndl:
is32 = win32process.IsWow64Process(phndl)
#print("is64bitprocess, is32: %d, procid: %d" % (is32, process_id))
return (not is32)
#=========================================================================
def is64bitbinary(filename):
"""Check if the file is 64-bit binary"""
import win32file
try:
binary_type = win32file.GetBinaryType(filename)
return binary_type != win32file.SCS_32BIT_BINARY
except Exception as exc:
warnings.warn('Cannot get binary type for file "{}". Error: {}'
.format(filename, exc), RuntimeWarning, stacklevel=2)
return None
#=========================================================================
def clientrect(handle):
"""Return the client rectangle of the control"""
client_rect = win32structures.RECT()
win32functions.GetClientRect(handle, byref(client_rect))
return client_rect
#=========================================================================
def rectangle(handle):
"""Return the rectangle of the window"""
# GetWindowRect returns 4-tuple
try:
return win32structures.RECT(*win32gui.GetWindowRect(handle))
except pywintypes.error:
        # Fall back to an empty RECT when the call fails
        return win32structures.RECT()
|
tinkerinestudio/Tinkerine-Suite
|
TinkerineSuite/Cura/util/profile2.py
|
Python
|
agpl-3.0
| 23,092
| 0.027239
|
from __future__ import absolute_import
from __future__ import division
import os, traceback, math, re, zlib, base64, time, sys, platform, glob, string, stat
import cPickle as pickle
if sys.version_info[0] < 3:
import ConfigParser
else:
import configparser as ConfigParser
from Cura.util import resources
from Cura.util import version
#########################################################
## Default settings when none are found.
#########################################################
#Single place to store the defaults, so we have a consistent set of default settings.
profileDefaultSettings = {
'nozzle_size': '0.4',
'layer_height': '0.2',
'wall_thickness': '0.8',
'solid_layer_thickness': '0.6',
'fill_density': '20',
'skirt_line_count': '1',
'skirt_gap': '3.0',
'print_speed': '50',
'print_temperature': '220',
'print_bed_temperature': '70',
'support': 'None',
'filament_diameter': '2.89',
'filament_density': '1.00',
'retraction_min_travel': '5.0',
'retraction_enable': 'False',
'retraction_speed': '40.0',
'retraction_amount': '4.5',
'retraction_extra': '0.0',
'retract_on_jumps_only': 'True',
'travel_speed': '150',
'max_z_speed': '3.0',
'bottom_layer_speed': '20',
'cool_min_layer_time': '5',
'fan_enabled': 'True',
'fan_layer': '1',
'fan_speed': '100',
'fan_speed_max': '100',
'model_scale': '1.0',
'flip_x': 'False',
'flip_y': 'False',
'flip_z': 'False',
'swap_xz': 'False',
'swap_yz': 'False',
'model_rotate_base': '0',
'model_multiply_x': '1',
'model_multiply_y': '1',
'extra_base_wall_thickness': '0.0',
'sequence': 'Loops > Perimeter > Infill',
'force_first_layer_sequence': 'True',
'infill_type': 'Line',
'solid_top': 'True',
'fill_overlap': '15',
'support_rate': '50',
'support_distance': '0.5',
'support_dual_extrusion': 'False',
'joris': 'False',
'enable_skin': 'False',
'enable_raft': 'False',
'cool_min_feedrate': '10',
'bridge_speed': '100',
'raft_margin': '5',
'raft_base_material_amount': '100',
'raft_interface_material_amount': '100',
'bottom_thicknes': '0.3',
'hop_on_move': 'False',
'plugin_config': '',
'object_center_x': '-1',
'object_center_y': '-1',
'add_start_end_gcode': 'True',
'gcode_extension': 'gcode',
'alternative_center': '',
'clear_z': '0.0',
'extruder': '0',
'bottom_surface_thickness_layers': '2',
'top_surface_thickness_layers': '3',
#'extruder': '0',
}
alterationDefault = {
#######################################################################################
'start.gcode': """;Sliced {filename} at: {day} {date} {time}
;Basic settings: Layer height: {layer_height} Walls: {wall_thickness} Fill: {fill_density}
;Print time: {print_time}
;Filament used: {filament_amount}m {filament_weight}g
;Filament cost: {filament_cost}
G21 ;metric values
G90 ;absolute positioning
M107 ;start with the fan off
G28 X0 Y0 ;move X/Y to min endstops
G28 Z0 ;move Z to min endstops
G92 X0 Y0 Z0 E0 ;reset software position to front/left/z=0.0
G1 Z15.0 F{max_z_speed} ;move the platform down 15mm
G92 E0 ;zero the extruded length
G1 F200 E3 ;extrude 3mm of feed stock
G92 E0 ;zero the extruded length again
G1 F{travel_speed}
""",
#######################################################################################
'end.gcode': """;End GCode
M104 S0 ;extruder heater off
M140 S0 ;heated bed heater off (if you have it)
G91 ;relative positioning
G1 E-1 F300 ;retract the filament a bit before lifting the nozzle, to release some of the pressure
G1 Z+0.5 E-5 X-20 Y-20 F{travel_speed} ;move Z up a bit and retract filament even more
G28 X0 Y0 ;move X/Y to min endstops, so the head is out of the way
M84 ;steppers off
G90 ;absolute positioning
""",
#######################################################################################
'support_start.gcode': '',
'support_end.gcode': '',
'cool_start.gcode': '',
'cool_end.gcode': '',
'replace.csv': '',
#######################################################################################
'nextobject.gcode': """;Move to next object on the platform. clear_z is the minimal z height we need to make sure we do not hit any objects.
G92 E0
G91 ;relative positioning
G1 E-1 F300 ;retract the filament a bit before lifting the nozzle, to release some of the pressure
G1 Z+0.5 E-5 F{travel_speed} ;move Z up a bit and retract filament even more
G90 ;absolute positioning
G1 Z{clear_z} F{max_z_speed}
G92 E0
G1 X{object_center_x} Y{object_center_y} F{travel_speed}
G1 F200 E6
G92 E0
""",
#######################################################################################
'switchExtruder.gcode': """;Switch between the current extruder and the next extruder, when printing with multiple extruders.
G92 E0
G1 E-15 F5000
G92 E0
T{extruder}
G1 E15 F5000
G92 E0
""",
}
preferencesDefaultSettings = {
'startMode': 'Simple',
'lastFile': os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'resources', 'example', 'UltimakerRobot_support.stl')),
'machine_width': '205',
'machine_depth': '205',
'machine_height': '200',
'machine_type': 'unknown',
'ultimaker_extruder_upgrade': 'False',
'has_heated_bed': 'False',
'extruder_amount': '1',
'extruder_offset_x1': '-22.0',
'extruder_offset_y1': '0.0',
'extruder_offset_x2': '0.0',
'extruder_offset_y2': '0.0',
'extruder_offset_x3': '0.0',
'extruder_offset_y3': '0.0',
'filament_density': '1300',
'steps_per_e': '0',
'serial_port': 'AUTO',
'serial_port_auto': '',
'serial_baud': 'AUTO',
'serial_baud_auto': '',
'slicer': 'Cura (Skeinforge based)',
'save_profile': 'False',
'filament_cost_kg': '0',
'filament_cost_meter': '0',
'sdpath': '',
'sdshortnames': 'True',
'extruder_head_size_min_x': '70.0',
'extruder_head_size_min_y': '18.0',
'extruder_head_size_max_x': '18.0',
    'extruder_head_size_max_y': '35.0',
'extruder_head_size_height': '80.0',
'model_colour': '#8BC53F',
'model_colour2': '#CB3030',
'model_colour3': '#DDD93C',
'model_colour4': '#4550D3',
}
#########################################################
## Profile and preferences functions
#########################################################
## Profile functions
def getDefaultProfilePath():
if platform.system() == "Windows":
basePath = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))
#If we have a frozen python install, we need to step out of the library.zip
if hasattr(sys, 'frozen'):
basePath = os.path.normpath(os.path.join(basePath, ".."))
else:
basePath = os.path.expanduser('~/.cura/%s' % version.getVersion(False))
if not os.path.isdir(basePath):
os.makedirs(basePath)
return os.path.join(basePath, 'current_profile.ini')
def loadGlobalProfile(filename):
#Read a configuration file as global config
global globalProfileParser
globalProfileParser = ConfigParser.ConfigParser()
globalProfileParser.read(filename)
def resetGlobalProfile():
#Read a configuration file as global config
global globalProfileParser
globalProfileParser = ConfigParser.ConfigParser()
if getPreference('machine_type') == 'ultimaker':
putProfileSetting('nozzle_size', '0.4')
if getPreference('ultimaker_extruder_upgrade') == 'True':
putProfileSetting('retraction_enable', 'True')
else:
putProfileSetting('nozzle_size', '0.5')
def saveGlobalProfile(filename):
#Save the current profile to an ini file
    with open(filename, 'w') as f:
        globalProfileParser.write(f)
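# A minimal usage sketch: read the stored profile, change one setting and
# write it back (putProfileSetting is defined elsewhere in this module).
#   loadGlobalProfile(getDefaultProfilePath())
#   putProfileSetting('layer_height', '0.1')
#   saveGlobalProfile(getDefaultProfilePath())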
def loadGlobalProfileFromString(options):
global globalProfileParser
globalProfileParser = ConfigParser.ConfigParser()
globalProfileParser.add_section('profile')
globalProfileParser.add_section('alterations')
options = base64.b64decode(options)
options = zlib.decompress(options)
(profileOpts, alt) = options.split('\f', 1)
for option in profileOpts.split('\b'):
if len(option) > 0:
(key, value) = option.split('=', 1)
            globalProfileParser.set('profile', key, value)
|
conorsch/securedrop
|
securedrop/alembic/versions/48a75abc0121_add_seen_tables.py
|
Python
|
agpl-3.0
| 1,840
| 0
|
"""add seen tables
Revision ID: 48a75abc0121
Revises: 35513370ba0d
Create Date: 2020-09-15 22:34:50.116403
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "48a75abc0121"
down_revision = "35513370ba0d"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
"seen_files",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("file_id", sa.Integer(), nullable=False),
sa.Column("journalist_id", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("file_id", "journalist_id"),
sa.ForeignKeyConstraint(["file_id"], ["submissions.id"]),
sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]),
)
op.create_table(
"seen_messag
|
es",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("message_i
|
d", sa.Integer(), nullable=False),
sa.Column("journalist_id", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("message_id", "journalist_id"),
sa.ForeignKeyConstraint(["message_id"], ["submissions.id"]),
sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]),
)
op.create_table(
"seen_replies",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("reply_id", sa.Integer(), nullable=False),
sa.Column("journalist_id", sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("reply_id", "journalist_id"),
sa.ForeignKeyConstraint(["reply_id"], ["replies.id"]),
sa.ForeignKeyConstraint(["journalist_id"], ["journalists.id"]),
)
def downgrade():
op.drop_table("seen_files")
op.drop_table("seen_messages")
op.drop_table("seen_replies")
|
whitepyro/debian_server_setup
|
tornado/netutil.py
|
Python
|
gpl-3.0
| 17,330
| 0.000404
|
#!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Miscellaneous network utility code."""
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import platform
import socket
import stat
from tornado.concurrent import dummy_executor, run_on_executor
from tornado.ioloop import IOLoop
from tornado.platform.auto import set_close_exec
from tornado.util import u, Configurable, errno_from_exception
try:
import ssl
except ImportError:
# ssl is not available on Google App Engine
ssl = None
if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+
ssl_match_hostname = ssl.match_hostname
SSLCertificateError = ssl.CertificateError
elif ssl is None:
ssl_match_hostname = SSLCertificateError = None
else:
import backports.ssl_match_hostname
ssl_match_hostname = backports.ssl_match_hostname.match_hostname
SSLCertificateError = backports.ssl_match_hostname.CertificateError
# ThreadedResolver runs getaddrinfo on a thread. If the hostname is unicode,
# getaddrinfo attempts to import encodings.idna. If this is done at
# module-import time, the import lock is already held by the main thread,
# leading to deadlock. Avoid it by caching the idna encoder on the main
# thread now.
u('foo').encode('idna')
# These errnos indicate that a non-blocking operation must be retried
# at a later time. On most platforms they're the same value, but on
# some they differ.
_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
if hasattr(errno, "WSAEWOULDBLOCK"):
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,)
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
"""Creates listening sockets bound to the given port and address.
Returns a list of socket objects (multiple sockets are returned if
the given address maps to multiple IP addresses, which is most common
for mixed IPv4 and IPv6 use).
Address may be either an IP address or hostname. If it's a hostname,
the server will listen on all IP addresses associated with the
name. Address may be an empty string or None to listen on all
available interfaces. Family may be set to either `socket.AF_INET`
or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise
both will be used if available.
The ``backlog`` argument has the same meaning as for
`socket.listen() <socket.socket.listen>`.
``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like
``socket.AI_PASSIVE | socket.AI_NUMERICHOST``.
"""
sockets = []
if address == "":
address = None
if not socket.has_ipv6 and family == socket.AF_UNSPEC:
# Python can be compiled with --disable-ipv6, which causes
# operations on AF_INET6 sockets to fail, but does not
# automatically exclude those results from getaddrinfo
# results.
# http://bugs.python.org/issue16208
family = socket.AF_INET
if flags is None:
flags = socket.AI_PASSIVE
bound_port = None
for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM,
0, flags)):
af, socktype, proto, canonname, sockaddr = res
if (platform.system() == 'Darwin' and address == 'localhost' and
af == socket.AF_INET6 and sockaddr[3] != 0):
# Mac OS X includes a link-local address fe80::1%lo0 in the
# getaddrinfo results for 'localhost'. However, the firewall
# doesn't understand that this is a local address and will
# prompt for access (often repeatedly, due to an apparent
# bug in its ability to remember granting access to an
# application). Skip these addresses.
continue
try:
sock = socket.socket(af, socktype, proto)
except socket.error as e:
if errno_from_exception(e) == errno.EAFNOSUPPORT:
continue
raise
set_close_exec(sock.fileno())
if os.name != 'nt':
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if af == socket.AF_INET6:
# On linux, ipv6 sockets accept ipv4 too by default,
# but this makes it impossible to bind to both
# 0.0.0.0 in ipv4 and :: in ipv6. On other systems,
# separate sockets *must* be used to listen for both ipv4
# and ipv6. For consistency, always disable ipv4 on our
# ipv6 sockets and use a separate ipv4 socket when needed.
#
# Python 2.x on windows doesn't have IPPROTO_IPV6.
if hasattr(socket, "IPPROTO_IPV6"):
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
# automatic port allocation with port=None
# should bind on the same port on IPv4 and IPv6
host, requested_port = sockaddr[:2]
if requested_port == 0 and bound_port is not None:
sockaddr = tuple([host, bound_port] + list(sockaddr[2:]))
sock.setblocking(0)
sock.bind(sockaddr)
bound_port = sock.getsockname()[1]
sock.listen(backlog)
sockets.append(sock)
return sockets
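# A minimal usage sketch; the port and handler are illustrative, and an
# IOLoop must be started before connections are actually accepted.
def _example_listen(port=8888):
    sockets = bind_sockets(port)
    for sock in sockets:
        add_accept_handler(sock, lambda connection, address: connection.close())
    return sockets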
if hasattr(socket, 'AF_UNIX'):
def bind_unix_socket(file, mode=0o600, backlog=128):
"""Creates a listening unix socket.
If a socket with the given name already exists, it will be deleted.
If any other file with that name exists, an exception will be
raised.
Returns a socket object (not a list of socket objects like
`bind_sockets`)
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
set_close_exec(sock.fileno())
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setblocking(0)
try:
st = os.stat(file)
except OSError as err:
if errno_from_exception(err) != errno.ENOENT:
raise
else:
if stat.S_ISSOCK(st.st_mode):
os.remove(file)
else:
raise ValueError("File %s exists and is not a socket", file)
sock.bind(file)
os.chmod(file, mode)
sock.listen(backlog)
return sock
def add_accept_handler(sock, callback, io_loop=None):
"""Adds an `.IOLoop` event handler to accept new connections on ``sock``.
When a connection is accepted, ``callback(connection, address)`` will
be run (``connection`` is a socket object, and ``address`` is the
address of the other end of the connection). Note that this signature
is different from the ``callback(fd, events)`` signature used for
`.IOLoop` handlers.
"""
if io_loop is None:
io_loop = IOLoop.current()
def accept_handler(fd, events):
while True:
try:
connection, address = sock.accept()
except socket.error as e:
# _ERRNO_WOULDBLOCK indicate we have accepted every
# connection that is available.
if errno_from_exception(e) in _ERRNO_WOULDBLOCK:
return
# ECONNABORTED indicates that there was a connection
# but it was closed while still in the accept queue.
# (observed on FreeBSD).
if errno_from_exception(e) == errno.ECONNABORTED:
continue
raise
callback(connection, address)
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
def is_valid_ip(ip):
"""Returns true if the g
|
iven
|
bswartz/cinder
|
cinder/volume/drivers/huawei/huawei_driver.py
|
Python
|
apache-2.0
| 97,319
| 0
|
# Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import json
import math
import re
import six
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import interface
from cinder import utils
from cinder.volume import driver
from cinder.volume.drivers.huawei import constants
from cinder.volume.drivers.huawei import fc_zone_helper
from cinder.volume.drivers.huawei import huawei_conf
from cinder.volume.drivers.huawei import huawei_utils
from cinder.volume.drivers.huawei import hypermetro
from cinder.volume.drivers.huawei import replication
from cinder.volume.drivers.huawei import rest_client
from cinder.volume.drivers.huawei import smartx
from cinder.volume import utils as volume_utils
from cinder.volume import volume_types
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
huawei_opts = [
cfg.StrOpt('cinder_huawei_conf_file',
default='/etc/cinder/cinder_huawei_conf.xml',
help='The configuration file for the Cinder Huawei driver.'),
cfg.StrOpt('hypermetro_devices',
default=None,
help='The remote device hypermetro will use.'),
cfg.StrOpt('metro_san_user',
default=None,
help='The remote metro device san user.'),
cfg.StrOpt('metro_san_password',
default=None,
help='The remote metro device san password.'),
cfg.StrOpt('metro_domain_name',
default=None,
help='The remote metro device domain name.'),
cfg.StrOpt('metro_san_address',
default=None,
help='The remote metro device request url.'),
cfg.StrOpt('metro_storage_pools',
default=None,
help='The remote metro device pool names.'),
]
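# A minimal cinder.conf sketch for the options above; every value is
# illustrative.
#   [huawei_backend]
#   cinder_huawei_conf_file = /etc/cinder/cinder_huawei_conf.xml
#   metro_san_address = https://192.0.2.10:8088/deviceManager/rest
#   metro_san_user = admin
#   metro_san_password = secret
#   metro_domain_name = domain_001
#   metro_storage_pools = StoragePool001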
CONF = cfg.CONF
CONF.register_opts(huawei_opts)
snap_attrs = ('id', 'volume_id', 'volume', 'provider_location')
Snapshot = collections.namedtuple('Snapshot', snap_attrs)
vol_attrs = ('id', 'lun_type', 'provider_location', 'metadata')
Volume = collections.namedtuple('Volume', vol_attrs)
class HuaweiBaseDriver(driver.VolumeDriver):
def __init__(self, *args, **kwargs):
super(HuaweiBaseDriver, self).__init__(*args, **kwargs)
if not self.configuration:
msg = _('Configuration is not found.')
raise exception.InvalidInput(reason=msg)
self.active_backend_id = kwargs.get('active_backend_id')
self.configuration.append_config_values(huawei_opts)
self.huawei_conf = huawei_conf.HuaweiConf(self.configuration)
self.metro_flag = False
self.replica = None
def get_local_and_remote_dev_conf(self):
self.loc_dev_conf = self.huawei_conf.get_local_device()
        # For now, only one replication device is supported.
replica_devs = self.huawei_conf.get_replication_devices()
self.replica_dev_conf = replica_devs[0] if replica_devs else {}
def get_local_and_remote_client_conf(self):
if self.active_backend_id:
return self.replica_dev_conf, self.loc_dev_conf
else:
return self.loc_dev_conf, self.replica_dev_conf
def do_setup(self, context):
"""Instantiate
|
common class and login storage system."""
# Set huawei private configuration into Configuration object.
self.huawei_conf.update_config_value()
self.get_local_and_remote_dev_conf()
client_conf, replica_client_conf = (
self.get_local_and_remote_client_conf())
# init local client
if not client_conf:
msg = _('Get active client failed.')
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
self.client = rest_client.RestClient(self.configuration,
**client_conf)
self.client.login()
# init remote client
metro_san_address = self.configuration.safe_get("metro_san_address")
metro_san_user = self.configuration.safe_get("metro_san_user")
metro_san_password = self.configuration.safe_get("metro_san_password")
if metro_san_address and metro_san_user and metro_san_password:
metro_san_address = metro_san_address.split(";")
self.rmt_client = rest_client.RestClient(self.configuration,
metro_san_address,
metro_san_user,
metro_san_password)
self.rmt_client.login()
self.metro_flag = True
else:
self.metro_flag = False
LOG.warning(_LW("Remote device not configured in cinder.conf"))
# init replication manager
if replica_client_conf:
self.replica_client = rest_client.RestClient(self.configuration,
**replica_client_conf)
self.replica_client.try_login()
self.replica = replication.ReplicaPairManager(self.client,
self.replica_client,
self.configuration)
def check_for_setup_error(self):
pass
def get_volume_stats(self, refresh=False):
"""Get volume status and reload huawei config file."""
self.huawei_conf.update_config_value()
stats = self.client.update_volume_stats()
stats = self.update_hypermetro_capability(stats)
if self.replica:
stats = self.replica.update_replica_capability(stats)
targets = [self.replica_dev_conf['backend_id']]
stats['replication_targets'] = targets
stats['replication_enabled'] = True
return stats
def update_hypermetro_capability(self, stats):
if self.metro_flag:
version = self.client.find_array_version()
rmt_version = self.rmt_client.find_array_version()
if (version >= constants.ARRAY_VERSION
and rmt_version >= constants.ARRAY_VERSION):
for pool in stats['pools']:
pool['hypermetro'] = True
pool['consistencygroup_support'] = True
return stats
def _get_volume_type(self, volume):
volume_type = None
type_id = volume.volume_type_id
if type_id:
ctxt = context.get_admin_context()
volume_type = volume_types.get_volume_type(ctxt, type_id)
return volume_type
def _get_volume_params(self, volume_type):
"""Return the parameters for creating the volume."""
specs = {}
if volume_type:
specs = dict(volume_type).get('extra_specs')
opts = self._get_volume_params_from_specs(specs)
return opts
def _get_consistencygroup_type(self, group):
specs = {}
opts = {}
type_id = group.volume_type_id.split(",")
if type_id[0] and len(type_id) == 2:
ctxt = context.get_admin_context()
volume_type = volume_types.get_volume_type(ctxt, type_id[0])
specs = dict(volume_type).get('extra_specs')
opts = self._get_volume_params_from_specs(specs)
return opts
def _get_volume_params_from_specs(self, specs):
"""Return the volum
|
andymckay/zamboni
|
mkt/ecosystem/urls.py
|
Python
|
bsd-3-clause
| 4,853
| 0.000824
|
from django.conf.urls import patterns, url
from django.views.generic import RedirectView
from . import views
APP_SLUGS = {
'chrono': 'Chrono',
'face_value': 'Face_Value',
'podcasts': 'Podcasts',
'roller': 'Roller',
'webfighter': 'Webfighter',
'generalnotes': 'General_Notes',
    'rtcamera': 'rtcamera'
}
def redirect_doc(uri, request=None):
view = RedirectView.as_view(
url='https://developer.mozilla.org/docs%s' % uri)
return view(request) if request else view
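# A minimal usage sketch; the MDN path is illustrative.
#   view = redirect_doc('/Web/Apps/Design')
#   response = view(request)  # redirects to https://developer.mozilla.org/docs/Web/Apps/Design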
redirect_patterns = patterns('',
url('^docs/firefox_os_guideline$',
        redirect_doc('/Web/Apps/Design'),
name='ecosystem.ffos_guideline'),
url('^docs/responsive_design$',
redirect_doc('/Web_Development/Mobile/Responsive_design'),
name='ecosystem.responsive_design'),
url('^docs/patterns$',
redirect_doc('/Web/Apps/Design/Responsive_Navigation_Patterns'),
name='ecosystem.design_patterns'),
url('^docs/review$',
redirect_doc('/Web/Apps/Publishing/Marketplace_review_criteria'),
name='ecosystem.publish_review'),
url('^docs/deploy$',
redirect_doc('/Mozilla/Marketplace/Options'),
name='ecosystem.publish_deploy'),
url('^docs/hosted$',
redirect_doc('/Mozilla/Marketplace/Publish_options#Hosted_apps'),
name='ecosystem.publish_hosted'),
url('^docs/submission$',
redirect_doc('/Web/Apps/Publishing/Submitting_an_app'),
name='ecosystem.publish_submit'),
url('^docs/packaged$',
redirect_doc('/Web/Apps/Developing/Packaged_apps'),
name='ecosystem.publish_packaged'),
url('^docs/intro_apps$',
redirect_doc('/Web/Apps/Quickstart/Build/Intro_to_open_web_apps'),
name='ecosystem.build_intro'),
url('^docs/firefox_os$',
redirect_doc('/Mozilla/Firefox_OS'),
name='ecosystem.build_ffos'),
url('^docs/manifests$',
redirect_doc('/Web/Apps/FAQs/About_app_manifests'),
name='ecosystem.build_manifests'),
url('^docs/apps_offline$',
redirect_doc('/Web/Apps/Offline_apps'),
name='ecosystem.build_apps_offline'),
url('^docs/game_apps$',
redirect_doc('/Web/Apps/Developing/Games'),
name='ecosystem.build_game_apps'),
url('^docs/mobile_developers$',
redirect_doc('/Web/Apps/Quickstart/Build/For_mobile_developers'),
name='ecosystem.build_mobile_developers'),
url('^docs/web_developers$',
redirect_doc('/Web/Apps/Quickstart/Build/For_Web_developers'),
name='ecosystem.build_web_developers'),
url('^docs/firefox_os_simulator$',
redirect_doc('/Tools/Firefox_OS_Simulator'),
name='ecosystem.firefox_os_simulator'),
url('^docs/payments$',
redirect_doc('/Web/Apps/Quickstart/Build/Payments'),
name='ecosystem.build_payments'),
url('^docs/concept$',
redirect_doc('/Web/Apps/Quickstart/Design/Concept_A_great_app'),
name='ecosystem.design_concept'),
url('^docs/fundamentals$',
redirect_doc('/Web/Apps/Quickstart/Design/Design_Principles'),
name='ecosystem.design_fundamentals'),
url('^docs/ui_guidelines$',
redirect_doc('/Apps/Design'),
name='ecosystem.design_ui'),
url('^docs/quick_start$',
redirect_doc('/Web/Apps/Quickstart/Build/Your_first_app'),
name='ecosystem.build_quick'),
url('^docs/reference_apps$',
redirect_doc('/Web/Apps/Reference_apps'),
name='ecosystem.build_reference'),
url('^docs/apps/(?P<page>\w+)?$',
lambda req, page:
redirect_doc('/Web/Apps/Reference_apps/' + APP_SLUGS.get(page, ''), req),
name='ecosystem.apps_documentation'),
url('^docs/payments/status$',
redirect_doc('/Mozilla/Marketplace/Payments_Status'),
name='ecosystem.publish_payments'),
url('^docs/tools$',
redirect_doc('/Web/Apps/Quickstart/Build/App_tools'),
name='ecosystem.build_tools'),
url('^docs/app_generator$',
redirect_doc('/Web/Apps/Developing/App_templates'),
name='ecosystem.build_app_generator'),
url('^docs/app_manager$',
redirect_doc('/Mozilla/Firefox_OS/Using_the_App_Manager'),
name='ecosystem.app_manager'),
url('^docs/dev_tools$',
redirect_doc('/Tools'),
name='ecosystem.build_dev_tools'),
# Doesn't start with docs/, but still redirects to MDN.
url('^dev_phone$',
redirect_doc('/Mozilla/Firefox_OS/Developer_phone_guide/Flame'),
name='ecosystem.dev_phone'),
)
urlpatterns = redirect_patterns + patterns('',
url('^$', views.landing, name='ecosystem.landing'),
url('^partners$', views.partners, name='ecosystem.partners'),
url('^support$', views.support, name='ecosystem.support'),
url('^docs/badges$', views.publish_badges, name='ecosystem.publish_badges')
)
|
konono/equlipse
|
openstack-install/charm/trusty/charm-keystone/hooks/manager.py
|
Python
|
mit
| 11,385
| 0
|
#!/usr/bin/python
#
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keystoneclient.v2_0 import client
from keystoneclient.v3 import client as keystoneclient_v3
from keystoneclient.auth import token_endpoint
from keystoneclient import session, exceptions
from charmhelpers.core.decorators import retry_on_exception
# Early versions of keystoneclient lib do not have an explicit
# ConnectionRefused
if hasattr(exceptions, 'ConnectionRefused'):
econnrefused = exceptions.ConnectionRefused
else:
econnrefused = exceptions.ConnectionError
def _get_keystone_manager_class(endpoint, token, api_version):
"""Return KeystoneManager class for the given API version
@param endpoint: the keystone endpoint to point client at
@param token: the keystone admin_token
@param api_version: version of the keystone api the client should use
@returns keystonemanager class used for interrogating keystone
"""
if api_version == 2:
return KeystoneManager2(endpoint, token)
if api_version == 3:
return KeystoneManager3(endpoint, token)
raise ValueError('No manager found for api version {}'.format(api_version))
@retry_on_exception(5, base_delay=3, exc_type=econnrefused)
def get_keystone_manager(endpoint, token, api_version=None):
"""Return a keystonemanager for the correct API version
If api_version has not been set then create a manager based on the endpoint
Use this manager to query the catalogue and determine which api version
should actually be used. Return the correct client based on that.
Function is wrapped in a retry_on_exception to catch the case where the
keystone service is still initialising and not responding to requests yet.
XXX I think the keystone client should be able to do version
detection automatically so the code below could be greatly
simplified
@param endpoint: the keystone endpoint to point client at
@param token: the keystone admin_token
@param api_version: version of the keystone api the client should use
@returns keystonemanager class used for interrogating keystone
"""
if api_version:
return _get_keystone_manager_class(endpoint, token, api_version)
else:
if 'v2.0' in endpoint.split('/'):
manager = _get_keystone_manager_class(endpoint, token, 2)
else:
manager = _get_keystone_manager_class(endpoint, token, 3)
if endpoint.endswith('/'):
base_ep = endpoint.rsplit('/', 2)[0]
else:
base_ep = endpoint.rsplit('/', 1)[0]
svc_id = None
for svc in manager.api.services.list():
if svc.type == 'identity':
svc_id = svc.id
version = None
for ep in manager.api.endpoints.list():
if ep.service_id == svc_id and hasattr(ep, 'adminurl'):
version = ep.adminurl.split('/')[-1]
if version and version == 'v2.0':
new_ep = base_ep + "/" + 'v2.0'
return _get_keystone_manager_class(new_ep, token, 2)
elif version and version == 'v3':
new_ep = base_ep + "/" + 'v3'
return _get_keystone_manager_class(new_ep, token, 3)
else:
return manager
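# Example (a sketch; the endpoint URL and token below are hypothetical):
#
#   >>> mgr = get_keystone_manager('http://10.0.0.1:35357/v3', 'ADMIN_TOKEN')
#   >>> mgr.resolve_role_id('Admin')
#
# With api_version omitted, the catalogue lookup above decides whether a
# KeystoneManager2 or KeystoneManager3 instance is returned.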
class KeystoneManager(object):
def resolve_domain_id(self, name):
pass
def resolve_role_id(self, name):
"""Find the role_id of a given role"""
roles = [r._info for r in self.api.roles.list()]
for r in roles:
if name.lower() == r['name'].lower():
return r['id']
def resolve_service_id(self, name, service_type=None):
"""Find the service_id of a given service"""
services = [s._info for s in self.api.services.list()]
for s in services:
if service_type:
if (name.lower() == s['name'].lower() and
service_type == s['type']):
return s['id']
else:
if name.lower() == s['name'].lower():
return s['id']
def resolve_service_id_by_type(self, type):
"""Find the service_id of a given service"""
services = [s._info for s in self.api.services.list()]
for s in services:
if type == s['type']:
return s['id']
class KeystoneManager2(KeystoneManager):
def __init__(self, endpoint, token):
self.api_version = 2
self.api = client.Client(endpoint=endpoint, token=token)
def resolve_user_id(self, name, user_domain=None):
"""Find the user_id of a given user"""
users = [u._info for u in self.api.users.list()]
for u in users:
if name.lower() == u['name'].lower():
return u['id']
def create_endpoints(self, region, service_id, publicurl, adminurl,
internalurl):
self.api.endpoints.create(region=region, service_id=service_id,
publicurl=publicurl, adminurl=adminurl,
internalurl=internalurl)
def tenants_list(self):
return self.api.tenants.list()
def resolve_tenant_id(self, name, domain=None):
"""Find the tenant_id of a given tenant"""
tenants = [t._info for t in self.api.tenants.list()]
for t in tenants:
if name.lower() == t['name'].lower():
return t['id']
def create_tenant(self, tenant_name, description, domain='default'):
self.api.tenants.create(tenant_name=tenant_name,
description=description)
def delete_tenant(self, tenant_id):
self.api.tenants.delete(tenant_id)
def create_user(self, name, password, email, tenant_id=None,
domain_id=None):
self.api.users.create(name=name,
password=password,
email=email,
tenant_id=tenant_id)
def update_password(self, user, password):
self.api.users.update_password(user=user, password=password)
def roles_for_user(self, user_id, tenant_id=None, domain_id=None):
return self.api.roles.roles_for_user(user_id, tenant_id)
def add_user_role(self, user, role, tenant, domain):
self.api.roles.add_user_role(user=user, role=role, tenant=tenant)
class KeystoneManager3(KeystoneManager):
def __init__(self, endpoint, token):
self.api_version = 3
keystone_auth_v3 = token_endpoint.Token(endpoint=endpoint, token=token)
keystone_session_v3 = session.Session(auth=keystone_auth_v3)
self.api = keystoneclient_v3.Client(session=keystone_session_v3)
def resolve_tenant_id(self, name, domain=None):
"""Find the tenant_id of a given tenant"""
if domain:
domain_id = self.resolve_domain_id(domain)
tenants = [t._info for t in self.api.projects.list()]
for t in tenants:
if name.lower() == t['name'].lower() and \
(domain is None or t['domain_id'] == domain_id):
return t['id']
def resolve_domain_id(self, name):
"""Find the domain_id of a given domain"""
domains = [d._info for d in self.api.domains.list()]
for d in domains:
if name.lower() == d['name'].lower():
return d['id']
def resolve_user_id(self, name, user_domain=None):
"""Find the user_id of a given user"""
domain_id = None
if user_domain:
domain_id = self.resolve_domain_id(user_domain)
for user in self.api.users.list(domain=domain_id):
if name.lower() == user.name.lower():
return user.id
|
marmyshev/item_title
|
openlp/plugins/songs/lib/songimport.py
|
Python
|
gpl-2.0
| 15,903
| 0.001698
|
# -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2013 Raoul Snyman #
# Portions copyright (c) 2008-2013 Tim Bentley, Gerald Britton, Jonathan #
# Corwin, Samuel Findlay, Michael Gorven, Scott Guerrieri, Matthias Hub, #
# Meinert Jordan, Armin Köhler, Erik Lundin, Edwin Lunando, Brian T. Meyer. #
# Joshua Miller, Stevan Pettit, Andreas Preikschat, Mattias Põldaru, #
# Christian Richter, Philip Ridout, Simon Scudder, Jeffrey Smith, #
# Maikel Stuivenberg, Martin Thompson, Jon Tibble, Dave Warnock, #
# Frode Woldsund, Martin Zibricky, Patrick Zimmermann #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
import logging
import re
import shutil
import os
from PyQt4 import QtCore
from openlp.core.lib import Registry, translate, check_directory_exists
from openlp.core.ui.wizard import WizardStrings
from openlp.core.utils import AppLocation
from openlp.plugins.songs.lib import clean_song, VerseType
from openlp.plugins.songs.lib.db import Song, Author, Topic, Book, MediaFile
from openlp.plugins.songs.lib.ui import SongStrings
from openlp.plugins.songs.lib.xml import SongXML
log = logging.getLogger(__name__)
class SongImport(QtCore.QObject):
"""
Helper class for importing a song from a third party source into OpenLP
This class just takes the raw strings, and will work out for itself
whether the authors etc already exist and add them or refer to them
as necessary
"""
@staticmethod
def isValidSource(import_source):
"""
Override this method to validate the source prior to import.
"""
return True
def __init__(self, manager, **kwargs):
"""
Initialise and create defaults for properties
``manager``
An instance of a SongManager, through which all database access is
performed.
"""
self.manager = manager
QtCore.QObject.__init__(self)
if 'filename' in kwargs:
self.import_source = kwargs['filename']
elif 'filenames' in kwargs:
self.import_source = kwargs['filenames']
elif 'folder' in kwargs:
self.import_source = kwargs['folder']
else:
raise KeyError('Keyword arguments "filename[s]" or "folder" not supplied.')
log.debug(self.import_source)
self.import_wizard = None
self.song = None
self.stop_import_flag = False
self.setDefaults()
Registry().register_function('openlp_stop_wizard', self.stop_import)
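# e.g. (illustrative): SongImport(manager, filename='song.xml') stores that
# path in import_source; omitting filename/filenames/folder raises KeyError.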
def setDefaults(self):
"""
Create defaults for properties - call this before each song
if importing many songs at once to ensure a clean beginning
"""
self.title = ''
self.songNumber = ''
self.alternate_title = ''
self.copyright = ''
self.comments = ''
self.themeName = ''
self.ccliNumber = ''
self.authors = []
self.topics = []
self.mediaFiles = []
self.songBookName = ''
self.songBookPub = ''
self.verseOrderListGeneratedUseful = False
self.verseOrderListGenerated = []
self.verseOrderList = []
self.verses = []
self.verseCounts = {}
self.copyrightString = translate('SongsPlugin.SongImport', 'copyright')
def logError(self, filepath, reason=SongStrings.SongIncomplete):
"""
This should be called when a song could not be imported.
``filepath``
This should be the file path if ``self.import_source`` is a list
with different files. If it is not a list, but a single file (for
instance a database), then this should be the song's title.
``reason``
The reason why the import failed. The string should be as
informative as possible.
"""
self.setDefaults()
if self.import_wizard is None:
return
if self.import_wizard.error_report_text_edit.isHidden():
self.import_wizard.error_report_text_edit.setText(translate('SongsPlugin.SongImport',
'The following songs could not be imported:'))
self.import_wizard.error_report_text_edit.setVisible(True)
self.import_wizard.error_copy_to_button.setVisible(True)
self.import_wizard.error_save_to_button.setVisible(True)
self.import_wizard.error_report_text_edit.append('- %s (%s)' % (filepath, reason))
def stop_import(self):
"""
Sets the flag for importers to stop their import
"""
log.debug('Stopping songs import')
self.stop_import_flag = True
def register(self, import_wizard):
self.import_wizard = import_wizard
def tidyText(self, text):
"""
Get rid of some dodgy unicode and formatting characters we're not
interested in. Some can be converted to ascii.
"""
text = text.replace('\u2018', '\'')
text = text.replace('\u2019', '\'')
text = text.replace('\u201c', '"')
text = text.replace('\u201d', '"')
text = text.replace('\u2026', '...')
text = text.replace('\u2013', '-')
text = text.replace('\u2014', '-')
# Remove surplus blank lines, spaces, trailing/leading spaces
text = re.sub(r'[ \t\v]+', ' ', text)
text = re.sub(r' ?(\r\n?|\n) ?', '\n', text)
text = re.sub(r' ?(\n{5}|\f)+ ?', '\f', text)
return text
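# e.g. (editor's sketch): tidyText('Verse\u2019s  first   line \r\n second')
# returns "Verse's first line\nsecond" - the smart quote becomes ASCII and
# the whitespace/newline runs are collapsed by the substitutions above.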
def processSongText(self, text):
verse_texts = text.split('\n\n')
for verse_text in verse_texts:
if verse_text.strip() != '':
self.processVerseText(verse_text.strip())
def processVerseText(self, text):
lines = text.split('\n')
if text.lower().find(self.copyrightString) >= 0 or text.find(str(SongStrings.CopyrightSymbol)) >= 0:
copyright_found = False
for line in lines:
if (copyright_found or line.lower().find(self.copyrightString) >= 0 or
line.find(str(SongStrings.CopyrightSymbol)) >= 0):
copyright_found = True
self.addCopyright(line)
else:
self.parse_author(line)
return
if len(lines) == 1:
self.parse_author(lines[0])
return
if not self.title:
self.title = lines[0]
self.addVerse(text)
def addCopyright(self, copyright):
"""
Build the copyright field
"""
if self.copyright.find(copyright) >= 0:
return
if self.copyright != '':
self.copyright += ' '
self.copyright += copyright
def parse
|
pandas-dev/pandas
|
pandas/tests/frame/methods/test_tz_localize.py
|
Python
|
bsd-3-clause
| 2,050
| 0.000488
|
import numpy as np
import pytest
from pandas import (
DataFrame,
Series,
date_range,
)
import pandas._testing as tm
class TestTZLocalize:
# See also:
# test_tz_convert_and_localize in test_tz_convert
def test_tz_localize(self, frame_or_series):
rng = date_range("1/1/2011", periods=100, freq="H")
obj = DataFrame({"a": 1}, index=rng)
obj = tm.get_obj(obj, frame_or_series)
result = obj.tz_localize("utc")
expected = DataFrame({"a": 1}, rng.tz_localize("UTC"))
expected = tm.get_obj(expected, frame_or_series)
assert result.index.tz.zone == "UTC"
tm.assert_equal(result, expected)
def test_tz_localize_axis1(self):
rng = date_range("1/1/2011", periods=100, freq="H")
df = DataFrame({"a": 1}, index=rng)
df = df.T
result = df.tz_localize("utc", axis=1)
assert result.columns.tz.zone == "UTC"
expected = DataFrame({"a": 1}, rng.tz_localize("UTC"))
tm.assert_frame_equal(result, expected.T)
def test_tz_localize_naive(self, frame_or_series):
# Can't localize if already tz-aware
rng = date_range("1/1/2011", periods=100, freq="H", tz="utc")
ts = Series(1, index=rng)
ts = frame_or_series(ts)
with pytest.raises(TypeError, match="Already tz-aware"):
ts.tz_localize("US/Eastern")
@pytest.mark.parametrize("copy", [True, False])
def test_tz_localize_copy_inplace_mutate(self, copy, frame_or_series):
# GH#6326
obj = frame_or_series(
np.arange(0, 5), index=date_range("20131027", periods=5, freq="1H", tz=None)
)
orig = obj.copy()
result = obj.tz_localize("UTC", copy=copy)
expected = frame_or_series(
np.arange(0, 5),
index=date_range("20131027", periods=5, freq="1H", tz="UTC"),
)
tm.assert_equal(result, expected)
tm.assert_equal(obj, orig)
assert result.index is not obj.index
assert result is not obj
|
40323155/2016springcd_aG6
|
users/a/g6/ag6.py
|
Python
|
agpl-3.0
| 14,459
| 0.006189
|
from flask import Blueprint, render_template
# Use Blueprint to create ag6, with url prefix /ag6, and set the template folder
ag6 = Blueprint('ag6', __name__, url_prefix='/ag6', template_folder='templates')
# Demonstrate returning a Brython program
@ag6.route('/a40323152')
def task1():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 2D 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="3000" height="3000"></canvas>
<script type="text/python">
from javascript import JSConstructor
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -4500, 5000, 5000)
# 決定要不要畫座標軸線
#cgo.drawAxes(0, 240, 0, 240, {
# "strokeColor":"#aaaaaa",
# "fillColor": "#aaaaaa",
# "xTickInterval": 20,
# "xLabelInterval": 20,
# "yTickInterval": 20,
# "yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
# 旋轉必須要針對相對中心 rot not working yet
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
cmbr = cobj(cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
# 複製 cmbr, 然後命名為 basic1
basic3 = cmbr.dup()
basic3.rotate(0)
basic3.translate(0, 20)
basic4 = cmbr.dup()
basic4.rotate(60)
basic4.translate(0, 20)
cmbr.appendPath(basic3)
cmbr.appendPath(basic4)
# hole 為原點位置
hole = cobj(shapedefs.circle(4), "PATH")
cmbr.appendPath(hole)
# 表示放大 3 倍
#cgo.render(cmbr, x, y, 3, rot)
# 放大 5 倍
cgo.render(cmbr, x, y, 5, rot)
O(0, 0, 0, 0, 0, "lightyellow", True, 4)
</script>
</body>
</html>
'''
return outstring
@ag6.route('/a40323133')
def task2():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 2D 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="3000" height="3000"></canvas>
<script type="text/python">
from javascript import JSConstructor
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -4500, 5000, 5000)
# 決定要不要畫座標軸線
#cgo.drawAxes(0, 240, 0, 240, {
# "strokeColor":"#aaaaaa",
# "fillColor": "#aaaaaa",
# "xTickInterval": 20,
# "xLabelInterval": 20,
# "yTickInterval": 20,
# "yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
# 旋轉必須要針對相對中心 rot not working yet
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
cmbr = cobj(cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
# 複製 cmbr, 然後命名為 basic1
basic3 = cmbr.dup()
basic3.rotate(0)
basic3.translate(0, 20)
basic4 = cmbr.dup()
basic4.rotate(0)
basic4.translate(0, 40)
basic5 = cmbr.dup()
basic5.rotate(120)
basic5.translate(0, 40)
cmbr.appendPath(basic3)
cmbr.appendPath(basic4)
cmbr.appendPath(basic5)
# hole 為原點位置
hole = cobj(shapedefs.circle(4), "PATH")
cmbr.appendPath(hole)
# 表示放大 3 倍
#cgo.render(cmbr, x, y, 3, rot)
# 放大 5 倍
cgo.render(cmbr, x, y, 5, rot)
O(0, 0, 0, 0, 0, "lightyellow", True, 4)
</script>
</body>
</html>
'''
return outstring
@ag6.route('/a40323147')
def task3():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 2D 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="3000" height="3000"></canvas>
<script type="text/python">
from javascript import JSConstructor
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -4500, 5000, 5000)
# 決定要不要畫座標軸線
#cgo.drawAxes(0, 240, 0, 240, {
# "strokeColor":"#aaaaaa",
# "fillColor": "#aaaaaa",
# "xTickInterval": 20,
# "xLabelInterval": 20,
# "yTickInterval": 20,
# "yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
# 旋轉必須要針對相對中心 rot not working yet
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
cmbr = cobj(cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
# 複製 cmbr, 然後命名為 basic1
basic3 = cmbr.dup()
basic3.rotate(0)
basic3.translate(0, 20)
basic4 = cmbr.dup()
basic4.rotate(0)
basic4.translate(0, 40)
basic5 = cmbr.dup()
basic5.rotate(90)
basic5.translate(0, 40)
basic6 = cmbr.dup()
basic6.rotate(20)
basic6.translate(20, 40)
cmbr.appendPath(basic3)
cmbr.appendPath(basic4)
cmbr.appendPath(basic5)
cmbr.appendPath(basic6)
# hole 為原點位置
hole = cobj(shapedefs.circle(4), "PATH")
cmbr.appendPath(hole)
# 表示放大 3 倍
#cgo.render(cmbr, x, y, 3, rot)
# 放大 5 倍
cgo.render(cmbr, x, y, 5, rot)
O(0, 0, 0, 0, 0, "lightyellow", True, 4)
</script>
</body>
</html>
'''
return outstring
@ag6.route('/a40323155')
def task4():
outstring = '''
<!DOCTYPE html>
<html>
|
edudobay/mingus
|
mingus/containers/Instrument.py
|
Python
|
gpl-3.0
| 7,067
| 0.001415
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
================================================================================
mingus - Music theory Python package, Instrument module
Copyright (C) 2008-2009, Bart Spaans
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
================================================================================
"""
from mingus.containers.Note import Note
from mt_exceptions import UnexpectedObjectError
class Instrument:
"""The Instrument class is pretty self explanatory. Instruments can be used \
with [refMingusContainersTrack Tracks] to define which instrument plays \
what, with the added bonus of checking whether the entered notes are in the \
range of the instrument.
It's probably easiest to subclass your own Instruments (see \
[refMingusContainersPiano Piano] and [refMingusContainersGuitar Guitar] for \
examples)."""
name = 'Instrument'
range = (Note('C', 0), Note('C', 8))
clef = 'bass and treble'
tuning = None # optional StringTuning object
def __init__(self):
pass
def set_range(self, range):
"""Sets the range of the instrument. A range is a tuple of two \
[refMingusContainersNote Notes] or note strings."""
if type(range[0]) == str:
range[0] = Note(range[0])
range[1] = Note(range[1])
if not hasattr(range[0], 'name'):
raise UnexpectedObjectError, \
"Unexpected object '%s'. Expecting a mingus.containers.Note object"\
% range[0]
self.range = range
def note_in_range(self, note):
"""Tests whether note is in the range of this Instrument. Returns `True` if \
so, `False` otherwise"""
if type(note) == str:
note = Note(note)
if not hasattr(note, 'name'):
raise UnexpectedObjectError, \
"Unexpected object '%s'. Expecting a mingus.containers.Note object"\
% note
if note >= self.range[0] and note <= self.range[1]:
return True
return False
def notes_in_range(self, notes):
"""An alias for can_play_notes"""
return self.can_play_notes(notes)
def can_play_notes(self, notes):
"""Will test if the notes lie within the range of the instrument. Returns \
`True` if so, `False` otherwise."""
if hasattr(notes, 'notes'):
notes = notes.notes
if type(notes) != list:
notes = [notes]
for n in notes:
if not self.note_in_range(n):
return False
return True
def __repr__(self):
"""A string representation of the object"""
return '%s [%s - %s]' % (self.name, self.range[0], self.range[1])
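# Example (a sketch, assuming mingus 'name-octave' note strings):
#
#   >>> Instrument().note_in_range('C-4')
#   True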
class Piano(Instrument):
name = 'Piano'
range = (Note('F', 0), Note('B', 8))
def __init__(self):
Instrument.__init__(self)
class Guitar(Instrument):
name = 'Guitar'
range = (Note('E', 3), Note('E', 7))
clef = 'Treble'
def __init__(self):
Instrument.__init__(self)
def can_play_notes(self, notes):
if len(notes) > 6:
return False
return Instrument.can_play_notes(self, notes)
class MidiInstrument(Instrument):
range = (Note('C', 0), Note('B', 8))
instrument_nr = 1
name = ''
names = [
'Acoustic Grand Piano',
'Bright Acoustic Piano',
'Electric Grand Piano',
'Honky-tonk Piano',
'Electric Piano 1',
'Electric Piano 2',
'Harpsichord',
'Clavi',
'Celesta',
'Glockenspiel',
'Music Box',
'Vibraphone',
'Marimba',
'Xylophone',
'Tubular Bells',
'Dulcimer',
'Drawbar Organ',
'Percussive Organ',
'Rock Organ',
'Church Organ',
'Reed Organ',
'Accordion',
'Harmonica',
'Tango Accordion',
'Acoustic Guitar (nylon)',
'Acoustic Guitar (steel)',
'Electric Guitar (jazz)',
'Electric Guitar (clean)',
'Electric Guitar (muted)',
'Overdriven Guitar',
'Distortion Guitar',
'Guitar harmonics',
'Acoustic Bass',
'Electric Bass (finger)',
'Electric Bass (pick)',
'Fretless Bass',
'Slap Bass 1',
'Slap Bass 2',
'Synth Bass 1',
'Synth Bass 2',
'Violin',
'Viola',
'Cello',
'Contrabass',
'Tremolo Strings',
'Pizzicato Strings',
'Orchestral Harp',
'Timpani',
'String Ensemble 1',
'String Ensemble 2',
'SynthStrings 1',
'SynthStrings 2',
'Choir Aahs',
'Voice Oohs',
'Synth Voice',
'Orchestra Hit',
'Trumpet',
'Trombone',
'Tuba',
'Muted Trumpet',
'French Horn',
'Brass Section',
'SynthBrass 1',
'SynthBrass 2',
'Soprano Sax',
'Alto Sax',
'Tenor Sax',
'Baritone Sax',
'Oboe',
'English Horn',
'Bassoon',
'Clarinet',
'Piccolo',
'Flute',
'Recorder',
'Pan Flute',
'Blown Bottle',
'Shakuhachi',
'Whistle',
'Ocarina',
'Lead1 (square)',
'Lead2 (sawtooth)',
'Lead3 (calliope)',
'Lead4 (chiff)',
'Lead5 (charang)',
'Lead6 (voice)',
'Lead7 (fifths)',
'Lead8 (bass + lead)',
'Pad1 (new age)',
'Pad2 (warm)',
'Pad3 (polysynth)',
'Pad4 (choir)',
'Pad5 (bowed)',
'Pad6 (metallic)',
'Pad7 (halo)',
'Pad8 (sweep)',
'FX1 (rain)',
'FX2 (soundtrack)',
'FX 3 (crystal)',
'FX 4 (atmosphere)',
'FX 5 (brightness)',
'FX 6 (goblins)',
'FX 7 (echoes)',
'FX 8 (sci-fi)',
'Sitar',
'Banjo',
'Shamisen',
'Koto',
'Kalimba',
'Bag pipe',
'Fiddle',
'Shanai',
'Tinkle Bell',
'Agogo',
'Steel Drums',
'Woodblock',
'Taiko Drum',
'Melodic Tom',
'Synth Drum',
'Reverse Cymbal',
'Guitar Fret Noise',
'Breath Noise',
'Seashore',
'Bird Tweet',
'Telephone Ring',
'Helicopter',
'Applause',
'Gunshot',
]
def __init__(self, name=''):
self.name = name
|
oconnor663/peru
|
tests/test_runtime.py
|
Python
|
mit
| 466
| 0
|
import os
import peru.runtime as runtime
import shared
class RuntimeTest(shared.PeruTest):
def test_find_peru_file(self):
test_dir = shared.create_dir({
'a/find_me': 'junk',
'a/b/c/junk': 'junk',
})
result = runtime.find_project_file(
os.path.join(test_dir, 'a', 'b', 'c'),
'find_me')
expected = os.path.join(test_dir, 'a', 'find_me')
self.assertEqual(expected, result)
|
kyuhojeong/controllers
|
controller/modules/svpn/TincanDispatcher.py
|
Python
|
mit
| 6,334
| 0.000316
|
import json
import sys
from controller.framework.ControllerModule import ControllerModule
import controller.framework.ipoplib as ipoplib
class TincanDispatcher(ControllerModule):
def __init__(self, CFxHandle, paramDict):
super(TincanDispatcher, self).__init__()
self.CFxHandle = CFxHandle
self.CMConfig = paramDict
def initialize(self):
logCBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='Logger',
action='info',
data="TincanDispatcher Loaded")
self.CFxHandle.submitCBT(logCBT)
def processCBT(self, cbt):
data = cbt.data[0]
addr = cbt.data[1]
# Data format:
# ---------------------------------------------------------------
# | offset(byte) | |
# ---------------------------------------------------------------
# | 0 | ipop version |
# | 1 | message type |
# | 2 | Payload (JSON formatted control message) |
# ---------------------------------------------------------------
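# e.g. (illustrative, byte values hypothetical): for a control message
# data = <version byte> + <type byte> + '{"type": "local_state"}',
# data[0] is checked against ipoplib.ipop_ver, data[1] selects the branch
# below, and json.loads(data[2:]) yields the control payload.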
if data[0] != ipoplib.ipop_ver:
logCBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='Logger',
action='debug',
data="ipop version mismatch:"
"tincan:{0} controller: {1}"
.format(data[0].encode("hex"),
ipoplib.ipop_ver.encode("hex")))
self.CFxHandle.submitCBT(logCBT)
sys.exit()
if data[1] == ipoplib.tincan_control:
msg = json.loads(data[2:])
logCBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='Logger',
action='debug',
data="recv {0} {1}"
.format(addr, data[2:]))
self.CFxHandle.submitCBT(logCBT)
msg_type = msg.get("type", None)
if msg_type == "echo_request":
# Reply to the echo_request
echo_data = {
'm_type': ipoplib.tincan_control,
'dest_addr': addr[0],
'dest_port': addr[1]
}
echoCBT = self.CFxHandle.createCBT(initiator='Tincan'
'Dispatcher',
recipient='TincanSender',
action='ECHO_REPLY',
data=echo_data)
self.CFxHandle.submitCBT(echoCBT)
elif msg_type == "local_state":
# Send CBT to Watchdog to store ipop_state
CBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='Watchdog',
action='STORE_IPOP_STATE',
data=msg)
self.CFxHandle.submitCBT(CBT)
elif msg_type == "peer_state":
# Send CBT to Monitor to store peer state
CBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='Monitor',
action='PEER_STATE',
data=msg)
self.CFxHandle.submitCBT(CBT)
elif (msg_type == "con_stat" or msg_type == "con_req" or
msg_type == "con_resp" or msg_type == "send_msg"):
CBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='BaseTopologyManager',
action='TINCAN_MSG',
data=msg)
self.CFxHandle.submitCBT(CBT)
# If a packet is destined for a node to which no p2p connection
# has been established yet, the packet as a whole is forwarded
# to the controller
# |-------------------------------------------------------------|
# | offset(byte) | |
# |-------------------------------------------------------------|
# | 0 | ipop version |
# | 1 | message type |
# | 2 | source uid |
# | 22 | destination uid |
# | 42 | Payload (Ethernet frame) |
# |-------------------------------------------------------------|
# Pass for now
elif data[1] == ipoplib.tincan_packet:
pass
else:
logCBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='Logger',
action='error',
data="Tincan: "
"Unrecognized message "
"received from Tincan")
self.CFxHandle.submitCBT(logCBT)
logCBT = self.CFxHandle.createCBT(initiator='TincanDispatcher',
recipient='Logger',
action='debug',
data="{0}".format(data[0:].
encode("hex")))
self.CFxHandle.submitCBT(logCBT)
sys.exit()
def timer_method(self):
pass
def terminate(self):
pass
|
ekohl/ganeti
|
qa/qa_config.py
|
Python
|
gpl-2.0
| 3,219
| 0.013669
|
#
#
# Copyright (C) 2007, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""QA configuration.
"""
from ganeti import utils
from ganeti import serializer
from ganeti import compat
import qa_error
cfg = None
options = None
def Load(path):
"""Loads the passed configuration file.
"""
global cfg # pylint: disable-msg=W0603
cfg = serializer.LoadJson(utils.ReadFile(path))
Validate()
def Validate():
if len(cfg['nodes']) < 1:
raise qa_error.Error("Need at least one node")
if len(cfg['instances']) < 1:
raise qa_error.Error("Need at least one instance")
if len(cfg["disk"]) != len(cfg["disk-growth"]):
raise qa_error.Error("Config options 'disk' and 'disk-growth' must have"
" the same number of items")
def get(name, default=None):
return cfg.get(name, default)
def TestEnabled(tests):
"""Returns True if the given tests are enabled.
@param tests: a single test, or a list of tests to check
"""
if isinstance(tests, basestring):
tests = [tests]
return compat.all(cfg.get("tests", {}).get(t, True) for t in tests)
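# e.g. (illustrative): with cfg = {"tests": {"instance-add": False}},
# TestEnabled("instance-add") is False, while any test missing from the
# "tests" mapping defaults to enabled.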
def GetMasterNode():
return cfg['nodes'][0]
def AcquireInstance():
"""Returns an instance which isn't in use.
"""
# Filter out unwanted instances
tmp_flt = lambda inst: not inst.get('_used', False)
instances = filter(tmp_flt, cfg['instances'])
del tmp_flt
if len(instances) == 0:
raise qa_error.OutOfInstancesError("No instances left")
inst = instances[0]
inst['_used'] = True
return inst
def ReleaseInstance(inst):
inst['_used'] = False
def AcquireNode(exclude=None):
"""Returns the least used node.
"""
master = GetMasterNode()
# Filter out unwanted nodes
# TODO: Maybe combine filters
if exclude is None:
nodes = cfg['nodes'][:]
elif isinstance(exclude, (list, tuple)):
nodes = filter(lambda node: node not in exclude, cfg['nodes'])
else:
nodes = filter(lambda node: node != exclude, cfg['nodes'])
tmp_flt = lambda node: node.get('_added', False) or node == master
nodes = filter(tmp_flt, nodes)
del tmp_flt
if len(nodes) == 0:
raise qa_error.OutOfNodesError("No nodes left")
# Get node with least number of uses
def compare(a, b):
result = cmp(a.get('_count', 0), b.get('_count', 0))
if result == 0:
result = cmp(a['primary'], b['primary'])
return result
nodes.sort(cmp=compare)
node = nodes[0]
node['_count'] = node.get('_count', 0) + 1
return node
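# e.g. (editor's sketch): given two eligible nodes with '_count' 0 and 2,
# AcquireNode() returns the less-used one and bumps its '_count' to 1;
# ReleaseNode() below undoes that increment.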
def ReleaseNode(node):
node['_count'] = node.get('_count', 0) - 1
|
othreecodes/MY-RIDE
|
broadcast/migrations/0001_initial.py
|
Python
|
mit
| 1,998
| 0.003504
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-01 19:47
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Broadcast',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField(default=django.utils.timezone.now, verbose_name='broadcast date')),
('time', models.TimeField(default=django.utils.timezone.now, verbose_name='time')),
('send_to_all', models.BooleanField(default=False, verbose_name='send to all')),
],
),
migrations.CreateModel(
name='ImageBroadcast',
fields=[
('broadcast_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='broadcast.Broadcast')),
('image', models.ImageField(upload_to='')),
],
bases=('broadcast.broadcast',),
),
migrations.CreateModel(
name='TextBroadcast',
fields=[
('broadcast_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='broadcast.Broadcast')),
('message', models.TextField()),
],
bases=('broadcast.broadcast',),
),
migrations.AddField(
model_name='broadcast',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
lud4ik/txAWS
|
txaws/tests/test_exception.py
|
Python
|
mit
| 4,188
| 0
|
# Copyright (c) 2009 Canonical Ltd <duncan.mcgreggor@canonical.com>
# Licenced under the txaws licence available at /LICENSE in the txaws source.
from twisted.trial.unittest import TestCase
from txaws.exception import AWSError
from txaws.exception import AWSResponseParseError
from txaws.util import XML
REQUEST_ID = "0ef9fc37-6230-4d81-b2e6-1b36277d4247"
class AWSErrorTestCase(TestCase):
def test_creation(self):
error = AWSError("<dummy1 />", 500, "Server Error", "<dummy2 />")
self.assertEquals(error.status, 500)
self.assertEquals(error.response, "<dummy2 />")
self.assertEquals(error.original, "<dummy1 />")
self.assertEquals(error.errors, [])
self.assertEquals(error.request_id, "")
def test_node_to_dict(self):
xml = "<parent><child1>text1</child1><child2>text2</child2></parent>"
error = AWSError("<dummy />", 400)
data = error._node_to_dict(XML(xml))
self.assertEquals(data, {"child1": "text1", "child2": "text2"})
def test_set_request_id(self):
xml = "<a><b /><RequestID>%s</RequestID></a>" % REQUEST_ID
error = AWSError("<dummy />", 400)
error._set_request_id(XML(xml))
self.assertEquals(error.request_id, REQUEST_ID)
def test_set_host_id(self):
host_id = "ASD@#FDG$E%FG"
xml = "<a><b /><HostID>%s</HostID></a>" % host_id
error = AWSError("<dummy />", 400)
error._set_host_id(XML(xml))
self.assertEquals(error.host_id, host_id)
def test_set_empty_errors(self):
xml = "<a><Errors /><b /></a>"
error = AWSError("<dummy />", 500)
error._set_500_error(XML(xml))
self.assertEquals(error.errors, [])
def test_set_empty_error(self):
xml = "<a><Errors><Error /><Error /></Errors><b /></a>"
error = AWSError("<dummy />", 500)
error._set_500_error(XML(xml))
self.assertEquals(error.errors, [])
def test_parse_without_xml(self):
xml = "<dummy />"
error = AWSError(xml, 400)
error.parse()
self.assertEquals(error.original, xml)
def test_parse_with_xml(self):
xml1 = "<dummy1 />"
xml2 = "<dummy2 />"
error = AWSError(xml1, 400)
error.parse(xml2)
self.assertEquals(error.original, xml2)
def test_parse_html(self):
xml = "<html><body>a page</body></html>"
self.assertRaises(AWSResponseParseError, AWSError, xml, 400)
def test_empty_xml(self):
self.assertRaises(ValueError, AWSError, "", 400)
def test_no_request_id(self):
errors = "<Errors><Error><Code /><Message /></Error></Errors>"
xml = "<Response>%s<RequestID /></Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.request_id, "")
def test_no_request_id_node(self):
errors = "<Errors><Error><Code /><Message /></Error></Errors>"
xml = "<Response>%s</Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.request_id, "")
def test_no_errors_node(self):
xml = "<Response><RequestID /></Response>"
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_no_error_node(self):
xml = "<Response><Errors /><RequestID /></Response>"
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_no_error_code_node(self):
errors = "<Errors><Error><Message /></Error></Errors>"
xml = "<Response>%s<RequestID /></Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_no_error_message_node(self):
errors = "<Errors><Error><Code /></Error></Errors>"
xml = "<Response>%s<RequestID /></Response>" % errors
error = AWSError(xml, 400)
self.assertEquals(error.errors, [])
def test_set_500_error(self):
xml = "<Error><Code>500</Code><Message>Oops</Message></Error>"
error = AWSError("<dummy />", 500)
error._set_500_error(XML(xml))
self.assertEquals(error.errors[0]["Code"], "500")
self.assertEquals(error.errors[0]["Message"], "Oops")
|
lazytech-org/RIOT
|
tests/periph_uart/tests/periph_uart_if.py
|
Python
|
lgpl-2.1
| 1,079
| 0
|
# Copyright (c) 2018 Kevin Weiss, for HAW Hamburg <kevin.weiss@haw-hamburg.de>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
"""@package PyToAPI
This module handles parsing of information from RIOT periph_uart test.
"""
try:
from riot_pal import DutShell
except ImportError:
raise ImportError('Cannot find riot_pal, try "pip install riot_pal"')
class PeriphUartIf(DutShell):
"""Interface to the node with periph_uart firmware."""
def uart_init(self, dev, baud):
"""Initialize DUT's UART."""
return self.send_cmd("init {} {}".format(dev, baud))
def uart_mode(self, dev, data_bits, parity, stop_bits):
"""Setup databits, parity and stopbits."""
return self.send_cmd(
"mode {} {} {} {}".format(dev, data_bits, parity, stop_bits))
def uart_send_string(self, dev, test_string):
"""Send data via DUT's UART."""
return self.send_cmd("send {} {}".format(dev, test_string))
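# Example usage (a sketch; constructor arguments depend on riot_pal's
# DutShell, and the device index / baudrate are illustrative):
#
#   >>> uart = PeriphUartIf()
#   >>> uart.uart_init(1, 115200)
#   >>> uart.uart_send_string(1, "hello")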
|
openattic/openattic
|
backend/volumes/migrations/0002_remove.py
|
Python
|
gpl-2.0
| 5,838
| 0.001884
|
# -*- coding: utf-8 -*-
"""
* Copyright (c) 2017 SUSE LLC
*
* openATTIC is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2.
*
* This package is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
"""
# Django 1.7+ is not capable of rendering a consistent state when executing these migrations. Thus,
# Django is also not capable of generating the SQL statements.
# I'm getting:
#
# > django.db.migrations.state.InvalidBasesError: Cannot resolve bases for [<ModelState: 'volumes.GenericDisk'>]
# > This can happen if you are inheriting models from an app with migrations (e.g. contrib.auth)
# > in an app with no migrations; see https://docs.djangoproject.com/en/1.8/topics/migrations/#dependencies for more
#
# This is wrong. This migration now uses manually written SQL to perform the same steps as an
# automatic migration would generate. It then applies the operations to the database state.
#
# Without `state_operations`, `manage.py makemigrations` would generate the same migrations again,
# as it doesn't understand the custom SQL.
from __future__ import unicode_literals
from django.db import migrations, models
sql_clear = """
BEGIN;
ALTER TABLE "volumes_diskdevice" DROP COLUMN "host_id" CASCADE;
ALTER TABLE "volumes_diskdevice" DROP COLUMN "physicalblockdevice_ptr_id" CASCADE;
ALTER TABLE "volumes_filesystemprovider" DROP COLUMN "filesystemvolume_ptr_id" CASCADE;
ALTER TABLE "volumes_filesystemvolume" DROP COLUMN "owner_id" CASCADE;
ALTER TABLE "volumes_filesystemvolume" DROP COLUMN "volume_type_id" CASCADE;
ALTER TABLE "volumes_genericdisk" DROP COLUMN "disk_device_id" CASCADE;
ALTER TABLE "volumes_physicalblockdevice" DROP COLUMN "device_type_id" CASCADE;
ALTER TABLE "volumes_physicalblockdevice" DROP COLUMN "storageobj_id" CASCADE;
ALTER TABLE "volumes_storageobject" DROP COLUMN "source_pool_id" CASCADE;
ALTER TABLE "volumes_volumepool" DROP COLUMN "storageobj_id" CASCADE;
ALTER TABLE "volumes_volumepool" DROP COLUMN "volumepool_type_id" CASCADE;
DROP TABLE "volumes_diskdevice" CASCADE;
DROP TABLE "volumes_filesystemprovider" CASCADE;
DROP TABLE "volumes_filesystemvolume" CASCADE;
DROP TABLE "volumes_physicalblockdevice" CASCADE;
DROP TABLE "volumes_volumepool" CASCADE;
ALTER TABLE "volumes_blockvolume" DROP COLUMN "storageobj_id" CASCADE;
ALTER TABLE "volumes_blockvolume" DROP COLUMN "volume_type_id" CASCADE;
ALTER TABLE "volumes_storageobject" DROP COLUMN "snapshot_id" CASCADE;
ALTER TABLE "volumes_storageobject" DROP COLUMN "upper_id" CASCADE;
ALTER TABLE "volumes_genericdisk" DROP COLUMN "blockvolume_ptr_id" CASCADE;
DROP TABLE "volumes_storageobject" CASCADE;
DROP TABLE "volumes_blockvolume" CASCADE;
DROP TABLE IF EXISTS "volumes_genericdisk" CASCADE ;
COMMIT;
"""
state_operations = [
migrations.RemoveField(
model_name='blockvolume',
name='storageobj',
),
migrations.RemoveField(
model_name='blockvolume',
name='volume_type',
),
migrations.RemoveField(
model_name='diskdevice',
name='host',
),
migrations.RemoveField(
model_name='diskdevice',
name='physicalblockdevice_ptr',
),
migrations.RemoveField(
model_name='filesystemprovider',
name='filesystemvolume_ptr',
),
migrations.RemoveField(
model_name='filesystemvolume',
name='owner',
),
migrations.RemoveField(
model_name='filesystemvolume',
name='storageobj',
),
migrations.RemoveField(
model_name='filesystemvolume',
name='volume_type',
),
migrations.RemoveField(
model_name='genericdisk',
name='blockvolume_ptr',
),
migrations.RemoveField(
model_name='genericdisk',
name='disk_device',
),
migrations.RemoveField(
model_name='physicalblockdevice',
name='device_type',
),
migrations.RemoveField(
model_name='physicalblockdevice',
name='storageobj',
),
migrations.RemoveField(
model_name='storageobject',
name='snapshot',
),
migrations.RemoveField(
model_name='storageobject',
name='source_pool',
),
migrations.RemoveField(
model_name='storageobject',
name='upper',
),
migrations.RemoveField(
model_name='volumepool',
name='storageobj',
),
migrations.RemoveField(
model_name='volumepool',
name='volumepool_type',
),
migrations.DeleteModel(
name='BlockVolume',
),
migrations.DeleteModel(
name='DiskDevice',
),
migrations.DeleteModel(
name='FileSystemProvider',
),
migrations.DeleteModel(
name='FileSystemVolume',
),
migrations.DeleteModel(
name='GenericDisk',
),
migrations.DeleteModel(
name='PhysicalBlockDevice',
),
migrations.DeleteModel(
name='StorageObject',
),
migrations.DeleteModel(
name='VolumePool',
),
]
class Migration(migrations.Migration):
dependencies = [
('volumes', '0001_initial'),
]
operations = [
migrations.RunSQL(sql=sql_clear, state_operations=state_operations)
]
|
jkokorian/ODMAnalysis
|
odmanalysis/scripts/ViewCyclesInteractive.py
|
Python
|
gpl-3.0
| 3,813
| 0.012851
|
"""
Copyright (C) 2014 Delft University of Technology, The Netherlands
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Created on Fri Dec 20 11:29:47 2013
@author: jkokorian
"""
import sys as _sys
import os as _os
import odmanalysis as _odm
import odmanalysis.gui as _gui
from PyQt4 import QtCore as q
from PyQt4 import QtGui as qt
import pyqtgraph as pg
import numpy as _np
class InteractiveCycleViewer(qt.QWidget):
def __init__(self,df,parent=None):
qt.QWidget.__init__(self,parent)
self.setWindowTitle("Interactive Voltage-Displacement Cycle Viewer")
layout = qt.QVBoxLayout()
self.setLayout(layout)
hLayout = qt.QHBoxLayout()
layout.addLayout(hLayout)
self.cycleSlider = qt.QSlider(q.Qt.Horizontal)
self.cycleSlider.setTickPosition(qt.QSlider.TicksBothSides)
self.cycleSlider.setTickInterval(1)
hLayout.addWidget(self.cycleSlider)
self.cycleNumberLabel = qt.QLabel("cycle 1")
hLayout.addWidget(self.cycleNumberLabel)
self.graph = pg.PlotWidget()
layout.addWidget(self.graph)
self.graph.setLabel('left', 'Displacement', units='nm')
self.graph.setLabel('bottom', 'Voltage', units='V')
self.forwardPlot = self.graph.plot(name="forward")
self.forwardPlot.setPen((200,200,100))
self.backwardPlot = self.graph.plot(name="backward")
self.backwardPlot.setPen((100,200,200))
self.graph.addLegend()
self.df = df
self.cycleSlider.setMinimum(1)
self.cycleSlider.setMaximum(int(df.cycleNumber.max()))
# connect signals
self.cycleSlider.valueChanged.connect(self.showCycle)
self.cycleSlider.valueChanged.connect(lambda i: self.cycleNumberLabel.setText("cycle %i" % i))
self.showCycle(1)
def showCycle(self,cycleNumber):
df = self.df
dfFwd = df[(df.cycleNumber == cycleNumber) & (df.direction == 'forward')]
dfBwd = df[(df.cycleNumber == cycleNumber) & (df.direction == 'backward')]
self.forwardPlot.setData(x=_np.array(dfFwd.actuatorVoltage),y=_np.array(dfFwd.displacement_nm))
self.backwardPlot.setData(x=_np.array(dfBwd.actuatorVoltage),y=_np.array(dfBwd.displacement_nm))
def main():
if (len(_sys.argv) > 1 and _os.path.exists(_sys.argv[1]) and _os.path.isfile(_sys.argv[1])):
filename = _sys.argv[1]
else:
filename = _gui.get_path("*.csv",defaultFile="odmanalysis.csv")
commonPath = _os.path.abspath(_os.path.split(filename)[0])
measurementName = _os.path.split(_os.path.split(filename)[0])[1]
print "loading settings from " + commonPath + "/odmSettings.ini"
settings = _odm.CurveFitSettings.loadFromFile(commonPath+"/odmSettings.ini")
df = _odm.readAnalysisData(filename)
df['displacement_nm'] = df.displacement * settings.pxToNm
app = qt.QApplication(_sys.argv)
cycleViewer = InteractiveCycleViewer(df)
cycleViewer.show()
app.exec_()
if __name__ == "__main__":
main()
|
ofer43211/unisubs
|
apps/teams/search_indexes.py
|
Python
|
agpl-3.0
| 6,266
| 0.001596
|
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django.conf import settings
from django.db.models import Count
from haystack import site
from haystack.backends import SQ
from haystack.indexes import (
IntegerField, CharField, BooleanField, SearchIndex, DateTimeField,
MultiValueField
)
from haystack.query import SearchQuerySet
from teams import models
from subtitles.models import SubtitleLanguage
from haystack.exceptions import AlreadyRegistered
class TeamVideoLanguagesIndex(SearchIndex):
text = CharField(
document=True, use_template=True,
template_name="teams/teamvideo_languages_for_search.txt")
team_id = IntegerField()
team_video_pk = IntegerField(indexed=False)
video_pk = IntegerField(indexed=False)
video_id = CharField(indexed=False)
video_title = CharField(faceted=True)
video_url = CharField(indexed=False)
original_language = CharField()
original_language_display = CharField(indexed=False)
absolute_url = CharField(indexed=False)
project_pk = IntegerField(indexed=True)
task_count = IntegerField()
# never store an absolute url with solr
# since the url changes according to the user
# one cannot construct the url at index time
# video_absolute_url = CharField(indexed=False)
thumbnail = CharField(indexed=False)
title = CharField()
project_name = CharField(indexed=False)
project_slug = CharField(indexed=False)
description = CharField(indexed=True)
is_complete = BooleanField()
video_complete_date = DateTimeField(null=True)
# list of completed language codes
video_completed_langs = MultiValueField()
# list of completed language absolute urls. Should have a 1-1 mapping to video_completed_langs
video_completed_lang_urls = MultiValueField(indexed=False)
latest_submission_date = DateTimeField(null=True)
team_video_create_date = DateTimeField()
# possible values for visibility:
# is_public=True anyone can see
# is_public=False and owned_by_team_id=None -> a regular user owns, no teams can list this video
# is_public=False and owned_by_team_id=X -> only team X can see this video
is_public = BooleanField()
owned_by_team_id = IntegerField(null=True)
# All subtitle languages containing at least one version are included in the total count.
num_total_langs = IntegerField()
# Completed languages are languages which have at least one version that is:
#
# * Public
# * Covers all dialog
# * Fully synced
# * Fully translated, if a translation
num_completed_langs = IntegerField()
def prepare(self, obj):
self.prepared_data = super(TeamVideoLanguagesIndex, self).prepare(obj)
self.prepared_data['team_id'] = obj.team.id
self.prepared_data['team_video_pk'] = obj.id
self.prepared_data['video_pk'] = obj.video.id
self.prepared_data['video_id'] = obj.video.video_id
self.prepared_data['video_title'] = obj.video.title.strip()
self.prepared_data['video_url'] = obj.video.get_video_url()
original_sl = obj.video.subtitle_language()
if original_sl:
self.prepared_data['original_language_display'] = original_sl.get_language_code_display
self.prepared_data['original_language'] = original_sl.language_code
else:
self.prepared_data['original_language_display'] = ''
self.prepared_data['original_language'] = ''
self.prepared_data['absolute_url'] = obj.get_absolute_url()
self.prepared_data['thumbnail'] = obj.get_thumbnail()
self.prepared_data['title'] = obj.video.title_display()
self.prepared_data['description'] = obj.description
self.prepared_data['is_complete'] = obj.video.complete_date is not None
self.prepared_data['video_complete_date'] = obj.video.complete_date
self.prepared_data['project_pk'] = obj.project.pk
self.prepared_data['project_name'] = obj.project.name
self.prepared_data['project_slug'] = obj.project.slug
self.prepared_data['team_video_create_date'] = obj.created
completed_sls = list(obj.video.completed_subtitle_languages())
all_sls = obj.video.newsubtitlelanguage_set.having_nonempty_tip()
self.prepared_data['num_total_langs'] = all_sls.count()
self.prepared_data['num_completed_langs'] = len(completed_sls)
self.prepared_data['video_completed_langs'] = \
[sl.language_code for sl in completed_sls]
self.prepared_data['video_completed_lang_urls'] = \
[sl.get_absolute_url() for sl in completed_sls]
self.prepared_data['task_count'] = models.Task.objects.incomplete().filter(team_video=obj).count()
team_video = obj.video.get_team_video()
self.prepared_data['is_public'] = team_video.team.is_visible
self.prepared_data["owned_by_team_id"] = team_video.team.id if team_video else None
return self.prepared_data
@classmethod
def results_for_members(self, team):
base_qs = SearchQuerySet().models(models.TeamVideo)
public = SQ(is_public=True)
mine = SQ(is_public=False, owned_by_team_id=team.pk)
return base_qs.filter(public | mine)
@classmethod
def results(self):
return SearchQuerySet().models(models.TeamVideo).filter(is_public=True)
try:
site.register(models.TeamVideo, TeamVideoLanguagesIndex)
except AlreadyRegistered:
# i hate python imports with all my will.
# i hope they die.
pass
|
juju-solutions/charms.reactive
|
tests/data/reactive/relations/test-alt/provides.py
|
Python
|
apache-2.0
| 561
| 0
|
from charms.reactive import Endpoint, when
class TestAltProvides(Endpoint):
invocations = []
@when('endpoint.{endpoint_name}.joined')
def handle_joined(self):
self.invocations.append('joined: {}'.format(self.endpoint_name))
@when('endpoint.{endpoint_name}.changed')
def handle_changed(self):
self.invocations.append('changed: {}'.format(self.endpoint_name))
@when('endpoint.{endpoint_name}.changed.foo')
def handle_changed_foo(self):
self.invocations.append('changed.foo: {}'.format(self.endpoint_name))
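# A hedged sketch of how these handlers fire (per the standard
# charms.reactive Endpoint flag conventions): when a remote unit joins a
# relation on an endpoint named 'test-alt', the framework sets the
# 'endpoint.test-alt.joined' flag and dispatches handle_joined, appending
# 'joined: test-alt' to invocations. The 'changed.foo' variant fires only
# when the remote unit publishes a value for the 'foo' field.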
|
Ledoux/ShareYourSystem
|
Pythonlogy/ShareYourSystem/Standards/Viewers/Texter/__init__.py
|
Python
|
mit
| 2,100
| 0.045238
|
# -*- coding: utf-8 -*-
"""
<DefineSource>
@Date : Fri Nov 14 13:20:38 2014 \n
@Author : Erwan Ledoux \n\n
</DefineSource>
A Viewer
"""
#<DefineAugmentation>
import ShareYourSystem as SYS
BaseModuleStr="ShareYourSystem.Standards.Viewers.Viewer"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer"
SYS.setSubModule(globals())
#</DefineAugmentation>
#<ImportSpecificModules>
#</ImportSpecificModules>
#<DefineClass>
@DecorationClass()
class TexterClass(BaseClass):
#Definition
RepresentingKeyStrsList=[
'TextingConditionVariable',
'TextingQueryVariable',
'TextedCollectionStrsList',
'TextedConsoleStr'
]
def default_init(self,
_TextingConditionVariable=None,
_TextingQueryVariable=None,
_TextedCollectionStrsList=None,
_TextedConsoleStr="",
**_KwargVariablesDict
):
#Call the parent __init__ method
BaseClass.__init__(self,**_KwargVariablesDict)
def do_text(self):
#Init
self.TextedConsoleStr=""
#Check
if hasattr(self,'hdfview'):
#Add
self.TextedConsoleStr+="Associated Hdf fil
|
e :\n"
self.TextedConsoleStr+=self.ViewedPointDeriveControllerVariable.hdfview(
).HdformatedConsoleStr
#Check
if self.ViewedPointDeriveControllerVariable.PymongoneDatabaseVariable!=None:
			#Check (default both the condition and the query dicts, since both
			#are indexed below and would raise a TypeError if left as None)
			if self.TextingConditionVariable==None:
				self.TextingConditionVariable={}
			if self.TextingQueryVariable==None:
				self.TextingQueryVariable={}
#map
			self.TextedCollectionStrsList=map(
lambda __CollectionTuple:
__CollectionTuple[0]+' : \n'+SYS._str(
list(
__CollectionTuple[1].find(
self.TextingConditionVariable[__CollectionTuple[0]]
if __CollectionTuple[0] in self.TextingConditionVariable
else {},
self.TextingQueryVariable[__CollectionTuple[0]]
if __CollectionTuple[0] in self.TextingQueryVariable
else {}
)
)
),
self.ViewedPointDeriveControllerVariable.PymongoneDatabaseVariable.__dict__.items()
)
self.TextedConsoleStr+="Associated Mongo db :\n"
			self.TextedConsoleStr+='\n'.join(self.TextedCollectionStrsList)
#</DefineClass>
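#A hedged usage sketch (hypothetical controller setup): a Texter whose
#ViewedPointDeriveControllerVariable exposes hdfview() and a pymongo
#database would aggregate both views into one console string, e.g.
#
#	print MyTexter.text().TextedConsoleStr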
|
Byronic94/py-blog
|
fabfile.py
|
Python
|
gpl-2.0
| 5,473
| 0.00676
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Yan Yan'
'''
Deployment toolkit.
'''
import os, re
from datetime import datetime
from fabric.api import *
env.user = 'michael'
env.sudo_user = 'root'
env.hosts = ['192.168.0.3']
db_user = 'www-data'
db_password = 'www-data'
_TAR_FILE = 'dist-awesome.tar.gz'
_REMOTE_TMP_TAR = '/tmp/%s' % _TAR_FILE
_REMOTE_BASE_DIR = '/srv/awesome'
def _current_path():
return os.path.abspath('.')
def _now():
return datetime.now().strftime('%y-%m-%d_%H.%M.%S')
def backup():
'''
Dump entire database on server and backup to local.
'''
dt = _now()
f = 'backup-awesome-%s.sql' % dt
with cd('/tmp'):
        run('mysqldump --user=%s --password=%s --skip-opt --add-drop-table --default-character-set=utf8 --quick awesome > %s' % (db_user, db_password, f))
run('tar -czvf %s.tar.gz %s' % (f, f))
get('%s.tar.gz' % f, '%s/backup/' % _current_path())
run('rm -f %s' % f)
run('rm -f %s.tar.gz' % f)
def build():
'''
Build dist package.
'''
    includes = ['static', 'templates', 'transwarp', 'favicon.ico', '*.py']
excludes = ['test', '.*', '*.pyc', '*.pyo']
local('rm -f dist/%s' % _TAR_FILE)
with lcd(os.path.join(_current_path(), 'www')):
cmd = ['tar', '--dereference', '-czvf', '../dist/%s' % _TAR_FILE]
cmd.extend(['--exclude=\'%s\'' % ex for ex in excludes])
cmd.extend(includes)
local(' '.join(cmd))
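        # For reference, the command assembled above expands to roughly:
        #   tar --dereference -czvf ../dist/dist-awesome.tar.gz \
        #       --exclude='test' --exclude='.*' --exclude='*.pyc' --exclude='*.pyo' \
        #       static templates transwarp favicon.ico *.py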
def deploy():
newdir = 'www-%s' % _now()
run('rm -f %s' % _REMOTE_TMP_TAR)
put('dist/%s' % _TAR_FILE, _REMOTE_TMP_TAR)
with cd(_REMOTE_BASE_DIR):
sudo('mkdir %s' % newdir)
with cd('%s/%s' % (_REMOTE_BASE_DIR, newdir)):
sudo('tar -xzvf %s' % _REMOTE_TMP_TAR)
with cd(_REMOTE_BASE_DIR):
sudo('rm -f www')
sudo('ln -s %s www' % newdir)
sudo('chown www-data:www-data www')
sudo('chown -R www-data:www-data %s' % newdir)
with settings(warn_only=True):
sudo('supervisorctl stop awesome')
sudo('supervisorctl start awesome')
sudo('/etc/init.d/nginx reload')
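# After a successful deploy the remote layout looks roughly like this
# (timestamp illustrative):
#   /srv/awesome/www-15-04-01_12.30.00/          <- freshly unpacked release
#   /srv/awesome/www -> www-15-04-01_12.30.00    <- symlink served by nginx
# Keeping each release in its own directory is what lets rollback() simply
# repoint the symlink.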
RE_FILES = re.compile('\r?\n')
def rollback():
'''
rollback to previous version
'''
with cd(_REMOTE_BASE_DIR):
r = run('ls -p -1')
files = [s[:-1] for s in RE_FILES.split(r) if s.startswith('www-') and s.endswith('/')]
files.sort(cmp=lambda s1, s2: 1 if s1 < s2 else -1)
        r = run('ls -l www')
        ss = r.split(' -> ')
        if len(ss) != 2:
            print ('ERROR: \'www\' is not a symbolic link.')
            return
        current = ss[1]
        print ('Found current symbolic link pointing to: %s\n' % current)
        try:
            index = files.index(current)
        except ValueError:
            print ('ERROR: symbolic link is invalid.')
            return
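        # Note: this assumes 'ls -l www' prints a single symlink line such as
        #   lrwxrwxrwx 1 root root 20 Apr  1 12:30 www -> www-15-04-01_12.30.00
        # so splitting on ' -> ' yields the link name and its target.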
        if len(files) == index + 1:
            print ('ERROR: already the oldest version.')
            return
        old = files[index + 1]
print ('==================================================')
for f in files:
if f == current:
print (' Current ---> %s' % current)
elif f == old:
print (' Rollback to ---> %s' % old)
else:
print (' %s' % f)
print ('==================================================')
print ('')
yn = raw_input ('continue? y/N ')
if yn != 'y' and yn != 'Y':
print ('Rollback cancelled.')
return
print ('Start rollback...')
sudo('rm -f www')
sudo('ln -s %s www' % old)
sudo('chown www-data:www-data www')
with settings(warn_only=True):
sudo('supervisorctl stop awesome')
sudo('supervisorctl start awesome')
sudo('/etc/init.d/nginx reload')
        print ('Rolled back OK.')
def restore2local():
'''
Restore db to local
'''
backup_dir = os.path.join(_current_path(), 'backup')
fs = os.listdir(backup_dir)
files = [f for f in fs if f.startswith('backup-') and f.endswith('.sql.tar.gz')]
files.sort(cmp=lambda s1, s2: 1 if s1 < s2 else -1)
    if len(files) == 0:
        print ('No backup files found.')
return
print ('Found %s backup files:' % len(files))
print ('==================================================')
n = 0
for f in files:
print ('%s: %s' % (n, f))
n = n + 1
print ('==================================================')
print ('')
try:
num = int(raw_input ('Restore file: '))
except ValueError:
print ('Invalid file number.')
return
restore_file = files[num]
yn = raw_input('Restore file %s: %s? y/N ' % (num, restore_file))
if yn != 'y' and yn != 'Y':
print ('Restore cancelled.')
return
print ('Start restore to local database...')
p = raw_input('Input mysql root password: ')
sqls = [
'drop database if exists awesome;',
'create database awesome;',
'grant select, insert, update, delete on awesome.* to \'%s\'@\'localhost\' identified by \'%s\';' % (db_user, db_password)
]
for sql in sqls:
local(r'mysql -uroot -p%s -e "%s"' % (p, sql))
with lcd(backup_dir):
local('tar zxvf %s' % restore_file)
local(r'mysql -uroot -p%s awesome < backup/%s' % (p, restore_file[:-7]))
with lcd(backup_dir):
local('rm -f %s' % restore_file[:-7])
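    # Note: restore_file ends in '.sql.tar.gz'; stripping the last seven
    # characters ('.tar.gz') recovers the plain .sql dump extracted above.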
|
andrewkaufman/gaffer
|
python/GafferSceneUI/CameraUI.py
|
Python
|
bsd-3-clause
| 15,659
| 0.026968
|
# -*- coding: utf-8 -*-
##########################################################################
#
# Copyright (c) 2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import math
import functools
import imath
import IECoreScene
import Gaffer
import GafferUI
import GafferScene
import GafferSceneUI
##########################################################################
# Metadata
##########################################################################
plugsMetadata = {
"sets" : [
"layout:divider", True,
],
"projection" : [
"description",
"""
The base camera type.
Supports two standard projections: orthographic and
perspective. For less standard projections that require
renderer-specific implementations, such as spherical, you
will need to use a downstream CameraTweaks node to adjust
this camera's parameters.
""",
"preset:Perspective", "perspective",
"preset:Orthographic", "orthographic",
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
"layout:divider", True,
],
"perspectiveMode" : [
"description",
"""
The input values to use in defining the perspective
projection. They can be either a horizontal field of view
(`fieldOfView`), or a film back/sensor (`aperture`) and
focal length (`focalLength`). The latter two can take the
exact measurements from a real camera and lens setup. With
either perspective mode, perspective is stored as
`aperture` and `focalLength` parameters on the camera.
""",
"preset:Field Of View", GafferScene.Camera.PerspectiveMode.FieldOfView,
"preset:Aperture and Focal Length", GafferScene.Camera.PerspectiveMode.ApertureFocalLength,
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
"layout:visibilityActivator", "perspective",
],
"fieldOfView" : [
"description",
"""
The horizontal field of view, in degrees.
In the camera's parameters, projection is always stored as
`aperture` and `focalLength`. When using the _Field of
		View_ perspective mode, the aperture has the fixed
dimensions of `1, 1`, and this plug drives the
`focalLength` parameter.
""",
"layout:visibilityActivato
|
r", "perspectiveModeFOV",
],
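	# For reference (an assumption, not part of the registered metadata):
	# with the fixed 1x1 aperture used by the Field Of View mode, the
	# focal length equivalent to a horizontal field of view `fov` given
	# in degrees is
	#
	#   focalLength = 0.5 / math.tan( 0.5 * math.radians( fov ) )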
"apertureAspectRatio" : [
"description",
"""
		Determines the vertical field of view, via the ratio
		`(horizontal FOV) / (vertical FOV)`. A value of 1 would
result in a square aperture, while a value of 1.778 would
result in a 16:9 aperture.
"Aperture" in this sense is equivalent to film back/sensor.
The final projection of a render using this camera will
depend on these settings in combination with the
`resolution` and `filmFit` render settings.
""",
"layout:visibilityActivator", "perspectiveModeFOV",
	],
"aperture" : [
"description",
"""
The width and height of the aperture when using the
_Aperture and Focal Length_ perspective mode. Use this in
conjunction with a focal length to define the camera's
equivalent field of view.
"Aperture" here is equivalent to the film back/sensor on a
real camera. A handful of default camera presets are
provided, including Full Frame 35mm and several popular
Alexa and RED bodies. Once the aperture is set, the focal
length can then be adjusted on its own to control the field
of view, just like on a real camera.
When setting the aperture manually, the `x` and `y`
dimensions can be measured in any unit of length, so long
as they use the same unit as the focal length. You can
safely follow convention and use millimeters for both.
The final field of view of a render will depend on these
settings in combination with the `resolution` and `filmFit`
render options.
""",
"layout:visibilityActivator", "perspectiveModeFocalLength",
"preset:Academy 35mm 21.946 × 16.000", imath.V2f( 21.946, 16 ),
"preset:Super 35mm 24.892 × 18.669", imath.V2f( 24.892, 18.669 ),
"preset:Micro Four Thirds 17.30 × 13.00", imath.V2f( 17.3, 13 ),
"preset:APS-C 22.30 × 14.90", imath.V2f( 22.3, 14.9 ),
"preset:Full Frame 35mm 36.00 × 24.00", imath.V2f( 36, 24 ),
"preset:Alexa SXT 4:3 2.8k 23.76 × 17.82", imath.V2f( 23.76, 17.82 ),
"preset:Alexa SXT Open Gate 3.4k 28.25 × 18.17", imath.V2f( 28.25, 18.17 ),
"preset:Alexa 65 16:9 5.1k 42.24 × 23.76", imath.V2f( 42.24, 23.76 ),
"preset:Alexa 65 Open Gate 6.5k 54.12 × 25.58", imath.V2f( 54.12, 25.58 ),
"preset:RED EPIC-W 5K S35 30.72 × 18.00", imath.V2f( 30.72, 18 ),
"preset:RED EPIC-W 8K S35 29.90 × 15.77", imath.V2f( 29.9, 15.77 ),
"presetsPlugValueWidget:allowCustom", True,
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
],
"focalLength" : [
"description",
"""
The focal length portion of the _Aperture and Focal Length_
perspective mode. This is equivalent to the lens's focal
length in a real camera setup. Use this in conjunction with
the aperture to set the camera's equivalent field of view.
Like on a real camera, the aperture is typically constant,
and the focal length is then adjusted to control the field
of view.
This can be a distance in any unit of length, as long as
you use the same unit for the aperture. You can safely
follow convention and use millimeters for both.
The final field of view of a render using this camera will
depend on these settings in combination with the
`resolution` and `filmFit` render options.
""",
"layout:visibilityActivator", "perspectiveModeFocalLength",
],
"orthographicAperture" : [
"description",
"""
The width and height of the orthographic camera's aperture,
in world space units.
""",
"layout:visibilityActivator", "orthographic",
"layout:divider", True,
],
"apertureOffset" : [
"description",
"""
Offsets the aperture parallel to the image plane, to
achieve a skewed viewing frustum. The scale of the offset
depends on the projection and perspective mode:
- Perspective projection:
- _Field Of View_ mode: 1 offset = 1 horizontal field
of view.
- _Aperture and Focal Length_ mode: 1 offset = 1
aperture unit of measure (for example, 1mm).
- Orthographic projection: 1 offset = 1 world space unit.
For use in special cases, such as simulating a tilt-shift
lens, rendering tiles for a large panorama, or matching a
plate that has been asymmetrically cropped.
""",
],
"fStop" : [
"description",
"""
The setting equivalent to the f-number on a camera, which ultimately determines the strength of the depth of field blur. A low
|
aino/django-arrayfields
|
arrayfields/__init__.py
|
Python
|
bsd-3-clause
| 70
| 0
|
from .fields import CharArrayField, TextArrayField, IntegerArrayField
|