| code (stringlengths 3–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 3–1.05M) |
|---|---|---|---|---|---|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Field.placeholder_text'
db.add_column('forms_field', 'placeholder_text', self.gf('django.db.models.fields.CharField')(default='', max_length=100, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Field.placeholder_text'
db.delete_column('forms_field', 'placeholder_text')
models = {
'core.keyword': {
'Meta': {'object_name': 'Keyword'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'forms.field': {
'Meta': {'object_name': 'Field'},
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'choices': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'default': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'field_type': ('django.db.models.fields.CharField', [], {'max_length': '55'}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fields'", 'to': "orm['forms.Form']"}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'placeholder_text': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'})
},
'forms.fieldentry': {
'Meta': {'object_name': 'FieldEntry'},
'entry': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fields'", 'to': "orm['forms.FormEntry']"}),
'field_id': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2000'})
},
'forms.form': {
'Meta': {'object_name': 'Form', '_ormbases': ['pages.Page']},
'button_text': ('django.db.models.fields.CharField', [], {'default': "u'Submit'", 'max_length': '50'}),
'content': ('mezzanine.core.fields.HtmlField', [], {}),
'email_copies': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'email_from': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_subject': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'}),
'response': ('mezzanine.core.fields.HtmlField', [], {}),
'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'})
},
'forms.formentry': {
'Meta': {'object_name': 'FormEntry'},
'entry_time': ('django.db.models.fields.DateTimeField', [], {}),
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entries'", 'to': "orm['forms.Form']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'pages.page': {
'Meta': {'object_name': 'Page'},
'_keywords': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'description': ('mezzanine.core.fields.HtmlField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_footer': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'keywords': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Keyword']", 'symmetrical': 'False', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['pages.Page']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'})
}
}
complete_apps = ['forms']
| orlenko/bccf | src/mezzanine/forms/migrations/0002_auto__add_field_field_placeholder_text.py | Python | unlicense | 6,019 |
'''
Created on Mar 2, 2012
@author: Stephen O'Hara
Latent Configuration Clustering
Algorithm:
Compute Proximity Forest over a data set that has an unknown configuration
Compute graph from connectivity of leaf nodes in forest
Cluster using the weighted graph, hierarchical or spectral clustering
Copyright (C) 2012 Stephen O'Hara
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see <http://www.gnu.org/licenses/>.
'''
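# A minimal usage sketch of the three-stage pipeline described above. It simply mirrors the
# calls made in the demo functions further down in this file; nothing here is new API.
#
#   samples, _ = genNormalClusters(N=100)        # any list of samples plus a distance function works
#   labels, _, forest, graph = pf.LatentConfigurationClustering(samples, pt_dist, 4, numtrees=27)
#   # labels: one cluster id per sample; forest: the proximity forest; graph: leaf-connectivity graph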
import proximityforest as pf
import pylab as pl
import networkx as nx
try:
from IPython.parallel import require
except:
print "Unable to import IPython parallel library. Parallel implementations will be unavailable."
import pyvision as pv
import cv
import scipy
import scipy.cluster.hierarchy as spc
try:
import sklearn
except:
print "Warning: Could not import scikits.learn (sklearn) package. Swiss Roll data set and related samples won't work."
rint=scipy.random.randint
def genRandom2DPoints(N=3000, size=(640,480)):
xs = rint(low=0, high=size[0]+1, size=N)
ys = rint(low=0, high=size[1]+1, size=N)
return zip(xs,ys)
def genNormalClusters(N=100, size=(640,480)):
group1 = zip( 96*scipy.randn(N)+size[0]/4, 96*scipy.randn(N)+size[1]/4)
group2 = zip( 96*scipy.randn(N)+3*size[0]/4, 96*scipy.randn(N)+size[1]/4)
group3 = zip( 96*scipy.randn(N)+3*size[0]/4, 96*scipy.randn(N)+3*size[1]/4)
group4 = zip( 96*scipy.randn(N)+size[0]/4, 96*scipy.randn(N)+3*size[1]/4)
samples = group1+group2+group3+group4
labels = [0]*N + [1]*N + [2]*N + [3]*N
return (samples, labels)
def plotPoints(pts, size=(640,480), colors=None, window="Points"):
X = scipy.zeros(size)
img = pv.Image(X) #convert zeros matrix to an all-black image
points = [ pv.Point(x, y) for (x,y) in pts]
if colors is None:
colors = ['red' for p in pts]
for (p,c) in zip(points,colors):
img.annotatePoint(p, color=c)
if window is not None:
img.show(window=window, delay=0)
return img
def plotClusts(pts, labels, size=(640,480), colors=None, window="Points"):
cv_img = cv.CreateImage(size, 8, 1)
cv.Set(cv_img, 225.0) #light gray background
img = pv.Image(cv_img) #convert to a pyvision image
points = [ pv.Point(x, y) for (x,y) in pts]
if colors is None:
colors = ['red' for p in pts]
for (p,l,c) in zip(points,labels,colors):
#img.annotateLabel(p, str(l), color=c)
#img.annotatePoint(p, color=c)
img.annotateCircle(p,radius=5,color=c, fill=c)
if window is not None: img.show(window=window, delay=0)
return img
def plotProximityTreeLeaves(ptree, size=(640,480)):
X = scipy.zeros(size) #clear image data each time, because points can change leaf node membership as tree grows
img = pv.Image(X) #convert zeros matrix to an all-black image
nodes = ptree.getLeafNodes()
colordict={}
for node_id,n in enumerate(nodes):
if node_id in colordict:
hc = colordict[node_id]
else:
colr = (rint(80,256), rint(80,256), rint(80,256))
hc = RGBToHTMLColor(colr)
colordict[node_id] = hc
pts = [pt for (pt,_) in n.items]
poly = [pv.Point(*p) for p in convexHull(pts)]
img.annotatePolygon(poly, color=hc, width=1)
img.show(window="Proximity Tree Construction", delay=0)
#@require(scipy) #NOTE: If using an IPython parallel proximity forest, you may need to uncomment this @requires statement.
def pt_dist(pt1,pt2):
u = scipy.array(pt1)
v = scipy.array(pt2)
return scipy.sqrt(scipy.sum((u-v)**2))
def RGBToHTMLColor(rgb_tuple):
""" convert an (R, G, B) tuple to #RRGGBB """
hexcolor = '#%02x%02x%02x' % rgb_tuple
return hexcolor
def convexHull(pts):
storage = cv.CreateMemStorage(0)
hull = cv.ConvexHull2(pts, storage, cv.CV_CLOCKWISE, 1)
polygon = []
for i in hull:
polygon.append(i)
return polygon
def demoSwiss(k=6, parallel_client=None):
'''
Demonstrate the performance of LCC
on the swiss roll data set.
Some of the code is from the scikits.learn example for applying
ward's clustering to the swiss roll data, but appropriately modified
to use LCC instead.
Original authors of the non-LCC version:
# Authors : Vincent Michel, 2010
# Alexandre Gramfort, 2010
# Gael Varoquaux, 2010
# License: BSD
'''
import numpy as np
import pylab as pl
import mpl_toolkits.mplot3d.axes3d as p3
from sklearn.datasets.samples_generator import make_swiss_roll
# Generate data (swiss roll dataset)
n_samples = 1000
noise = 0.05
X, _ = make_swiss_roll(n_samples, noise)
# Make it thinner
X[:, 1] *= .5
#Convert data matrix X to a list of samples
N = X.shape[0]
dat = [X[i,:] for i in range(N)]
#generate LCC clustering
print "Generating LCC Clustering"
(label, _, _, _) = pf.LatentConfigurationClustering(dat, pt_dist, k, numtrees=27, parallel_client=parallel_client)
# Plot result
fig = pl.figure()
ax = p3.Axes3D(fig)
ax.view_init(7, -80)
for l in np.unique(label):
ax.plot3D(X[label == l, 0], X[label == l, 1], X[label == l, 2],
'o', color=pl.cm.jet(np.float(l) / np.max(label + 1)))
pl.title('Latent Configuration Clustering')
pl.show()
def demoFourGs():
'''
Demonstrate the performance of LCC
on points drawn from four Gaussian clusters
'''
s=(640,480)
dat = genNormalClusters(N=100, size=s)
cList = ['red', 'blue','green','yellow']
img_truth = plotClusts(dat[0], dat[1], size=s,
colors=[cList[i] for i in dat[1]], window=None)
#generate normal hierarchical clustering off euclidean data points
print "Generating Hierarchical Clustering on Raw Data"
Z2 = spc.ward(scipy.array(dat[0]))
clusts2 = spc.fcluster(Z2, 4, criterion="maxclust")
img_HC = plotClusts(dat[0], clusts2, size=s,
colors=[cList[i-1] for i in clusts2], window=None)
#generate LCC clustering
print "Generating LCC Clustering"
(clusts, _,_,_) = pf.LatentConfigurationClustering(dat[0], pt_dist, 4, numtrees=27)
img_LCC = plotClusts(dat[0], clusts, size=s,
colors=[cList[i-1] for i in clusts], window=None)
im = pv.ImageMontage([img_truth, img_LCC, img_HC], layout=(1,3), gutter=3,
tileSize=(320,240), labels=None )
im.show(window="Truth vs. LCC vs. HC")
def position_dictionary(pts):
return {i:pts[i] for i in range(len(pts))}
def demoCluster500(K=10, edgeThresh=9):
'''
Clusters 500 2d points into K clusters. Shows the clusters
using colors and overlays the connectivity graph for all edges
with weights >= edgeThresh
'''
pts = genRandom2DPoints(500)
pos_dict = position_dictionary(pts)
(clusts, _, pforest, g) = pf.LatentConfigurationClustering(pts, pt_dist, K)
g2 = pf.filter_edges(g, thresh=edgeThresh)
colors = [ float(c+1) for c in clusts]
nx.draw(g2, pos=pos_dict, with_labels=False,node_size=35, node_color=colors)
pl.show()
return (clusts, pforest, g)
def demoLCC_stages():
'''
Plots three diagrams showing input points, connectivity graph, and output labels
'''
pts, labels = genNormalClusters(N=250, size=(1200,800))
pos_dict = position_dictionary(pts)
(clusts, _, pforest, g) = pf.LatentConfigurationClustering(pts, pt_dist, 4)
g2 = pf.filter_edges(g, thresh=9)
colors = [ float(c+1) for c in clusts]
pl.figure(1, (18,6))
#figure 1, input points
yl = (-200,1000)
pl.subplot(1,3,1)
nx.draw_networkx_nodes(g2, pos=pos_dict, with_labels=False, node_size=35)
pl.ylim(yl)
#figure 2, connections above a threshold
pl.subplot(1,3,2)
nx.draw_networkx_edges(g2, pos=pos_dict, with_labels=False)
pl.ylim(yl)
#figure 3, coloring of nodes based on cluster labels
pl.subplot(1,3,3)
nx.draw_networkx(g2, pos=pos_dict, with_labels=False, node_size=35, node_color=colors, cmap="jet")
pl.ylim(yl)
pl.subplots_adjust(left=0.05, right=0.95)
pl.show()
return (pts, labels, clusts, pforest, g)
if __name__ == '__main__':
pass
| Sciumo/ProximityForest | samples/LatentConfigurationClustering_Demo.py | Python | gpl-3.0 | 8,896 |
#!python
# coding=utf-8
# Package level logger
import logging
logger = logging.getLogger("pocean")
logger.addHandler(logging.NullHandler())
__version__ = "1.0.0"
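# A consuming application that wants to see pocean's log output can attach its own handler to
# the package logger configured above (standard library logging only; a minimal sketch):
#
#   import logging
#   logging.getLogger("pocean").addHandler(logging.StreamHandler())
#   logging.getLogger("pocean").setLevel(logging.INFO)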
| joefutrelle/pocean-core | pocean/__init__.py | Python | mit | 164 |
import sys
import docutils.nodes
import sphinx.addnodes
class GetError(LookupError):
pass
def get_fullname(node):
if isinstance(node, docutils.nodes.section):
return get_sectionname(node)
if isinstance(node, sphinx.addnodes.desc):
return get_descname(node)
raise TypeError("Unrecognized node type '%s'" % (node.__class__,))
def get_descname(desc):
try:
sig = desc[0]
except IndexError:
raise GetError("No fullname: missing children in desc")
try:
names = sig['names']
except KeyError:
raise GetError(
"No fullname: missing names attribute in desc's child")
try:
return names[0]
except IndexError:
raise GetError("No fullname: desc's child has empty names list")
def get_sectionname(section):
try:
names = section['names']
except KeyError:
raise GetError("No fullname: missing names attribute in section")
try:
return names[0]
except IndexError:
raise GetError("No fullname: section has empty names list")
def get_refuri(node):
return as_refuri(get_refid(node))
def get_refid(node):
try:
return get_ids(node)[0]
except IndexError:
raise GetError("Node has empty ids list")
def as_refid(refuri):
return refuri[1:]
def as_refuri(refid):
return NUMBERSIGN + refid
def get_ids(node):
if isinstance(node, docutils.nodes.section):
try:
return node['ids']
except KeyError:
raise GetError("No ids: section missing ids attribute")
if isinstance(node, sphinx.addnodes.desc):
try:
sig = node[0]
except IndexError:
raise GetError("No ids: missing desc children")
try:
return sig['ids']
except KeyError:
raise GetError("No ids: desc's child missing ids attribute")
raise TypeError("Unrecognized node type '%s'" % (node.__class__,))
def isections(doctree):
for node in doctree:
if isinstance(node, docutils.nodes.section):
yield node
def get_name(fullname):
return fullname.split('.')[-1]
def geterror ():
return sys.exc_info()[1]
try:
_unicode = unicode
except NameError:
_unicode = str
# Represent escaped bytes and strings in a portable way.
#
# as_bytes: Allow a Python 3.x string to represent a bytes object.
# e.g.: as_bytes("a\x01b") == b"a\x01b" # Python 3.x
#       as_bytes("a\x01b") == "a\x01b"  # Python 2.x
# as_unicode: Allow a Python "r" string to represent a unicode string.
# e.g.: as_unicode(r"Bo\u00F6tes") == u"Bo\u00F6tes" # Python 2.x
# as_unicode(r"Bo\u00F6tes") == "Bo\u00F6tes" # Python 3.x
try:
eval("u'a'")
def as_bytes(string):
""" '<binary literal>' => '<binary literal>' """
return string
def as_unicode(rstring):
""" r'<Unicode literal>' => u'<Unicode literal>' """
return rstring.decode('unicode_escape', 'strict')
except SyntaxError:
def as_bytes(string):
""" '<binary literal>' => b'<binary literal>' """
return string.encode('latin-1', 'strict')
def as_unicode(rstring):
""" r'<Unicode literal>' => '<Unicode literal>' """
return rstring.encode('ascii', 'strict').decode('unicode_escape',
'strict')
# Ensure Visitor is a new-style class
_SparseNodeVisitor = docutils.nodes.SparseNodeVisitor
if not hasattr(_SparseNodeVisitor, '__class__'):
class _SparseNodeVisitor(object, docutils.nodes.SparseNodeVisitor):
pass
class Visitor(_SparseNodeVisitor):
skip_node = docutils.nodes.SkipNode()
skip_departure = docutils.nodes.SkipDeparture()
def __init__(self, app, document_node):
docutils.nodes.SparseNodeVisitor.__init__(self, document_node)
self.app = app
self.env = app.builder.env
def unknown_visit(self, node):
return
def unknown_departure(self, node):
return
EMPTYSTR = as_unicode('')
NUMBERSIGN = as_unicode('#')
| gmittal/aar-nlp-research-2016 | src/pygame-pygame-6625feb3fc7f/docs/reST/ext/utils.py | Python | mit | 4,072 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class netbridge(base_resource) :
""" Configuration for network bridge resource. """
def __init__(self) :
self._name = ""
self.___count = 0
@property
def name(self) :
ur"""The name of the network bridge.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""The name of the network bridge.
"""
try :
self._name = name
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(netbridge_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.netbridge
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
ur""" Use this API to add netbridge.
"""
try :
if type(resource) is not list :
addresource = netbridge()
addresource.name = resource.name
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ netbridge() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].name = resource[i].name
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
ur""" Use this API to delete netbridge.
"""
try :
if type(resource) is not list :
deleteresource = netbridge()
if type(resource) != type(deleteresource):
deleteresource.name = resource
else :
deleteresource.name = resource.name
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ netbridge() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ netbridge() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the netbridge resources that are configured on netscaler.
"""
try :
if not name :
obj = netbridge()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = netbridge()
obj.name = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [netbridge() for _ in range(len(name))]
obj = [netbridge() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = netbridge()
obj[i].name = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
ur""" Use this API to fetch a filtered set of netbridge resources.
filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = netbridge()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
ur""" Use this API to count the netbridge resources configured on NetScaler.
"""
try :
obj = netbridge()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
ur""" Use this API to count the filtered set of netbridge resources.
Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = netbridge()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class netbridge_response(base_response) :
def __init__(self, length=1) :
self.netbridge = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.netbridge = [netbridge() for _ in range(length)]
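# Minimal usage sketch of the classmethod API defined above. "client" is assumed to be an
# authenticated nitro_service session and "br0" a hypothetical bridge name:
#
#   br = netbridge()
#   br.name = "br0"
#   netbridge.add(client, br)          # create the bridge
#   bridges = netbridge.get(client)    # fetch all configured netbridge resources
#   netbridge.delete(client, "br0")    # delete by name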
| benfinke/ns_python | nssrc/com/citrix/netscaler/nitro/resource/config/network/netbridge.py | Python | apache-2.0 | 6,086 |
# coding=utf-8
"""
"""
__author__ = 'Alisue <lambdalisue@hashnote.net>'
import numpy as np
from xlwt import Formula
from xlwt import Workbook
from xlwt.Utils import rowcol_pair_to_cellrange
from maidenhair.utils.peakset import find_peakset
from txt2xls.writer.utils import ensure_iterable
from txt2xls.writer.utils import prefer_alphabet
from txt2xls.writer.utils import get_sheet_name
from txt2xls.function import parse_function
class Writer(object):
def __init__(self, conf):
self.default_filename = conf['default_filename']
def config_peakset(conf):
self.peakset_method = conf['method']
self.peakset_basecolumn = conf['basecolumn']
self.peakset_where_function = parse_function(conf['where_function'])
config_peakset(conf['peakset'])
def write(self, collection, filename=None, fail_silently=False):
# create new book
book = Workbook()
# write dataset
for name, dataset in collection.items():
self._write_dataset(dataset, book)
# write peakset if there are more than single dataset
if len(dataset) > 1:
sheet = book.add_sheet('peakset')
offsets = [0, 1]
for name, dataset in collection.items():
# write classify name
# Note: +1 for heading line
sheet.write(offsets[0]+1, 0, get_sheet_name(name))
# write peakset
self._write_peakset(dataset, offsets, sheet,
self.peakset_basecolumn,
self.peakset_method,
self.peakset_where_function)
# update offsets
offsets[0] += len(dataset) + 1
# save
book.save(filename or self.default_filename)
def _write_axis_header(self, a, axis, offsets, sheet):
"""
Write axis header
Args:
a (int): An index of the axis
axis (iterable): An axis
offsets (list): A list of row and column offsets
sheet (instance): An instance of xlwt sheet
Returns:
header: A list of header strings
"""
# find the maximum number of columns
ncol = max([len(columns) for columns in axis])
# create alphabetic headers
header = ["%s %d" % (prefer_alphabet(a), i+1) for i in range(ncol)]
# are Avg and Std columns required?
avg_required = ncol > 1
std_required = ncol > 2
# add Avg and/or Std columns if these are required
if avg_required:
header.append('Avg')
if std_required:
header.append('Std')
# write header
for c, cell in enumerate(header):
sheet.write(offsets[0], offsets[1]+c, cell)
return header
def _write_axes(self, axes, offsets, sheet):
"""
Write axes
Args:
axes (list): A list of axes
offsets (list): A list of row and column offsets
sheet (instance): An instance of xlwt sheet
"""
for a, axis in enumerate(axes):
# ensure axis is iterable
axis = ensure_iterable(axis)
# write header
header = self._write_axis_header(a, axis, offsets, sheet)
# write data
for r, columns in enumerate(axis):
for c, cell in enumerate(columns):
sheet.write(offsets[0]+1+r,
offsets[1]+c,
cell)
# create cell range
crange = rowcol_pair_to_cellrange(
offsets[0]+1+r, offsets[1],
offsets[0]+1+r, offsets[1] + len(columns) - 1)
# Add average if it is required
if 'Avg' in header:
ind = header.index('Avg')
sheet.write(offsets[0]+1+r,
offsets[1]+ind,
Formula('average(%s)' % crange))
# Add stdev if it is required
if 'Std' in header:
ind = header.index('Std')
sheet.write(offsets[0]+1+r,
offsets[1]+ind,
Formula('stdev(%s)' % crange))
# update column offset
offsets[1] += len(header) + 1
def _write_dataset(self, dataset, book):
"""
Write dataset
Args:
dataset (list): A list of data
book (instance): An instance of xlwt book
"""
for i, data in enumerate(dataset):
# create sheet name from dataset name
sheet_name = get_sheet_name(data[0])
# create sheet
sheet = book.add_sheet(sheet_name)
# write axes
self._write_axes(data[1:], [0, 0], sheet)
def _write_peakset(self, dataset, offsets, sheet,
peakset_basecolumn,
peakset_method,
peakset_where_function):
"""
Write peakset of the dataset
Args:
dataset (list): A list of data
offsets (list): A list of row and column offsets
sheet (instance): An instance of xlwt sheet
instance
Returns:
sheet: An instance of xlwt sheet
"""
# write filenames
for r, data in enumerate(dataset):
name = get_sheet_name(data[0])
# Note: +1 for heading line
sheet.write(offsets[0]+r+1, offsets[1], name)
# write peakset
# Note: dataset which passed to the maidenhair find_peakset
# should not contain filename column so remove it.
dataset = [data[1:] for data in dataset]
peakset = find_peakset(dataset,
basecolumn=peakset_basecolumn,
method=peakset_method,
where=peakset_where_function)
self._write_axes(peakset, [offsets[0], offsets[1]+1], sheet)
| lambdalisue/txt2xls | src/txt2xls/writer/__init__.py | Python | mit | 6,172 |
"""
.. module:: follower
:synopsis: Log tailing classes
.. moduleauthor:: Colin Alston <colin@imcol.in>
"""
import os
class LogFollower(object):
"""Provides a class for following log files between runs
:param logfile: Full path to logfile
:type logfile: str
:param parser: Optional parser method for log lines
:type parser: callable
"""
def __init__(self, logfile, parser=None, tmp_path="/var/lib/duct/",
history=False):
self.logfile = logfile
self.tmp = os.path.join(
tmp_path,
'%s.lf' % self.logfile.lstrip('/').replace('/', '-')
)
self.history = history
self.lastInode = None
self.lastSize = -1
self.readLast()
self.parser = parser
def cleanStore(self):
"""Clear up the log position store
"""
os.unlink(self.tmp)
def storeLast(self):
"""Persist the current position in the file
"""
fi = open(self.tmp, 'wt')
fi.write('%s:%s' % (self.lastSize, self.lastInode))
fi.close()
def readLast(self):
"""Read the last stored position (size and inode) for the file
"""
if os.path.exists(self.tmp):
fi = open(self.tmp, 'rt')
ls, li = fi.read().split(':')
self.lastSize = int(ls)
self.lastInode = int(li)
else:
if self.history:
self.lastSize = 0
self.lastInode = 0
else:
# Don't re-read the entire file
stat = os.stat(self.logfile)
self.lastSize = stat.st_size
self.lastInode = stat.st_ino
def get_fn(self, fn, max_lines=None):
"""Passes each parsed log line to `fn`
This is a better idea than storing a giant log file in memory
"""
stat = os.stat(self.logfile)
if (stat.st_ino == self.lastInode) and (stat.st_size == self.lastSize):
# Nothing new
return []
# Handle rollover and rotations vaguely
if (stat.st_ino != self.lastInode) or (stat.st_size < self.lastSize):
self.lastSize = 0
fi = open(self.logfile, 'rt')
fi.seek(self.lastSize)
self.lastInode = stat.st_ino
lines = 0
for i in fi:
lines += 1
if max_lines and (lines > max_lines):
self.storeLast()
fi.close()
return
if '\n' in i:
self.lastSize += len(i)
if self.parser:
line = self.parser(i.strip('\n'))
else:
line = i.strip('\n')
fn(line)
self.storeLast()
fi.close()
def get(self, max_lines=None):
"""Returns a big list of all log lines since the last run
"""
rows = []
self.get_fn(rows.append, max_lines=max_lines)
return rows
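# Minimal usage sketch (hypothetical paths; tmp_path must exist and be writable). Because the
# last read position is persisted via storeLast()/readLast(), each call returns only new lines:
#
#   follower = LogFollower('/var/log/syslog', tmp_path='/tmp/duct-positions/')
#   for line in follower.get(max_lines=1000):
#       print(line)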
| ducted/duct | duct/logs/follower.py | Python | mit | 2,971 |
from kivy.app import App
from kivy.factory import Factory
from kivy.properties import ObjectProperty
from kivy.lang import Builder
from electrum.util import base_units_list
from electrum.i18n import languages
from electrum.gui.kivy.i18n import _
from electrum.plugin import run_hook
from electrum import coinchooser
from .choice_dialog import ChoiceDialog
Builder.load_string('''
#:import partial functools.partial
#:import _ electrum.gui.kivy.i18n._
<SettingsDialog@Popup>
id: settings
title: _('Electrum Settings')
disable_pin: False
use_encryption: False
BoxLayout:
orientation: 'vertical'
ScrollView:
GridLayout:
id: scrollviewlayout
cols:1
size_hint: 1, None
height: self.minimum_height
padding: '10dp'
SettingsItem:
lang: settings.get_language_name()
title: 'Language' + ': ' + str(self.lang)
description: _('Language')
action: partial(root.language_dialog, self)
CardSeparator
SettingsItem:
disabled: root.disable_pin
title: _('PIN code')
description: _("Change your PIN code.")
action: partial(root.change_password, self)
CardSeparator
SettingsItem:
bu: app.base_unit
title: _('Denomination') + ': ' + self.bu
description: _("Base unit for Bitcoin amounts.")
action: partial(root.unit_dialog, self)
CardSeparator
SettingsItem:
status: root.fx_status()
title: _('Fiat Currency') + ': ' + self.status
description: _("Display amounts in fiat currency.")
action: partial(root.fx_dialog, self)
CardSeparator
SettingsItem:
status: 'ON' if bool(app.plugins.get('labels')) else 'OFF'
title: _('Labels Sync') + ': ' + self.status
description: _("Save and synchronize your labels.")
action: partial(root.plugin_dialog, 'labels', self)
CardSeparator
SettingsItem:
status: 'ON' if app.use_rbf else 'OFF'
title: _('Replace-by-fee') + ': ' + self.status
description: _("Create replaceable transactions.")
message:
_('If you check this box, your transactions will be marked as non-final,') \
+ ' ' + _('and you will have the possibility, while they are unconfirmed, to replace them with transactions that pays higher fees.') \
+ ' ' + _('Note that some merchants do not accept non-final transactions until they are confirmed.')
action: partial(root.boolean_dialog, 'use_rbf', _('Replace by fee'), self.message)
CardSeparator
SettingsItem:
status: _('Yes') if app.use_unconfirmed else _('No')
title: _('Spend unconfirmed') + ': ' + self.status
description: _("Use unconfirmed coins in transactions.")
message: _('Spend unconfirmed coins')
action: partial(root.boolean_dialog, 'use_unconfirmed', _('Use unconfirmed'), self.message)
CardSeparator
SettingsItem:
status: _('Yes') if app.use_change else _('No')
title: _('Use change addresses') + ': ' + self.status
description: _("Send your change to separate addresses.")
message: _('Send excess coins to change addresses')
action: partial(root.boolean_dialog, 'use_change', _('Use change addresses'), self.message)
# disabled: there is currently only one coin selection policy
#CardSeparator
#SettingsItem:
# status: root.coinselect_status()
# title: _('Coin selection') + ': ' + self.status
# description: "Coin selection method"
# action: partial(root.coinselect_dialog, self)
''')
class SettingsDialog(Factory.Popup):
def __init__(self, app):
self.app = app
self.plugins = self.app.plugins
self.config = self.app.electrum_config
Factory.Popup.__init__(self)
layout = self.ids.scrollviewlayout
layout.bind(minimum_height=layout.setter('height'))
# cached dialogs
self._fx_dialog = None
self._proxy_dialog = None
self._language_dialog = None
self._unit_dialog = None
self._coinselect_dialog = None
def update(self):
self.wallet = self.app.wallet
self.disable_pin = self.wallet.is_watching_only() if self.wallet else True
self.use_encryption = self.wallet.has_password() if self.wallet else False
def get_language_name(self):
return languages.get(self.config.get('language', 'en_UK'), '')
def change_password(self, item, dt):
self.app.change_password(self.update)
def language_dialog(self, item, dt):
if self._language_dialog is None:
l = self.config.get('language', 'en_UK')
def cb(key):
self.config.set_key("language", key, True)
item.lang = self.get_language_name()
self.app.language = key
self._language_dialog = ChoiceDialog(_('Language'), languages, l, cb)
self._language_dialog.open()
def unit_dialog(self, item, dt):
if self._unit_dialog is None:
def cb(text):
self.app._set_bu(text)
item.bu = self.app.base_unit
self._unit_dialog = ChoiceDialog(_('Denomination'), base_units_list,
self.app.base_unit, cb, keep_choice_order=True)
self._unit_dialog.open()
def coinselect_status(self):
return coinchooser.get_name(self.app.electrum_config)
def coinselect_dialog(self, item, dt):
if self._coinselect_dialog is None:
choosers = sorted(coinchooser.COIN_CHOOSERS.keys())
chooser_name = coinchooser.get_name(self.config)
def cb(text):
self.config.set_key('coin_chooser', text)
item.status = text
self._coinselect_dialog = ChoiceDialog(_('Coin selection'), choosers, chooser_name, cb)
self._coinselect_dialog.open()
def proxy_status(self):
net_params = self.app.network.get_parameters()
proxy = net_params.proxy
return proxy.get('host') +':' + proxy.get('port') if proxy else _('None')
def proxy_dialog(self, item, dt):
network = self.app.network
if self._proxy_dialog is None:
net_params = network.get_parameters()
proxy = net_params.proxy
def callback(popup):
nonlocal net_params
if popup.ids.mode.text != 'None':
proxy = {
'mode':popup.ids.mode.text,
'host':popup.ids.host.text,
'port':popup.ids.port.text,
'user':popup.ids.user.text,
'password':popup.ids.password.text
}
else:
proxy = None
net_params = net_params._replace(proxy=proxy)
network.run_from_another_thread(network.set_parameters(net_params))
item.status = self.proxy_status()
popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/proxy.kv')
popup.ids.mode.text = proxy.get('mode') if proxy else 'None'
popup.ids.host.text = proxy.get('host') if proxy else ''
popup.ids.port.text = proxy.get('port') if proxy else ''
popup.ids.user.text = proxy.get('user') if proxy else ''
popup.ids.password.text = proxy.get('password') if proxy else ''
popup.on_dismiss = lambda: callback(popup)
self._proxy_dialog = popup
self._proxy_dialog.open()
def plugin_dialog(self, name, label, dt):
from .checkbox_dialog import CheckBoxDialog
def callback(status):
self.plugins.enable(name) if status else self.plugins.disable(name)
label.status = 'ON' if status else 'OFF'
status = bool(self.plugins.get(name))
dd = self.plugins.descriptions.get(name)
descr = dd.get('description')
fullname = dd.get('fullname')
d = CheckBoxDialog(fullname, descr, status, callback)
d.open()
def fee_status(self):
return self.config.get_fee_status()
def boolean_dialog(self, name, title, message, dt):
from .checkbox_dialog import CheckBoxDialog
CheckBoxDialog(title, message, getattr(self.app, name), lambda x: setattr(self.app, name, x)).open()
def fx_status(self):
fx = self.app.fx
if fx.is_enabled():
source = fx.exchange.name()
ccy = fx.get_currency()
return '%s [%s]' %(ccy, source)
else:
return _('None')
def fx_dialog(self, label, dt):
if self._fx_dialog is None:
from .fx_dialog import FxDialog
def cb():
label.status = self.fx_status()
self._fx_dialog = FxDialog(self.app, self.plugins, self.config, cb)
self._fx_dialog.open()
| cryptapus/electrum | electrum/gui/kivy/uix/dialogs/settings.py | Python | mit | 9,722 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from gbpservice.nfp.core import common as nfp_common
from gbpservice.nfp.core import poll as core_pt
import gbpservice.nfp.lib.transport as transport
from gbpservice.nfp.proxy_agent.lib import topics as a_topics
from neutron import context as n_context
from oslo_log import log as oslo_logging
import sys
import traceback
LOGGER = oslo_logging.getLogger(__name__)
LOG = nfp_common.log
ResourceMap = {
'device_orch': a_topics.DEVICE_ORCH_TOPIC,
'service_orch': a_topics.SERVICE_ORCH_TOPIC,
'nas_service': a_topics.CONFIG_ORCH_TOPIC
}
"""Periodic Class to pull notification from configurator"""
class PullNotification(core_pt.PollEventDesc):
def __init__(self, sc, conf):
self._sc = sc
self._conf = conf
def handle_event(self, ev):
self._sc.poll_event(ev)
def _method_handler(self, notification):
# Method handles notification as per resource, resource_type and method
try:
requester = notification['info']['context']['requester']
topic = ResourceMap[requester]
context = notification['info']['context']['neutron_context']
rpcClient = transport.RPCClient(topic)
rpc_ctx = n_context.Context.from_dict(context)
rpcClient.cctxt.cast(rpc_ctx,
'network_function_notification',
notification_data=notification)
except Exception as e:
raise Exception(e)
@core_pt.poll_event_desc(event='PULL_NOTIFICATIONS', spacing=1)
def pull_notifications(self, ev):
"""Pull and handle notification from configurator."""
notifications = transport.get_response_from_configurator(self._conf)
if not isinstance(notifications, list):
LOG(LOGGER, 'ERROR', "Notifications not list, %s" % (notifications))
else:
for notification in notifications:
if not notification:
LOG(LOGGER, 'INFO', "Receiver Response: Empty")
continue
try:
self._method_handler(notification)
except AttributeError:
exc_type, exc_value, exc_traceback = sys.exc_info()
LOG(LOGGER, 'ERROR',
"AttributeError while handling message %s : %s " % (
notification, traceback.format_exception(
exc_type, exc_value, exc_traceback)))
except Exception as e:
exc_type, exc_value, exc_traceback = sys.exc_info()
LOG(LOGGER, 'ERROR', "Generic exception (%s) \
while handling message (%s) : %s" % (
e, notification, traceback.format_exception(
exc_type, exc_value, exc_traceback)))
| jiahaoliang/group-based-policy | gbpservice/nfp/proxy_agent/notifications/pull.py | Python | apache-2.0 | 3,442 |
import os
import time
import cPickle
import datetime
import logging
import flask
import werkzeug
import optparse
import tornado.wsgi
import tornado.httpserver
import numpy as np
import pandas as pd
from PIL import Image as PILImage
import cStringIO as StringIO
import urllib
import caffe
import exifutil
REPO_DIRNAME = os.path.abspath(os.path.dirname(__file__) + '/../..')
UPLOAD_FOLDER = '/tmp/caffe_demos_uploads'
ALLOWED_IMAGE_EXTENSIONS = set(['png', 'bmp', 'jpg', 'jpe', 'jpeg', 'gif'])
# Obtain the flask app object
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('index.html', has_result=False)
@app.route('/classify_url', methods=['GET'])
def classify_url():
imageurl = flask.request.args.get('imageurl', '')
try:
string_buffer = StringIO.StringIO(
urllib.urlopen(imageurl).read())
image = caffe.io.load_image(string_buffer)
except Exception as err:
# For any exception we encounter in reading the image, we will just
# not continue.
logging.info('URL Image open error: %s', err)
return flask.render_template(
'index.html', has_result=True,
result=(False, 'Cannot open image from URL.')
)
logging.info('Image: %s', imageurl)
result = app.clf.classify_image(image)
return flask.render_template(
'index.html', has_result=True, result=result, imagesrc=imageurl)
@app.route('/classify_upload', methods=['POST'])
def classify_upload():
try:
# We will save the file to disk for possible data collection.
imagefile = flask.request.files['imagefile']
filename_ = str(datetime.datetime.now()).replace(' ', '_') + \
werkzeug.secure_filename(imagefile.filename)
filename = os.path.join(UPLOAD_FOLDER, filename_)
imagefile.save(filename)
logging.info('Saving to %s.', filename)
image = exifutil.open_oriented_im(filename)
except Exception as err:
logging.info('Uploaded image open error: %s', err)
return flask.render_template(
'index.html', has_result=True,
result=(False, 'Cannot open uploaded image.')
)
result = app.clf.classify_image(image)
return flask.render_template(
'index.html', has_result=True, result=result,
imagesrc=embed_image_html(image)
)
def embed_image_html(image):
"""Creates an image embedded in HTML base64 format."""
image_pil = PILImage.fromarray((255 * image).astype('uint8'))
image_pil = image_pil.resize((256, 256))
string_buf = StringIO.StringIO()
image_pil.save(string_buf, format='png')
data = string_buf.getvalue().encode('base64').replace('\n', '')
return 'data:image/png;base64,' + data
def allowed_file(filename):
return (
'.' in filename and
filename.rsplit('.', 1)[1] in ALLOWED_IMAGE_EXTENSIONS
)
class ImagenetClassifier(object):
default_args = {
'model_def_file': (
'{}/models/bvlc_reference_caffenet/deploy.prototxt'.format(REPO_DIRNAME)),
'pretrained_model_file': (
'{}/models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel'.format(REPO_DIRNAME)),
'mean_file': (
'{}/python/caffe/imagenet/ilsvrc_2012_mean.npy'.format(REPO_DIRNAME)),
'class_labels_file': (
'{}/data/ilsvrc12/synset_words.txt'.format(REPO_DIRNAME)),
'bet_file': (
'{}/data/ilsvrc12/imagenet.bet.pickle'.format(REPO_DIRNAME)),
}
for key, val in default_args.iteritems():
if not os.path.exists(val):
raise Exception(
"File for {} is missing. Should be at: {}".format(key, val))
default_args['image_dim'] = 227
default_args['raw_scale'] = 255.
default_args['gpu_mode'] = False
def __init__(self, model_def_file, pretrained_model_file, mean_file,
raw_scale, class_labels_file, bet_file, image_dim, gpu_mode):
logging.info('Loading net and associated files...')
self.net = caffe.Classifier(
model_def_file, pretrained_model_file,
image_dims=(image_dim, image_dim), raw_scale=raw_scale,
mean=np.load(mean_file), channel_swap=(2, 1, 0), gpu=gpu_mode
)
with open(class_labels_file) as f:
labels_df = pd.DataFrame([
{
'synset_id': l.strip().split(' ')[0],
'name': ' '.join(l.strip().split(' ')[1:]).split(',')[0]
}
for l in f.readlines()
])
self.labels = labels_df.sort('synset_id')['name'].values
self.bet = cPickle.load(open(bet_file))
# A bias to prefer children nodes in single-chain paths
# I am setting the value to 0.1 as a quick, simple model.
# We could use better psychological models here...
self.bet['infogain'] -= np.array(self.bet['preferences']) * 0.1
def classify_image(self, image):
try:
starttime = time.time()
scores = self.net.predict([image], oversample=True).flatten()
endtime = time.time()
indices = (-scores).argsort()[:5]
predictions = self.labels[indices]
# In addition to the prediction text, we will also produce
# the length for the progress bar visualization.
meta = [
(p, '%.5f' % scores[i])
for i, p in zip(indices, predictions)
]
logging.info('result: %s', str(meta))
# Compute expected information gain
expected_infogain = np.dot(
self.bet['probmat'], scores[self.bet['idmapping']])
expected_infogain *= self.bet['infogain']
# sort the scores
infogain_sort = expected_infogain.argsort()[::-1]
bet_result = [(self.bet['words'][v], '%.5f' % expected_infogain[v])
for v in infogain_sort[:5]]
logging.info('bet result: %s', str(bet_result))
return (True, meta, bet_result, '%.3f' % (endtime - starttime))
except Exception as err:
logging.info('Classification error: %s', err)
return (False, 'Something went wrong when classifying the '
'image. Maybe try another one?')
def start_tornado(app, port=5000):
http_server = tornado.httpserver.HTTPServer(
tornado.wsgi.WSGIContainer(app))
http_server.listen(port)
print("Tornado server starting on port {}".format(port))
tornado.ioloop.IOLoop.instance().start()
def start_from_terminal(app):
"""
Parse command line options and start the server.
"""
parser = optparse.OptionParser()
parser.add_option(
'-d', '--debug',
help="enable debug mode",
action="store_true", default=False)
parser.add_option(
'-p', '--port',
help="which port to serve content on",
type='int', default=5000)
parser.add_option(
'-g', '--gpu',
help="use gpu mode",
action='store_true', default=False)
opts, args = parser.parse_args()
ImagenetClassifier.default_args.update({'gpu_mode': opts.gpu})
# Initialize classifier
app.clf = ImagenetClassifier(**ImagenetClassifier.default_args)
if opts.debug:
app.run(debug=True, host='0.0.0.0', port=opts.port)
else:
start_tornado(app, opts.port)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
if not os.path.exists(UPLOAD_FOLDER):
os.makedirs(UPLOAD_FOLDER)
start_from_terminal(app)
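# Example invocations based on the options defined in start_from_terminal above. The model,
# mean and label files listed in ImagenetClassifier.default_args must already exist on disk:
#
#   python app.py --port 5000 --gpu
#   python app.py -d              # debug mode via Flask's built-in server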
| chizhizhen/DNT | caffe/examples/web_demo/app.py | Python | mit | 7,659 |
import sys
from setuptools import setup, find_packages
import lazyconf
l = 'lazyconf/'
s = 'schema/'
setup(
name='lazyconf',
version='0.5.4',
author='Fareed Dudhia',
author_email='fareeddudhia@gmail.com',
packages=find_packages(),
package_data={"lazyconf": ['schema/' 'lazyconf/schema/*.json',]},
include_package_data=True,
py_modules=['lazyconf.lazyconf','lazyconf.console'],
entry_points={
'console_scripts': ['lazyconf = lazyconf.console:console',]},
url='https://www.github.com/fmd/lazyconf',
license='LICENSE.rst',
description='Insultingly simple configuration for Python 2.7 applications.',
long_description=open('README.rst').read(),
)
| fmd/lazyconf | setup.py | Python | mit | 705 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Variation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=120)),
('price', models.DecimalField(max_digits=1000, decimal_places=2)),
('sale_price', models.DecimalField(null=True, max_digits=1000, decimal_places=2, blank=True)),
('active', models.BooleanField(default=True)),
('inventory', models.IntegerField(null=True, blank=True)),
('product', models.ForeignKey(to='products.Product')),
],
),
]
| kronicz/ecommerce-2 | src/products/migrations/0002_variation.py | Python | mit | 927 |
from matplotlib.colors import LinearSegmentedColormap
from numpy import nan, inf
cm_data = [[0.0000208612, 0.0000200049, 0.0000198463],
[0.000405063, 0.000332768, 0.000320958],
[0.00119499, 0.000941283, 0.00090058],
[0.00236152, 0.0018053, 0.00171716],
[0.00389363, 0.00290336, 0.00274803],
[0.00578633, 0.00422109, 0.00397776],
[0.00803761, 0.00574787, 0.00539478],
[0.0106472, 0.00747537, 0.0069899],
[0.013616, 0.00939683, 0.00875557],
[0.0169455, 0.0115066, 0.0106854],
[0.020638, 0.0137998, 0.0127739],
[0.0246962, 0.0162724, 0.0150162],
[0.0291229, 0.0189206, 0.0174081],
[0.0339214, 0.0217412, 0.0199457],
[0.0390952, 0.0247314, 0.0226256],
[0.0444589, 0.0278886, 0.0254447],
[0.0497683, 0.0312104, 0.0284001],
[0.0550376, 0.0346947, 0.031489],
[0.060271, 0.0383395, 0.0347092],
[0.0654721, 0.0420948, 0.0380583],
[0.0706441, 0.0457977, 0.0415078],
[0.0757898, 0.0494543, 0.0449095],
[0.0809117, 0.0530675, 0.0482648],
[0.0860121, 0.05664, 0.0515762],
[0.091093, 0.0601739, 0.0548462],
[0.0961562, 0.0636715, 0.0580769],
[0.101204, 0.0671347, 0.0612703],
[0.106236, 0.0705652, 0.0644282],
[0.111256, 0.0739647, 0.0675522],
[0.116264, 0.0773346, 0.070644],
[0.121262, 0.0806763, 0.073705],
[0.12625, 0.0839911, 0.0767363],
[0.13123, 0.0872802, 0.0797393],
[0.136203, 0.0905447, 0.0827151],
[0.141168, 0.0937856, 0.0856648],
[0.146128, 0.0970038, 0.0885893],
[0.151084, 0.1002, 0.0914895],
[0.156035, 0.103376, 0.0943664],
[0.160982, 0.106531, 0.0972207],
[0.165927, 0.109667, 0.100053],
[0.170869, 0.112785, 0.102865],
[0.17581, 0.115884, 0.105656],
[0.18075, 0.118966, 0.108427],
[0.185689, 0.122031, 0.111179],
[0.190628, 0.12508, 0.113913],
[0.195567, 0.128113, 0.116629],
[0.200507, 0.131131, 0.119327],
[0.205449, 0.134135, 0.122008],
[0.210392, 0.137124, 0.124673],
[0.215338, 0.140099, 0.127322],
[0.220286, 0.143061, 0.129955],
[0.225237, 0.146009, 0.132572],
[0.230191, 0.148946, 0.135176],
[0.235148, 0.151869, 0.137764],
[0.24011, 0.154781, 0.140339],
[0.245076, 0.157682, 0.1429],
[0.250046, 0.160571, 0.145447],
[0.255021, 0.163449, 0.147981],
[0.260001, 0.166316, 0.150503],
[0.264987, 0.169173, 0.153012],
[0.269978, 0.17202, 0.155509],
[0.274975, 0.174857, 0.157994],
[0.279979, 0.177684, 0.160467],
[0.284989, 0.180502, 0.162929],
[0.290005, 0.183311, 0.16538],
[0.295028, 0.186111, 0.16782],
[0.300059, 0.188903, 0.170249],
[0.305097, 0.191686, 0.172668],
[0.310142, 0.19446, 0.175077],
[0.315195, 0.197227, 0.177475],
[0.320257, 0.199985, 0.179864],
[0.325326, 0.202736, 0.182243],
[0.330404, 0.20548, 0.184612],
[0.335491, 0.208216, 0.186972],
[0.340586, 0.210945, 0.189323],
[0.34569, 0.213667, 0.191666],
[0.350804, 0.216382, 0.193999],
[0.355926, 0.21909, 0.196324],
[0.361059, 0.221792, 0.19864],
[0.366201, 0.224488, 0.200948],
[0.371353, 0.227177, 0.203248],
[0.376515, 0.22986, 0.205539],
[0.381687, 0.232537, 0.207823],
[0.386869, 0.235208, 0.2101],
[0.392063, 0.237873, 0.212368],
[0.397267, 0.240533, 0.214629],
[0.402481, 0.243187, 0.216883],
[0.407707, 0.245836, 0.219129],
[0.412944, 0.24848, 0.221369],
[0.418193, 0.251119, 0.223601],
[0.423453, 0.253752, 0.225826],
[0.428724, 0.256381, 0.228045],
[0.434008, 0.259004, 0.230257],
[0.439303, 0.261623, 0.232463],
[0.44461, 0.264237, 0.234662],
[0.44993, 0.266847, 0.236854],
[0.455262, 0.269452, 0.239041],
[0.460607, 0.272053, 0.241221],
[0.465964, 0.27465, 0.243395],
[0.471334, 0.277242, 0.245563],
[0.476718, 0.27983, 0.247725],
[0.482114, 0.282415, 0.249882],
[0.487523, 0.284995, 0.252033],
[0.492946, 0.287572, 0.254178],
[0.498383, 0.290144, 0.256317],
[0.503833, 0.292713, 0.258451],
[0.509297, 0.295278, 0.26058],
[0.514775, 0.29784, 0.262703],
[0.520267, 0.300398, 0.264822],
[0.525773, 0.302953, 0.266935],
[0.531293, 0.305505, 0.269043],
[0.536829, 0.308053, 0.271146],
[0.542378, 0.310598, 0.273244],
[0.547943, 0.31314, 0.275337],
[0.553522, 0.315679, 0.277425],
[0.559116, 0.318214, 0.279509],
[0.564726, 0.320747, 0.281588],
[0.57035, 0.323277, 0.283662],
[0.57599, 0.325804, 0.285732],
[0.581646, 0.328328, 0.287797],
[0.587318, 0.330849, 0.289858],
[0.593005, 0.333368, 0.291914],
[0.598708, 0.335884, 0.293967],
[0.604427, 0.338398, 0.296015],
[0.610163, 0.340909, 0.298059],
[0.615915, 0.343418, 0.300098],
[0.621683, 0.345924, 0.302134],
[0.627468, 0.348428, 0.304166],
[0.631798, 0.35197, 0.307537],
[0.634653, 0.356555, 0.312257],
[0.6375, 0.361141, 0.316988],
[0.640339, 0.36573, 0.321727],
[0.643169, 0.370321, 0.326477],
[0.645992, 0.374915, 0.331236],
[0.648807, 0.379511, 0.336004],
[0.651614, 0.384111, 0.340782],
[0.654414, 0.388713, 0.34557],
[0.657208, 0.393319, 0.350368],
[0.659995, 0.397928, 0.355175],
[0.662775, 0.402541, 0.359992],
[0.665549, 0.407158, 0.364818],
[0.668318, 0.411778, 0.369654],
[0.67108, 0.416403, 0.3745],
[0.673838, 0.421032, 0.379355],
[0.67659, 0.425666, 0.38422],
[0.679337, 0.430304, 0.389095],
[0.68208, 0.434946, 0.39398],
[0.684818, 0.439594, 0.398874],
[0.687552, 0.444246, 0.403778],
[0.690282, 0.448904, 0.408692],
[0.693008, 0.453567, 0.413615],
[0.695731, 0.458235, 0.418549],
[0.698451, 0.462909, 0.423492],
[0.701167, 0.467588, 0.428445],
[0.703881, 0.472274, 0.433407],
[0.706593, 0.476965, 0.43838],
[0.709302, 0.481662, 0.443363],
[0.712009, 0.486365, 0.448355],
[0.714715, 0.491075, 0.453358],
[0.717419, 0.495791, 0.45837],
[0.720121, 0.500513, 0.463392],
[0.722823, 0.505242, 0.468424],
[0.725524, 0.509978, 0.473467],
[0.728224, 0.514721, 0.478519],
[0.730924, 0.51947, 0.483581],
[0.733623, 0.524227, 0.488654],
[0.736323, 0.528991, 0.493737],
[0.739024, 0.533762, 0.498829],
[0.741725, 0.538541, 0.503932],
[0.744427, 0.543327, 0.509045],
[0.74713, 0.548121, 0.514169],
[0.749834, 0.552922, 0.519302],
[0.75254, 0.557732, 0.524446],
[0.755248, 0.562549, 0.529601],
[0.757958, 0.567374, 0.534765],
[0.760671, 0.572208, 0.53994],
[0.763386, 0.57705, 0.545126],
[0.766103, 0.5819, 0.550322],
[0.768824, 0.586759, 0.555528],
[0.771549, 0.591626, 0.560746],
[0.774277, 0.596502, 0.565973],
[0.777009, 0.601387, 0.571212],
[0.779744, 0.606281, 0.576461],
[0.782485, 0.611184, 0.58172],
[0.78523, 0.616096, 0.586991],
[0.78798, 0.621017, 0.592272],
[0.790735, 0.625948, 0.597564],
[0.793495, 0.630888, 0.602867],
[0.796261, 0.635837, 0.608181],
[0.799033, 0.640797, 0.613506],
[0.801811, 0.645766, 0.618842],
[0.804596, 0.650745, 0.624189],
[0.807387, 0.655733, 0.629547],
[0.810186, 0.660732, 0.634916],
[0.812991, 0.665741, 0.640296],
[0.815804, 0.670761, 0.645688],
[0.818625, 0.675791, 0.651091],
[0.821454, 0.680831, 0.656506],
[0.824292, 0.685882, 0.661931],
[0.827138, 0.690943, 0.667368],
[0.829993, 0.696016, 0.672817],
[0.832857, 0.701099, 0.678277],
[0.835731, 0.706193, 0.683749],
[0.838615, 0.711299, 0.689233],
[0.841508, 0.716415, 0.694728],
[0.844412, 0.721543, 0.700235],
[0.847327, 0.726683, 0.705753],
[0.850253, 0.731834, 0.711284],
[0.85319, 0.736996, 0.716826],
[0.856139, 0.742171, 0.722381],
[0.8591, 0.747357, 0.727947],
[0.862073, 0.752555, 0.733525],
[0.865059, 0.757765, 0.739116],
[0.868057, 0.762987, 0.744719],
[0.871069, 0.768222, 0.750333],
[0.874094, 0.773468, 0.75596],
[0.877134, 0.778728, 0.761599],
[0.880188, 0.783999, 0.767251],
[0.883256, 0.789284, 0.772915],
[0.886339, 0.794581, 0.778591],
[0.889438, 0.799891, 0.78428],
[0.892553, 0.805213, 0.789981],
[0.895684, 0.810549, 0.795694],
[0.898831, 0.815898, 0.80142],
[0.901996, 0.82126, 0.807159],
[0.905178, 0.826636, 0.81291],
[0.908378, 0.832024, 0.818673],
[0.911596, 0.837426, 0.82445],
[0.914833, 0.842842, 0.830238],
[0.918089, 0.848271, 0.836039],
[0.921365, 0.853714, 0.841853],
[0.924661, 0.85917, 0.847679],
[0.927978, 0.864641, 0.853518],
[0.931317, 0.870125, 0.859369],
[0.934677, 0.875622, 0.865233],
[0.93806, 0.881134, 0.871109],
[0.941467, 0.88666, 0.876997],
[0.944897, 0.892199, 0.882897],
[0.948352, 0.897753, 0.888809],
[0.951833, 0.90332, 0.894733],
[0.95534, 0.908901, 0.900668],
[0.958874, 0.914496, 0.906615],
[0.962437, 0.920105, 0.912573],
[0.96603, 0.925727, 0.918541],
[0.969653, 0.931363, 0.92452],
[0.973309, 0.937012, 0.930509],
[0.976999, 0.942673, 0.936506],
[0.980725, 0.948347, 0.942512],
[0.984489, 0.954033, 0.948526],
[0.988295, 0.95973, 0.954545],
[0.992146, 0.965438, 0.960568],
[0.996048, 0.971154, 0.966592],
[1., 0.976876, 0.972614],
[1., 0.982601, 0.978627],
[1., 0.98832, 0.984617],
[1., 0.994001, 0.990529]]
test_cm = LinearSegmentedColormap.from_list(__file__, cm_data)
if __name__ == "__main__":
import matplotlib.pyplot as plt
import numpy as np
try:
from pycam02ucs.cm.viscm import viscm
viscm(test_cm)
except ImportError:
print("pycam02ucs not found, falling back on simple display")
plt.imshow(np.linspace(0, 100, 256)[None, :], aspect='auto',
cmap=test_cm)
plt.show()
| planetarymike/IDL-Colorbars | IDL_py_test/082_Singlehue_RedOrange.py | Python | gpl-2.0 | 8,754 |
# -*- coding: utf-8 -*-
"""
dummydrivers
~~~~~~~~~~~~
Just some fake drivers to show how the backend, frontend works.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import numpy as np
from lantz import Driver, Feat, ureg, Action
class DummyFunGen(Driver):
"""A Function Generator Driver.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._amplitude = ureg.volt
self._frequency = ureg.Hz
@Feat(units='Hz')
def frequency(self):
return self._frequency
@frequency.setter
def frequency(self, value):
self._frequency = value
@Feat(units='volt')
def amplitude(self):
return self._amplitude
@amplitude.setter
def amplitude(self, value):
self._amplitude = value
class DummyOsci(Driver):
"""An Oscilloscope Driver.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._amplitude = ureg.volt
@Action()
def measure(self):
return np.random.random((100, ))
class DummyShutter(Driver):
"""A Shutter Driver.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._opened = False
@Feat(values={True, False})
def opened(self):
return self._opened
@opened.setter
def opened(self, value):
self._opened = value
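# A hypothetical quick check of the dummy drivers above (illustrative only; the
# exact behaviour of Feats declared with units depends on the installed lantz
# version).  Quantities are built from the same ``ureg`` imported at the top of
# this file.
if __name__ == '__main__':
    fungen = DummyFunGen()
    fungen.frequency = 100 * ureg.Hz   # set the Hz Feat with a pint quantity
    fungen.amplitude = 2 * ureg.volt   # set the volt Feat likewise
    osci = DummyOsci()
    trace = osci.measure()             # 100 random samples
    shutter = DummyShutter()
    shutter.opened = True
    print(fungen.frequency, fungen.amplitude, len(trace), shutter.opened)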
|
varses/awsch
|
lantz/drivers/examples/dummydrivers.py
|
Python
|
bsd-3-clause
| 1,485
|
###
### This script can be run with pvpython rather than pvbatch, as it does not
### need mpi.
###
### Purpose:
###
### Read all timesteps of the halos point files (vtu) which have fewer
### halo points and the raw-particles (vtu) which have many more halo points.
### For the few halo points, we create a glyph filter, scaling the glyph size
### and coloring by the magnitude density. For the many halo points, we create
### 5 threshold filters, whose ranges are dynamically reset at each time step
### so that each threshold contains roughly the same number of points. These
### thresholds are also colored by velocity magnitude, which we have to compute
### specifically in this case because the data files don't have that array to
### begin with.
###
### Input Files:
###
###   1) DataExploration/Data/Cosmology/data/run-1/halos-%d.vtu
### 2) DataExploration/Data/Cosmology/data/analysis/raw-particles/499-%d.vtu
###
### Output Files:
###
###   A cinema dataset into: DataExploration/Output/Cosmology/point_thresholds
###
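###
### For reference, the "roughly the same number of points" ranges mentioned
### above can be obtained from equal-count quantiles of the magnitude array;
### an illustrative numpy sketch of that idea (this is not the helper actually
### provided by cinema_utilities):
###
###     import numpy as np
###     edges = np.percentile(magnitude_values, [0, 20, 40, 60, 80, 100])
###     ranges = zip(edges[:-1], edges[1:])   # five equal-count ranges
###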
import os, sys, math
from paraview.simple import *
from paraview import data_exploration as wx
from cinema_utilities import *
# Need this one to directly rescale transfer functions to data range
from vtkPVServerManagerRenderingPython import *
LoadDistributedPlugin('RGBZView', ns=globals())
# -----------------------------------------------------------------------------
# Helper methods
# -----------------------------------------------------------------------------
def buildSpectralLUT(name):
return GetLookupTableForArray( name,
1,
RGBPoints = [0.0, 0.368627, 0.309804, 0.635294, 90.00072000576006, 0.196078, 0.533333, 0.741176, 180.00144001152012, 0.4, 0.760784, 0.647059, 270.0021600172801, 0.670588, 0.866667, 0.643137, 360.00288002304023, 0.901961, 0.960784, 0.596078, 450.00360002880024, 1.0, 1.0, 0.74902, 540.0043200345602, 0.996078, 0.878431, 0.545098, 630.0050400403203, 0.992157, 0.682353, 0.380392, 720.0057600460805, 0.956863, 0.427451, 0.262745, 810.0064800518404, 0.835294, 0.243137, 0.309804, 900.0, 0.619608, 0.00392157, 0.258824],
NanColor = [0.500008, 0.0, 0.0],
ColorSpace = 'RGB',
ScalarRangeInitialized=1.0,
LockScalarRange=0)
# -----------------------------------------------------------------------------
# Output configuration
# -----------------------------------------------------------------------------
path_root = '/media/scott/CINEMA FAT'
output_working_dir = os.path.join(path_root, 'DataExploration/Output/Cosmology/point_thresholds/')
title = "499-2 - Probe the Cosmic Structure of the Dark Universe"
description = """
In the standard model of cosmology, dark energy and dark matter
together account for 95 percent of the mass energy of the universe;
however, their ultimate origin remains a mystery. The Argonne
Leadership Computing Facility (ALCF) will allocate significant
supercomputing resources towards unraveling one of the key
puzzles: the nature of the dark energy causing the universe to
accelerate its current expansion rate.
"""
analysis = wx.AnalysisManager( output_working_dir, title, description,
author="Salman Habib and Katrin Heitmann",
code_name="HACC",
code_version="HACC 0.1",
cores=128)
# -----------------------------------------------------------------------------
# Image size, camera angles, and view information
# -----------------------------------------------------------------------------
resolution = 500
#phi_angles = [ float(r) for r in range(0, 360, 15)]
#theta_angles = [ -60.0, -45.0, -30.0, -15.0, 0, 15.0, 30.0, 45.0, 60.0 ]
# A small number of camera angles for when we're testing our pipeline and such
phi_angles = [ 180.0, 270.0 ]
theta_angles = [ 15.0, 45.0 ]
distance = 420
rotation_axis = [0.0, 1.0, 0.0]
center_of_rotation = [64.69269952178001, 65.57341161370277, 65.48730944097042]
# -----------------------------------------------------------------------------
# Set up lists of files to process
# -----------------------------------------------------------------------------
# List of halo files to include
halo_file_format = 'DataExploration/Data/Cosmology/data/run-1/halos-%d.vtu'
halo_file_names = [ os.path.join(path_root, halo_file_format % i) for i in xrange(0, 451, 50) ]
# List of point density files to include
# Data/Cosmology/data/analysis/raw-particles
points_file_format = 'DataExploration/Data/Cosmology/data/analysis/raw-particles/499-%d.vtu'
points_file_names = [ os.path.join(path_root, points_file_format % i) for i in xrange(0, 451, 50) ]
# -----------------------------------------------------------------------------
# Create data exploration
# -----------------------------------------------------------------------------
id = 'composite'
title = '3D composite'
description = "Show Threshold Densities and Halos."
analysis.register_analysis(id, title, description, '{time}/{theta}/{phi}/{filename}', wx.CompositeImageExporter.get_data_type())
fng = analysis.get_file_name_generator(id)
# -----------------------------------------------------------------------------
# Set up pipelines
# -----------------------------------------------------------------------------
halos_reader = XMLUnstructuredGridReader( FileName=halo_file_names )
glyph = Glyph(Input = halos_reader, GlyphType="Sphere", GlyphTransform="Transform2" )
glyph.Scalars = ['POINTS', 'magnitude']
glyph.ScaleFactor = 0.005
glyph.ScaleMode = 'scalar'
glyph.GlyphMode = 'All Points'
glyph.GlyphType.ThetaResolution = 16
glyph.GlyphType.PhiResolution = 16
# Create reader for large halo particle files
point_reader = XMLUnstructuredGridReader( FileName=points_file_names )
outline = Outline(Input=point_reader)
outlineRep = Show(outline)
outlineRep.ColorArrayName = [None, '']
outlineRep.DiffuseColor = [0.0, 0.0, 0.0]
outlineRep.LineWidth = 1.0
# create a new 'Calculator'
calculator1 = Calculator(Input=point_reader)
calculator1.ResultArrayName = 'magnitude'
calculator1.Function = 'sqrt((vx*vx)+(vy*vy)+(vz*vz))'
# create some new 'Threshold' filters
#den1 = Threshold(Input = calculator1, Scalars=['POINTS', 'magnitude'], ThresholdRange=[900, 17000] )
den1 = Threshold(Input = calculator1, Scalars=['POINTS', 'magnitude'], ThresholdRange=[900, 3601] )
den2 = Threshold(Input = calculator1, Scalars=['POINTS', 'magnitude'], ThresholdRange=[700, 900] )
den3 = Threshold(Input = calculator1, Scalars=['POINTS', 'magnitude'], ThresholdRange=[500, 700] )
den4 = Threshold(Input = calculator1, Scalars=['POINTS', 'magnitude'], ThresholdRange=[300, 500] )
den5 = Threshold(Input = calculator1, Scalars=['POINTS', 'magnitude'], ThresholdRange=[100, 300] )
# -----------------------------------------------------------------------------
# Representations
# -----------------------------------------------------------------------------
###
### The second argument here is a View, but don't bother passing one in with
### your background color of choice set. The View you pass in will get clobbered
### with a custom one. Instead, use the camera handlers get_view() method to
### get your hands on the custom view created in the CompositeImageExporter, set
### your background color on that, then put it back using set_view(). See the
### "Customize view" section, below.
###
camera_handler = wx.ThreeSixtyCameraHandler(fng, None, phi_angles, theta_angles, center_of_rotation, rotation_axis, distance)
# Arguments: file_name_generator, view, focal_point, view_up, camera_position
#camera_handler = wx.FixCameraHandler(fng, None, [64.693, 65.573, 65.487], [0, 0, 1], [64.693, 65.573 + 420, 65.487] )
points_colors = [('POINT_DATA', 'magnitude')]
# These three arrays must be parallel to each other
composite_list = [ outline, glyph, den1, den2, den3, den4, den5 ]
composite_description = [ {'name': 'Outline'}, {'name': 'Halos'}, {'name': '[900, +]', 'parent':'Magnitude Thresholds'}, {'name': '[700, 900]', 'parent':'Magnitude Thresholds'}, {'name': '[500, 700]', 'parent':'Magnitude Thresholds'}, {'name': '[300, 500]', 'parent':'Magnitude Thresholds'}, {'name': '[100, 300]', 'parent':'Magnitude Thresholds'} ]
composite_colors = [ [('SOLID_COLOR', [0.0, 0.0, 0.0])], points_colors, points_colors, points_colors, points_colors, points_colors, points_colors ]
luts = {
"magnitude" : buildSpectralLUT('magnitude')
}
# -----------------------------------------------------------------------------
# Data exploration
# -----------------------------------------------------------------------------
exporter = wx.CompositeImageExporter(fng,
composite_list,
composite_colors,
luts,
camera_handler,
[resolution,resolution],
composite_description,
format='png') # 0, 0)
exporter.set_analysis(analysis)
# -----------------------------------------------------------------------------
# Customize view and some representations
# -----------------------------------------------------------------------------
exporter.view.Background = [1.0, 1.0, 1.0]
exporter.view.OrientationAxesVisibility = 0
exporter.view.CenterAxesVisibility = 0
calculatorRepr = Show(calculator1, exporter.view)
den1Repr = Show(den1, exporter.view)
den2Repr = Show(den2, exporter.view)
den3Repr = Show(den3, exporter.view)
den4Repr = Show(den4, exporter.view)
den5Repr = Show(den5, exporter.view)
# -----------------------------------------------------------------------------
# Processing
# -----------------------------------------------------------------------------
analysis.begin()
for time in range(0, len(halo_file_names), 1):
GetAnimationScene().TimeKeeper.Time = float(time)
fng.update_active_arguments(time=time)
print "moving to timestep ",time
# The point of these two lines is to allow the histogram filter to be
# updated for the time step and then to reset the ranges on the thresholds
# so that for this time step, each threshold contains roughly the same
# number of points.
UpdatePipeline(time)
#eachTimePipelineChanges(time, histogram1, calculator1, [den1, den2, den3, den4, den5])
# Now rescale the transfer function for the 'magnitude' array for the data
# range of the current step. You could argue this isn't what you'd want, but
# it results in more colorful data
#vtkSMPVRepresentationProxy.RescaleTransferFunctionToDataRange(calculatorRepr.SMProxy)
vtkSMTransferFunctionProxy.RescaleTransferFunction(den1Repr.SMProxy, 0.0, 900.0, False)
vtkSMTransferFunctionProxy.RescaleTransferFunction(den2Repr.SMProxy, 0.0, 900.0, False)
vtkSMTransferFunctionProxy.RescaleTransferFunction(den3Repr.SMProxy, 0.0, 900.0, False)
vtkSMTransferFunctionProxy.RescaleTransferFunction(den4Repr.SMProxy, 0.0, 900.0, False)
vtkSMTransferFunctionProxy.RescaleTransferFunction(den5Repr.SMProxy, 0.0, 900.0, False)
# Trigger the exporter to write out the next batch of images
exporter.UpdatePipeline(time)
analysis.end()
|
Kitware/cinema
|
scripts/data_generation/cosmology/hacc_composite_time.py
|
Python
|
bsd-3-clause
| 11,672
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'PaymentRequest.payment_methods'
db.add_column('skrill_paymentrequest', 'payment_methods',
self.gf('multiselectfield.db.fields.MultiSelectField')(max_length=100, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'PaymentRequest.payment_methods'
db.delete_column('skrill_paymentrequest', 'payment_methods')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'skrill.paymentrequest': {
'Meta': {'ordering': "['time']", 'object_name': 'PaymentRequest'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '18', 'decimal_places': '2'}),
'amount2': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '18', 'decimal_places': '2', 'blank': 'True'}),
'amount2_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'amount3': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '18', 'decimal_places': '2', 'blank': 'True'}),
'amount3_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'amount4': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '18', 'decimal_places': '2', 'blank': 'True'}),
'amount4_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'cancel_url': ('django.db.models.fields.URLField', [], {'default': "u'http://example.com/skrill/cancel/'", 'max_length': '240', 'null': 'True', 'blank': 'True'}),
'cancel_url_target': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'confirmation_note': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '240', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'EUR'", 'max_length': '3'}),
'custom_field_1': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_2': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_3': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_4': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_5': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'detail1_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail1_text': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail2_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail2_text': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail3_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail3_text': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail4_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail4_text': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail5_description': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'detail5_text': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'ext_ref_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'firstname': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'hide_login': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'EN'", 'max_length': '2'}),
'lastname': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'logo_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '240', 'null': 'True', 'blank': 'True'}),
'new_window_redirect': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pay_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pay_to_email': ('django.db.models.fields.EmailField', [], {'default': "'help@bbg-entertainment.com'", 'max_length': '50'}),
'payment_methods': ('multiselectfield.db.fields.MultiSelectField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '9', 'null': 'True', 'blank': 'True'}),
'prepare_only': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'recipient_description': ('django.db.models.fields.CharField', [], {'default': "'myBrainTraining.com'", 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'return_url': ('django.db.models.fields.URLField', [], {'default': "u'http://example.com/skrill/return/'", 'max_length': '240', 'null': 'True', 'blank': 'True'}),
'return_url_target': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'return_url_text': ('django.db.models.fields.CharField', [], {'default': "'Back to myBrainTraining.com'", 'max_length': '35', 'null': 'True', 'blank': 'True'}),
'rid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'status_url': ('django.db.models.fields.CharField', [], {'default': "u'http://example.com/skrill/status_report/'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'status_url2': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'submitted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'test': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'transaction_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'skrill.statusreport': {
'Meta': {'ordering': "['time']", 'object_name': 'StatusReport'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '18', 'decimal_places': '2'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'custom_field_1': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_2': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_3': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_4': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'custom_field_5': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'customer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'failed_reason_code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mb_amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '18', 'decimal_places': '2'}),
'mb_currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'mb_transaction_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'md5sig': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'merchant_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pay_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '50'}),
'pay_to_email': ('django.db.models.fields.EmailField', [], {'default': "'help@bbg-entertainment.com'", 'max_length': '50'}),
'payment_request': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['skrill.PaymentRequest']"}),
'payment_type': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'sha2sig': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'signal_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'valid': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
}
}
complete_apps = ['skrill']
|
byteweaver/django-skrill
|
skrill/migrations/0002_auto__add_field_paymentrequest_payment_methods.py
|
Python
|
bsd-3-clause
| 14,044
|
import exceptions
class LconfError (exceptions.Exception):
def __init__(self, args):
self.args = args
class OptionError (exceptions.Exception):
def __init__(self, args):
self.args = args
|
behlendorf/lustre
|
lustre/utils/Lustre/error.py
|
Python
|
gpl-2.0
| 214
|
#!/usr/bin/env python
"""Generates the PDB structures given a LightDock swarm results file"""
import argparse
import os
import numpy as np
from lightdock.error.lightdock_errors import LightDockError
from lightdock.util.logger import LoggingManager
from lightdock.constants import DEFAULT_LIST_EXTENSION, DEFAULT_LIGHTDOCK_PREFIX, \
DEFAULT_NMODES_REC, DEFAULT_NMODES_LIG, DEFAULT_REC_NM_FILE, DEFAULT_LIG_NM_FILE
from lightdock.pdbutil.PDBIO import parse_complex_from_file, write_pdb_to_file
from lightdock.structure.complex import Complex
from lightdock.mathutil.cython.quaternion import Quaternion
from lightdock.structure.nm import read_nmodes
from lightdock.prep.simulation import get_setup_from_file
from lightdock.util.parser import CommandLineParser, get_lightdock_structures
log = LoggingManager.get_logger('generate_conformations')
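# parse_output_file() below expects LightDock swarm result lines of the form
#   (tx, ty, tz, q0, q1, q2, q3[, rec_nm..., lig_nm...]) rec_id lig_id ...
# i.e. a parenthesised translation and quaternion (optionally followed by ANM
# extents) and then whitespace-separated receptor and ligand structure indices.
# This layout is inferred from the parsing code itself.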
def parse_output_file(lightdock_output, num_anm_rec, num_anm_lig):
translations = []
rotations = []
receptor_ids = []
ligand_ids = []
rec_extents = []
lig_extents = []
data_file = open(lightdock_output)
lines = data_file.readlines()
data_file.close()
counter = 0
for line in lines:
if line[0] == '(':
counter += 1
last = line.index(')')
coord = line[1:last].split(',')
translations.append([float(coord[0]), float(coord[1]), float(coord[2])])
rotations.append(Quaternion(float(coord[3]), float(coord[4]), float(coord[5]), float(coord[6])))
if len(coord) > 7:
rec_extents.append(np.array([float(x) for x in coord[7:7+num_anm_rec]]))
lig_extents.append(np.array([float(x) for x in coord[-num_anm_lig:]]))
raw_data = line[last+1:].split()
receptor_id = int(raw_data[0])
ligand_id = int(raw_data[1])
receptor_ids.append(receptor_id)
ligand_ids.append(ligand_id)
log.info("Read %s coordinate lines" % counter)
return translations, rotations, receptor_ids, ligand_ids, rec_extents, lig_extents
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog="conformer_conformations")
# Receptor
parser.add_argument("receptor_structures", help="receptor structures: PDB file or list of PDB files",
type=CommandLineParser.valid_file, metavar="receptor_structure")
# Ligand
parser.add_argument("ligand_structures", help="ligand structures: PDB file or list of PDB files",
type=CommandLineParser.valid_file, metavar="ligand_structure")
# Lightdock output file
parser.add_argument("lightdock_output", help="lightdock output file",
type=CommandLineParser.valid_file, metavar="lightdock_output")
# Number of glowworms
parser.add_argument("glowworms", help="number of glowworms", type=CommandLineParser.valid_integer_number)
# Optional, setup file
parser.add_argument("--setup", "-setup", "-s", help="Simulation setup file",
dest="setup_file", metavar="setup_file", type=CommandLineParser.valid_file,
default=None)
args = parser.parse_args()
# Load setup configuration if provided
setup = get_setup_from_file(args.setup_file) if args.setup_file else None
num_anm_rec = DEFAULT_NMODES_REC
num_anm_lig = DEFAULT_NMODES_LIG
if setup and setup['use_anm']:
num_anm_rec = setup['anm_rec']
num_anm_lig = setup['anm_lig']
# Receptor
structures = []
for structure in get_lightdock_structures(args.receptor_structures):
log.info("Reading %s receptor PDB file..." % structure)
atoms, residues, chains = parse_complex_from_file(structure)
structures.append({'atoms': atoms, 'residues': residues, 'chains': chains, 'file_name': structure})
log.info("%s atoms, %s residues read." % (len(atoms), len(residues)))
receptor = Complex.from_structures(structures)
# Ligand
structures = []
for structure in get_lightdock_structures(args.ligand_structures):
log.info("Reading %s ligand PDB file..." % structure)
atoms, residues, chains = parse_complex_from_file(structure)
structures.append({'atoms': atoms, 'residues': residues, 'chains': chains, 'file_name': structure})
log.info("%s atoms, %s residues read." % (len(atoms), len(residues)))
ligand = Complex.from_structures(structures)
# Output file
translations, rotations, receptor_ids, ligand_ids, \
rec_extents, lig_extents = parse_output_file(args.lightdock_output, num_anm_rec, num_anm_lig)
found_conformations = len(translations)
num_conformations = args.glowworms
if num_conformations > found_conformations:
log.warning("Number of conformations is bigger than found solutions (%s > %s)" % (num_conformations,
found_conformations))
log.warning("Clipping number of conformations to %s" % found_conformations)
num_conformations = found_conformations
# Destination path is the same as the lightdock output
destination_path = os.path.dirname(args.lightdock_output)
# If normal modes used, need to read them
nmodes_rec = nmodes_lig = None
if len(rec_extents):
nm_path = os.path.abspath(os.path.dirname(args.receptor_structures))
nmodes_rec = read_nmodes(os.path.join(nm_path, DEFAULT_REC_NM_FILE + '.npy'))
if len(lig_extents):
nm_path = os.path.abspath(os.path.dirname(args.ligand_structures))
nmodes_lig = read_nmodes(os.path.join(nm_path, DEFAULT_LIG_NM_FILE + '.npy'))
for i in range(num_conformations):
receptor_pose = receptor.atom_coordinates[receptor_ids[i]].clone()
ligand_pose = ligand.atom_coordinates[ligand_ids[i]].clone()
# Use normal modes if provided:
if len(rec_extents):
try:
for nm in range(num_anm_rec):
receptor_pose.coordinates += nmodes_rec[nm] * rec_extents[i][nm]
except ValueError:
log.error("Problem found on calculating ANM for receptor:")
log.error("Number of atom coordinates is: %s" % str(receptor_pose.coordinates.shape))
log.error("Number of ANM is: %s" % str(nmodes_rec.shape))
raise SystemExit
except IndexError:
log.error("Problem found on calculating ANM for receptor:")
log.error("If you have used anm_rec different than default, please use --setup")
raise SystemExit
if len(lig_extents):
try:
for nm in range(num_anm_lig):
ligand_pose.coordinates += nmodes_lig[nm] * lig_extents[i][nm]
except ValueError:
log.error("Problem found on calculating ANM for ligand:")
log.error("Number of atom coordinates is: %s" % str(receptor_pose.coordinates.shape))
log.error("Number of ANM is: %s" % str(nmodes_rec.shape))
raise SystemExit
except IndexError:
log.error("Problem found on calculating ANM for ligand:")
log.error("If you have used anm_lig different than default, please use --setup")
raise SystemExit
        # Rotate first, while the ligand is still at its initial position
ligand_pose.rotate(rotations[i])
ligand_pose.translate(translations[i])
write_pdb_to_file(receptor, os.path.join(destination_path, 'lightdock_%s.pdb' % i), receptor_pose)
write_pdb_to_file(ligand, os.path.join(destination_path, 'lightdock_%s.pdb' % i), ligand_pose)
log.info("Generated %d conformations" % num_conformations)
|
brianjimenez/lightdock
|
bin/post/lgd_generate_conformations.py
|
Python
|
gpl-3.0
| 7,790
|
#!/usr/bin/python
from __future__ import print_function
import sys
from PIL import Image
import math
try:
range = xrange
except NameError:  # Python 3 has no xrange
pass
if len(sys.argv) <= 2:
print("Usage: ./cinemut-nm-decode.py <inputfile> <outputfile>")
sys.exit(0)
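# What encode() does, as far as can be read from the two implementations below:
# it rebuilds a tangent-space normal that was packed into the RGB mean and the
# alpha channel.
#   u = mean(R, G, B) / 255 - 0.5   # x component, stored in the RGB average
#   v = A / 255 - 0.5               # y component, stored in alpha
# The vector (u, v, 0.25) is then normalised and written back as RGB, with the
# x/y components remapped to 0..255 and alpha forced to 255.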
try:
import numpy
def encode(im, len=len):
im.load()
r,g,b,a = im.split()
atan = numpy.arctan
tan = numpy.tan
e = 2.0
u = numpy.array(r.getdata(), dtype=numpy.float32)
u += numpy.array(g.getdata(), dtype=numpy.float32)
u += numpy.array(b.getdata(), dtype=numpy.float32)
v = numpy.array(a.getdata(), dtype=numpy.float32)
u *= 1.0/(3*255)
u -= 0.5
v *= 1.0/255
v -= 0.5
z = 1.0/numpy.sqrt(0.25*0.25 + u*u + v*v)
u *= z
v *= z
z *= 0.25
u *= 0.5
v *= 0.5
u += 0.5
v += 0.5
u *= 255
v *= 255
z *= 255
r.putdata(u)
g.putdata(v)
b.putdata(z)
a.putdata(numpy.zeros(len(u))+255)
im = Image.merge("RGBA", (r,g,b,a))
return im
except ImportError:
print("Warning: numpy not found. Install it to greatly improve performance.", file=sys.stderr)
def encode(im, int=int):
data = im.load()
sqrt = math.sqrt
e = 2.0
for x in range(im.size[0]):
for y in range(im.size[1]):
loc = x,y
r,g,b,a = data[loc]
u = (r+g+b) / (3*255.0) - 0.5
v = a / 255.0 - 0.5
z = 1.0/sqrt(0.25*0.25+u*u+v*v)
u *= z
v *= z
z *= 0.25
r = (u*0.5+0.5)*255
g = (v*0.5+0.5)*255
b = z*255
a = 255
data[loc] = int(r),int(g),int(b),int(a)
return im
im = Image.open(sys.argv[1])
if im.mode != "RGBA":
im = im.convert("RGBA")
im = encode(im)
im.save(sys.argv[2])
|
vinni-au/vega-strike
|
vegastrike/tools/cinemut-nm-decode.py
|
Python
|
gpl-2.0
| 2,052
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dailydream.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
KellyChan/python-examples
|
python/django/elf/dailydream/dailydream/manage.py
|
Python
|
mit
| 253
|
class A(object):
__slots__ = ["a","b","c"]
def __init__(self):
self.a="x"
self.b="xx"
self.c="xxx"
def __str__(self):
"list all slot attributes"
return "\n".join(["%s: %s" %
(self.__slots__[i],
self.__getattribute__(self.__slots__[i]))
for i in xrange(len(self.__slots__))])
if __name__ == "__main__":
a = A()
print a
|
gizela/gizela
|
example/code/slots.py
|
Python
|
gpl-3.0
| 560
|
import requests
import rdflib
from whyis import nanopub
import datetime
import pytz
import dateutil.parser
from dateutil.tz import tzlocal
from werkzeug.datastructures import FileStorage
from werkzeug.http import http_date
from setlr import FileLikeFromIter
import re
import os
from requests_testadapter import Resp
import magic
import mimetypes
import traceback
import sys
from whyis.namespace import np, prov, dc, sio
class Importer:
min_modified = 0
import_once = False
def last_modified(self, entity_name, db, nanopubs):
old_nps = [nanopubs.get(x) for x, in db.query('''select ?np where {
?np np:hasAssertion ?assertion.
?assertion a np:Assertion; prov:wasQuotedFrom ?mapped_uri.
}''', initNs=dict(np=np, prov=prov), initBindings=dict(mapped_uri=rdflib.URIRef(entity_name)))]
modified = None
for old_np in old_nps:
            m = old_np.modified
            # m = pytz.utc.localize(m)
            if m is None:
                continue
if modified is None or m > modified:
print(m, modified, old_np.modified)
modified = m
return modified
def load(self, entity_name, db, nanopubs):
entity_name = rdflib.URIRef(entity_name)
print("Fetching", entity_name)
old_nps = [nanopubs.get(x) for x, in db.query('''select ?np where {
?np np:hasAssertion ?assertion.
?assertion a np:Assertion; prov:wasQuotedFrom ?mapped_uri.
}''', initNs=dict(np=np, prov=prov), initBindings=dict(mapped_uri=rdflib.URIRef(entity_name)))]
updated = self.modified(entity_name)
if updated is None:
updated = datetime.datetime.now(pytz.utc)
#try:
g = self.fetch(entity_name)
#except Exception as e:
# print("Error loading %s: %s" % (entity_name, e))
# traceback.print_exc(file=sys.stdout)
# return
for new_np in nanopubs.prepare(g):
print("Adding new nanopub:", new_np.identifier)
self.explain(new_np, entity_name)
new_np.add((new_np.identifier, sio.isAbout, entity_name))
if updated is not None:
new_np.pubinfo.add(
(new_np.assertion.identifier, dc.modified, rdflib.Literal(updated, datatype=rdflib.XSD.dateTime)))
for old_np in old_nps:
new_np.pubinfo.add((old_np.assertion.identifier, prov.invalidatedAtTime,
rdflib.Literal(updated, datatype=rdflib.XSD.dateTime)))
nanopubs.publish(new_np)
for old_np in old_nps:
print("retiring", old_np.identifier)
nanopubs.retire(old_np.identifier)
def explain(self, new_np, entity_name):
activity = rdflib.BNode()
new_np.provenance.add((activity, rdflib.RDF.type, self.app.NS.whyis.KnowledgeImport))
new_np.provenance.add((new_np.assertion.identifier, prov.wasGeneratedBy, activity))
new_np.provenance.add((activity, prov.used, rdflib.URIRef(entity_name)))
new_np.provenance.add((new_np.assertion.identifier, prov.wasQuotedFrom, rdflib.URIRef(entity_name)))
new_np.provenance.add((new_np.assertion.identifier, prov.wasDerivedFrom, rdflib.URIRef(entity_name)))
|
tetherless-world/graphene
|
whyis/importer/importer.py
|
Python
|
apache-2.0
| 3,304
|
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
from PyQt4.Qt import QTextCharFormat
from calibre.ebooks.oeb.base import OEB_DOCS, OEB_STYLES
from calibre.ebooks.oeb.polish.container import guess_type
def syntax_from_mime(name, mime):
if mime in OEB_DOCS:
return 'html'
if mime in OEB_STYLES:
return 'css'
if mime in {guess_type('a.opf'), guess_type('a.ncx'), guess_type('a.xml'), 'application/oebps-page-map+xml'}:
return 'xml'
if mime.startswith('text/'):
return 'text'
if mime.startswith('image/') and mime.partition('/')[-1].lower() in {
'jpeg', 'jpg', 'gif', 'png'}:
return 'raster_image'
def editor_from_syntax(syntax, parent=None):
if syntax in {'text', 'html', 'css', 'xml'}:
from calibre.gui2.tweak_book.editor.widget import Editor
return Editor(syntax, parent=parent)
elif syntax == 'raster_image':
from calibre.gui2.tweak_book.editor.image import Editor
return Editor(syntax, parent=parent)
SYNTAX_PROPERTY = QTextCharFormat.UserProperty
class SyntaxTextCharFormat(QTextCharFormat):
def __init__(self, *args):
QTextCharFormat.__init__(self, *args)
self.setProperty(SYNTAX_PROPERTY, True)
|
insomnia-lab/calibre
|
src/calibre/gui2/tweak_book/editor/__init__.py
|
Python
|
gpl-3.0
| 1,430
|
from django.http import HttpResponse
class IcalResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'text/calendar')
data = data.to_ical()
super(IcalResponse, self).__init__(content=data, **kwargs)
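# A minimal usage sketch: any object exposing a ``to_ical()`` method can be
# returned this way.  The ``icalendar`` package (an assumption, not imported by
# this module) provides such objects:
#
#     from icalendar import Calendar
#
#     def calendar_view(request):
#         cal = Calendar()
#         cal.add('prodid', '-//example//')
#         cal.add('version', '2.0')
#         return IcalResponse(cal)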
|
Inter-Actief/alexia
|
alexia/http/response.py
|
Python
|
bsd-3-clause
| 269
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from imagekit.models import ProcessedImageField, ImageSpecField
from pilkit.processors import ResizeToFit, ResizeToFill
from bands.helpers import RandomFileName
from bands.models import ProfessionalTag
class Professional(models.Model):
user = models.ForeignKey(User, null=True, blank=True, verbose_name='Responsable', on_delete=models.CASCADE)
name = models.CharField(null=False, verbose_name='Nombre', max_length=240)
tags = models.ManyToManyField(ProfessionalTag, default=None, verbose_name='Categorías', related_name="pros")
description = models.TextField(null=False, blank=True)
latitude = models.FloatField(null=False)
longitude = models.FloatField(null=False)
image = ProcessedImageField(null=True, blank=True, upload_to=RandomFileName('venue/'), verbose_name='Imagen de cabecera',
processors=[ResizeToFit(900, 900, upscale=False)], format='JPEG')
profile_image = ProcessedImageField(null=True, blank=True, upload_to=RandomFileName('venue/'),
verbose_name='Imagen de perfil',
processors=[ResizeToFit(512, 512, upscale=False)], format='JPEG')
profile_thumbnail = ImageSpecField(source='profile_image',
processors=[ResizeToFill(150, 150, upscale=False)],
format='JPEG',
options={'quality': 70})
embed_code = models.TextField(null=True, blank=True,
                                  verbose_name='Código embed para escucha (Bandcamp, Soundcloud, Spotify...)')
    embed_media = models.TextField(null=True, blank=True, verbose_name='Código embed de vídeo (Youtube, Vimeo...)')
facebook_link = models.CharField(null=True, blank=True, verbose_name='Página de Facebook', max_length=250)
twitter_link = models.CharField(null=True, blank=True, verbose_name='Perfil de Twitter', max_length=250)
webpage_link = models.CharField(null=True, blank=True, verbose_name='Página web', max_length=250)
youtube_link = models.CharField(null=True, blank=True, verbose_name='Canal de Youtube', max_length=250)
presskit_link = models.CharField(null=True, blank=True, verbose_name='Presskit', max_length=250)
class Meta:
verbose_name = 'Profesional'
verbose_name_plural = 'Profesionales'
ordering = ['name']
permissions = (
("manage_pro", "Puede gestionar un perfil profesional"),
)
def __unicode__(self):
return self.name
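# Note: __unicode__ above is only consulted on Python 2.  A hedged equivalent
# for Python 3 (where Django ignores __unicode__) is to define __str__, or to
# wrap the class; this is illustrative, not part of the original app:
#
#     from django.utils.encoding import python_2_unicode_compatible
#
#     @python_2_unicode_compatible
#     class Professional(models.Model):
#         ...
#         def __str__(self):
#             return self.name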
|
InsulaCoworking/MusicCity
|
bands/models/professional.py
|
Python
|
gpl-2.0
| 2,695
|
"""
Tests of the neo.core.container.Container class
"""
import unittest
import numpy as np
try:
from IPython.lib.pretty import pretty
except ImportError as err:
HAVE_IPYTHON = False
else:
HAVE_IPYTHON = True
from neo.core.container import Container, unique_objs
class Test_unique_objs(unittest.TestCase):
'''
TestCase for unique_objs
'''
def test_some(self):
a = 1
b = np.array([3.14159265, 3.1415])
c = [1, '1', 2.3, '5 8']
d = {1, '2', 'spam'}
objs = [a, b, b, b, c, b, a, d, b, b, a, d, d, d, c, d, b, d, c, a]
targ = [a, b, c, d]
res = unique_objs(objs)
self.assertEqual(targ, res)
class TestContainerNeo(unittest.TestCase):
'''
TestCase to make sure basic initialization and methods work
'''
def test_init(self):
'''test to make sure initialization works properly'''
container = Container(name='a container', description='this is a test')
self.assertEqual(container.name, 'a container')
self.assertEqual(container.description, 'this is a test')
self.assertEqual(container.file_origin, None)
def test__children(self):
container = Container()
self.assertEqual(container._parent_objects, ())
self.assertEqual(container._parent_containers, ())
self.assertEqual(container._container_child_objects, ())
self.assertEqual(container._data_child_objects, ())
self.assertEqual(container._multi_child_objects, ())
self.assertEqual(container._child_properties, ())
self.assertEqual(container._repr_pretty_containers, ())
self.assertEqual(container._single_child_objects, ())
self.assertEqual(container._container_child_containers, ())
self.assertEqual(container._data_child_containers, ())
self.assertEqual(container._single_child_containers, ())
self.assertEqual(container._multi_child_containers, ())
self.assertEqual(container._child_objects, ())
self.assertEqual(container._child_containers, ())
self.assertEqual(container._multi_children, ())
self.assertEqual(container._single_children, ())
self.assertEqual(container.data_children, ())
self.assertEqual(container.container_children, ())
self.assertEqual(container.children, ())
self.assertEqual(container.parents, ())
self.assertEqual(container.data_children_recur, ())
self.assertEqual(container.container_children_recur, ())
self.assertEqual(container.children_recur, ())
self.assertEqual(container.filter(test=1), [])
self.assertEqual(container.filter(data=True, container=False, test=1),
[])
self.assertEqual(container.filter(data=False, container=False, test=1),
[])
self.assertEqual(container.filter(data=True, container=True, test=1),
[])
self.assertEqual(container.filter(data=False, container=True, test=1),
[])
self.assertEqual(container.size, {})
container.create_many_to_one_relationship()
container.create_many_to_many_relationship()
container.create_relationship()
def test_filter(self):
container = Container()
self.assertRaises(TypeError, container.filter, "foo")
class Test_Container_merge(unittest.TestCase):
'''
TestCase to make sure merge method works
'''
def setUp(self):
self.name1 = 'a container 1'
self.name2 = 'a container 2'
self.description1 = 'this is a test 1'
self.description2 = 'this is a test 2'
self.cont1 = Container(name=self.name1, description=self.description1)
self.cont2 = Container(name=self.name2, description=self.description2)
def test_merge__dict(self):
self.cont1.annotations = {'val1': 1, 'val2': 2.2, 'val3': 'test1'}
self.cont2.annotations = {'val2': 2.2, 'val3': 'test2',
'val4': [4, 4.4], 'val5': True}
ann1 = self.cont1.annotations
ann1c = self.cont1.annotations.copy()
ann2c = self.cont2.annotations.copy()
targ = {'val1': 1, 'val2': 2.2, 'val3': 'test1;test2',
'val4': [4, 4.4], 'val5': True}
self.cont1.merge(self.cont2)
self.assertEqual(ann1, self.cont1.annotations)
self.assertNotEqual(ann1c, self.cont1.annotations)
self.assertEqual(ann2c, self.cont2.annotations)
self.assertEqual(targ, self.cont1.annotations)
self.assertEqual(self.name1, self.cont1.name)
self.assertEqual(self.name2, self.cont2.name)
self.assertEqual(self.description1, self.cont1.description)
self.assertEqual(self.description2, self.cont2.description)
def test_merge__different_type_AssertionError(self):
self.cont1.annotations = {'val1': 1, 'val2': 2.2, 'val3': 'tester'}
self.cont2.annotations = {'val3': False, 'val4': [4, 4.4],
'val5': True}
self.cont1.merge(self.cont2)
self.assertEqual(self.cont1.annotations,
{'val1': 1,
'val2': 2.2,
'val3': 'MERGE CONFLICT',
'val4': [4, 4.4],
'val5': True})
def test_merge__unmergable_unequal_AssertionError(self):
self.cont1.annotations = {'val1': 1, 'val2': 2.2, 'val3': True}
self.cont2.annotations = {'val3': False, 'val4': [4, 4.4],
'val5': True}
self.cont1.merge(self.cont2)
self.assertEqual(self.cont1.annotations,
{'val1': 1,
'val2': 2.2,
'val3': 'MERGE CONFLICT',
'val4': [4, 4.4],
'val5': True})
@unittest.skipUnless(HAVE_IPYTHON, "requires IPython")
class Test_pprint(unittest.TestCase):
def test__pretty(self):
name = 'an object'
description = 'this is a test'
obj = Container(name=name, description=description)
res = pretty(obj)
targ = "Container with name: '{}' description: '{}'".format(name,
description)
self.assertEqual(res, targ)
if __name__ == "__main__":
unittest.main()
|
samuelgarcia/python-neo
|
neo/test/coretest/test_container.py
|
Python
|
bsd-3-clause
| 6,562
|
"""Base Command class, and related routines"""
import os
import sys
import tempfile
import traceback
import time
import optparse
from pip import cmdoptions
from pip.log import logger
from pip.download import PipSession
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
CommandError, PreviousBuildDirError)
from pip.backwardcompat import StringIO
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.status_codes import (SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
PREVIOUS_BUILD_DIR_ERROR)
from pip.util import get_prog
__all__ = ['Command']
class Command(object):
name = None
usage = None
hidden = False
def __init__(self):
parser_kw = {
'usage': self.usage,
'prog': '%s %s' % (get_prog(), self.name),
'formatter': UpdatingDefaultsHelpFormatter(),
'add_help_option': False,
'name': self.name,
'description': self.__doc__,
}
self.parser = ConfigOptionParser(**parser_kw)
# Commands should add options to this option group
optgroup_name = '%s Options' % self.name.capitalize()
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, self.parser)
self.parser.add_option_group(gen_opts)
def _build_session(self, options):
session = PipSession()
# Handle custom ca-bundles from the user
if options.cert:
session.verify = options.cert
# Handle timeouts
if options.timeout:
session.timeout = options.timeout
# Handle configured proxies
if options.proxy:
session.proxies = {
"http": options.proxy,
"https": options.proxy,
}
# Determine if we can prompt the user for authentication or not
session.auth.prompting = not options.no_input
return session
def setup_logging(self):
pass
def parse_args(self, args):
# factored out for testability
return self.parser.parse_args(args)
def main(self, args):
options, args = self.parse_args(args)
level = 1 # Notify
level += options.verbose
level -= options.quiet
level = logger.level_for_integer(4 - level)
complete_log = []
logger.add_consumers(
(level, sys.stdout),
(logger.DEBUG, complete_log.append),
)
if options.log_explicit_levels:
logger.explicit_levels = True
self.setup_logging()
#TODO: try to get these passing down from the command?
# without resorting to os.environ to hold these.
if options.no_input:
os.environ['PIP_NO_INPUT'] = '1'
if options.exists_action:
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
if options.require_venv:
# If a venv is required check if it can really be found
if not os.environ.get('VIRTUAL_ENV'):
logger.fatal('Could not find an activated virtualenv (required).')
sys.exit(VIRTUALENV_NOT_FOUND)
if options.log:
log_fp = open_logfile(options.log, 'a')
logger.add_consumers((logger.DEBUG, log_fp))
else:
log_fp = None
exit = SUCCESS
store_log = False
try:
status = self.run(options, args)
# FIXME: all commands should return an exit status
# and when it is done, isinstance is not needed anymore
if isinstance(status, int):
exit = status
except PreviousBuildDirError:
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = PREVIOUS_BUILD_DIR_ERROR
except (InstallationError, UninstallationError):
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except BadCommand:
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except CommandError:
e = sys.exc_info()[1]
logger.fatal('ERROR: %s' % e)
logger.info('Exception information:\n%s' % format_exc())
exit = ERROR
except KeyboardInterrupt:
logger.fatal('Operation cancelled by user')
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except:
logger.fatal('Exception:\n%s' % format_exc())
store_log = True
exit = UNKNOWN_ERROR
if store_log:
log_file_fn = options.log_file
text = '\n'.join(complete_log)
try:
log_file_fp = open_logfile(log_file_fn, 'w')
except IOError:
temp = tempfile.NamedTemporaryFile(delete=False)
log_file_fn = temp.name
log_file_fp = open_logfile(log_file_fn, 'w')
logger.fatal('Storing debug log for failure in %s' % log_file_fn)
log_file_fp.write(text)
log_file_fp.close()
if log_fp is not None:
log_fp.close()
return exit
def format_exc(exc_info=None):
if exc_info is None:
exc_info = sys.exc_info()
out = StringIO()
traceback.print_exception(*exc_info, **dict(file=out))
return out.getvalue()
def open_logfile(filename, mode='a'):
"""Open the named log file in append mode.
If the file already exists, a separator will also be printed to
the file to separate past activity from current activity.
"""
filename = os.path.expanduser(filename)
filename = os.path.abspath(filename)
dirname = os.path.dirname(filename)
if not os.path.exists(dirname):
os.makedirs(dirname)
exists = os.path.exists(filename)
log_fp = open(filename, mode)
if exists:
log_fp.write('%s\n' % ('-' * 60))
log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c')))
return log_fp
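# A hedged sketch of how a concrete command plugs into this base class: it sets
# ``name`` and ``usage``, registers its options on ``cmd_opts``, and implements
# ``run(options, args)`` returning one of the status codes imported above.  The
# command name and option below are illustrative, not real pip commands.
#
#     class EchoCommand(Command):
#         """Print the positional arguments."""
#         name = 'echo'
#         usage = '%prog [options] <args>...'
#
#         def __init__(self):
#             super(EchoCommand, self).__init__()
#             self.cmd_opts.add_option('--upper', action='store_true',
#                                      dest='upper', default=False)
#             self.parser.add_option_group(self.cmd_opts)
#
#         def run(self, options, args):
#             for arg in args:
#                 print(arg.upper() if options.upper else arg)
#             return SUCCESS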
|
alquerci/pip
|
pip/basecommand.py
|
Python
|
mit
| 6,530
|
##
##SMART FP7 - Search engine for MultimediA enviRonment generated contenT
##Webpage: http://smartfp7.eu
##
## This Source Code Form is subject to the terms of the Mozilla Public
## License, v. 2.0. If a copy of the MPL was not distributed with this
## file, You can obtain one at http://mozilla.org/MPL/2.0/.
##
## The Original Code is Copyright (c) 2012-2013 Atos
## All Rights Reserved
##
## Contributor(s):
## Jose Miguel Garrido, jose.garridog at atos dot net
##
"""The third Multimedia Data Manager.
This module stores the metadata from XML files to a SQLite database.
The video generator uses this database to create the actual video clips"""
# This file must work on Python >= 2.7 and >= 3.3
import sys
p_v = 2 if sys.version_info < (3,) else 3
if p_v == 2:
import urllib, urllib2
import ConfigParser as cp
else:
import urllib.request, urllib.parse, urllib.error
import configparser as cp
import json
import couchdb
import argparse
import logging
import time, datetime
def getConf(filename,section):
dict1 = {}
config = cp.ConfigParser()
config.read(filename)
options = config.options(section)
for option in options:
try:
dict1[option] = config.get(section, option)
except:
print("exception on {}!".format(option))
dict1[option] = None
dict1["wait_time"] = int(dict1["wait_time"])
dict1["couch_server"] = dict1["couch_server"] if (dict1["couch_server"]!="None") else None
return dict1
def createURL(conf):
query = { "@id": conf["id"] }
if conf["search_type"] == "textual":
command = "txtSearch"
if conf["search_for"] == "venue":
target = "venues"
else:
target = "activities"
if p_v == 2:
url = '{}/{}/{}?label=%22{}%22'.format(conf["url_base"],command,
target,
urllib.quote(conf["keywords"]))
else:
url = '{}/{}/{}?label=%22{}%22'.format(conf["url_base"],command,
target,
urllib.parse.quote(conf["keywords"]))
query.update({ "keywords":conf["keywords"].split(),
"searched_item":conf["search_for"],
"search_type":"textual" })
elif conf["search_type"] == "geo-search":
command = "structuredSearch"
query.update({"search_type":"geo-search"})
if conf["search_for"] == "venue":
query.update({"searched_item":"venues"})
if conf["coord_type"] == "square":
target = "locRec"
query.update({"search_coords":[conf["coord1_long"],conf["coord1_lat"],
conf["coord2_long"],conf["coord2_lat"]]})
else:
target = "locCirc"
query.update({"search_coords":[conf["coord1_long"],conf["coord1_lat"],
conf["radius"]]})
else:
query.update({"searched_item":"activities"})
if conf["coord_type"] == "square":
target = "actRec"
query.update({"search_coords":[conf["coord1_long"],conf["coord1_lat"],
conf["coord2_long"],conf["coord2_lat"]]})
else:
target = "actCirc"
query.update({"search_coords":[conf["coord1_long"],conf["coord1_lat"],
conf["radius"]]})
if target in ("actCirc","locCirc"):
url = '{}/{}/{}?lat1={}&long1={}&radius={}'.format(conf["url_base"],
command,
target,
conf["coord1_lat"],
conf["coord1_long"],
conf["radius"])
else:
url = '{}/{}/{}?lat1={}&long1={}&lat2={}&long2={}'.format(conf["url_base"],
command,target,
conf["coord1_lat"],
conf["coord1_long"],
conf["coord2_lat"],
conf["coord2_long"])
logging.debug(url)
logging.debug(query)
return url, query
def formatItem(key,doc,time_query,query_info,num):
data = {}
data["time"] = time_query
ldm_result = {}
ldm_result.update(query_info)
ldm_result["key"] = key
if query_info["search_type"] == "textual":
ldm_result["location"] = doc["location"]
else:
ldm_result["location"] = [i["location"] for i in doc["location"]]
ldm_result["location_long"] = [i["long"] for i in doc["location"]]
ldm_result["location_lat"] = [i["lat"] for i in doc["location"]]
if "isPrimaryTopicOf" in doc:
ldm_result["is_primary_topic_of"] = doc["isPrimaryTopicOf"]
if "txt" in doc:
ldm_result["txt"] = doc["txt"]
if "label" in doc:
ldm_result["label"] = doc["label"]
if "date" in doc:
ldm_result["date"] = doc["date"]
if "name" in doc:
ldm_result["name"] = doc["name"]
if "attendance" in doc:
ldm_result["attendance"] = doc["attendance"]
data["ldm_result"] = ldm_result
timestamp = time.time()+(num/1000.0)
time_txt = datetime.datetime.utcfromtimestamp(timestamp).isoformat()+"Z"
item = { "_id":time_txt, "data":data, "timestamp":str(int(timestamp*1000))}
# check for not intended results
remainder = set(doc.keys()) - set(("location", "isPrimaryTopicOf", "txt", "label","date","name","attendance") )
if remainder:
logging.warning("WARNING")
logging.warning(remainder)
logging.debug(item)
return item
def storeItem(db,item):
db.save(item)
if __name__ == '__main__':
    # initialization
logging.basicConfig(level=logging.DEBUG,format='%(asctime)s-> %(message)s')
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--conf_file",type=str,
help="configuration file path")
parser.add_argument("-s", "--section",type=str,
help="section of the configuration to apply")
args = parser.parse_args()
conf_file = args.conf_file if args.conf_file else "ldm_feeder_conf.ini"
    section = args.section if args.section else "default"
while True: #until loop
conf = getConf(conf_file,section)
couch = couchdb.Server(conf["couch_server"]) if conf["couch_server"] else couchdb.Server()
db = couch[conf["couch_database"]]
#the program itself
url, query_info = createURL(conf)
if p_v == 2:
response = urllib2.urlopen(url).read()
else:
response = urllib.request.urlopen(url).read()
response = response.decode("utf-8")
response = json.loads(response)
if "locations" in response["data"]:
items = "locations"
elif "activities" in response["data"]:
items = "activities"
for num, i in enumerate(response["data"][items]):
responseItem = formatItem(i,response["data"][items][i],
response["data"]["time"],query_info, num)
storeItem(db, responseItem)
if conf["wait_time"] == 0:
break
else:
time.sleep(conf["wait_time"])
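# A hypothetical ldm_feeder_conf.ini section matching the keys read by
# getConf() above; every value is a placeholder, not a real endpoint:
#
#     [default]
#     url_base = http://localhost:8080/linkedDataManager/api
#     id = http://example.org/query/1
#     search_type = textual
#     search_for = venue
#     keywords = live music
#     coord_type = circle
#     coord1_lat = 51.5
#     coord1_long = -0.12
#     coord2_lat = 51.6
#     coord2_long = -0.10
#     radius = 500
#     wait_time = 0
#     couch_server = None
#     couch_database = ldm_results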
|
SmartSearch/Edge-Node
|
LinkedDataManager/feed_generator/ldm_feeder.py
|
Python
|
mpl-2.0
| 8,016
|
import os
import json
import datetime
import pprint
import logging
log = logging.getLogger()
pp = pprint.PrettyPrinter()
class Cache(object):
def __init__(self, app, frequency):
self.app = app
self.frequency = frequency
self.data = self.init_data()
self.stored_data = self.read_old_cache()
dt = datetime.datetime.now()
dt = dt.replace(minute=0, second=0, microsecond=0)
self.date = dt.timestamp()
self.stored_data['passed'].append([self.date, 0])
self.stored_data['failed'].append([self.date, 0])
self.stored_data['nonapplicable'].append([self.date, 0])
self.stored_data['exception'].append([self.date, 0])
def init_data(self):
d = dict()
d['achievement-results-by-category'] = dict()
d['achievement-results-sunburn-chart'] = dict()
d['achievement-results-sunburn-chart']['name'] = 'init'
d['achievement-results-sunburn-chart']['children'] = list()
d['achievement-results-sunburn-chart']['root'] = 'root'
return d
def read_old_cache(self):
cache_db = os.path.join(self.app['DB_CACHE_PATH'])
achievements_cache_path = os.path.join(cache_db, 'cache-achievements.db')
if not os.path.isfile(achievements_cache_path):
with open(achievements_cache_path, 'w') as file:
data = dict()
data = json.dumps(data, sort_keys=True,indent=4, separators=(',', ': '))
file.write(data)
with open(achievements_cache_path, 'r') as file:
data = json.load(file)
if 'achievements-by-time' not in data:
self.data['achievements-by-time'] = dict()
self.data['achievements-by-time']['passed'] = list()
self.data['achievements-by-time']['failed'] = list()
self.data['achievements-by-time']['nonapplicable'] = list()
self.data['achievements-by-time']['exception'] = list()
return self.data['achievements-by-time']
return data['achievements-by-time']
def write_cache_file(self):
cache_db = self.app['DB_CACHE_PATH']
achievements_cache_path = os.path.join(cache_db, 'cache-achievements.db')
self.data['achievements-by-time'] = dict()
self.data['achievements-by-time'] = self.stored_data
# self.data = {**self.data, **self.stored_data}
data = json.dumps(self.data, sort_keys=True,indent=4, separators=(',', ': '))
with open(achievements_cache_path, 'w') as f:
f.write(data)
def update_sunburn_size_entry(self, buffer_stat, sub_category):
updated = False
for d in buffer_stat['children']:
if d['name'] == sub_category and 'size' in d:
d['size'] += 1
updated = True
return buffer_stat['children'], updated
def add_new_sunburn_child_entry(self, root, sub_category):
entry = dict()
entry['name'] = str(sub_category)
entry['root'] = root
entry['children'] = list()
return entry
def initiate_new_sunburn_size_entry(self, root, sub_category):
entry = dict()
entry['name'] = str(sub_category)
entry['root'] = root
entry['size'] = 1
return entry
def next_hierarchy_level(self, buffer_stat, sub_category):
for d in buffer_stat['children']:
if d['name'] == sub_category and 'children' in d:
buffer_stat = d
return True, buffer_stat
return False, None
def update(self, achievement):
if self.frequency != "daily":
# FIXME: daily or maybe hourly?
return None
category = str(achievement.container.categories)
result = achievement.result
if category not in self.data['achievement-results-by-category']:
self.data['achievement-results-by-category'][category] = list()
self.data['achievement-results-by-category'][category].append(result)
self.update_sunburn_data(category, result)
# achievements are saved by date/time;
# index '-1' is the latest date entry,
# and each entry is a list of [date, amount_of_results]
if result == 'passed':
self.stored_data['passed'][-1][1] += 1
elif result == 'failed':
self.stored_data['failed'][-1][1] += 1
elif result == 'nonapplicable':
self.stored_data['nonapplicable'][-1][1] += 1
elif result == 'exception':
self.stored_data['exception'][-1][1] += 1
else:
log.error('unassignable result: {}'.format(result))
def update_sunburn_data(self, category, result):
# Save results into the sunburn data structure, which uses keys such as
# name, children and size (root is added for the color selection method).
# The children/parent hierarchy follows the category levels, e.g. team->topic->subtopic.
# For the input categories ['team:bar'] and ['team:foo', 'topic:ip'] the data
# structure looks like this (size is the number of tests):
#
# "children": [
# {
# "name": "team:bar",
# "root": "team:bar",
# "size": 1
# },
# {
# "name": "team:foo",
# "root": "team:foo",
# "children": [
# {
# "name": "topic:ip",
# "root": "team:foo",
# "size": 1
# }
# ]
# }
# ]
buffer_stat = self.data['achievement-results-sunburn-chart']
cat = eval(category)
root = cat[0]
# cat is a list of categories ordered in a hierarchy
# e.g. ['team:red', 'topic:foo', 'subtopic:foobar']
# root is the very first sub category
for j in range(0,len(cat)):
if j < len(cat)-1:
# there are more category levels below this one,
# so descend further into the hierarchy
found, next_level_entry = self.next_hierarchy_level(buffer_stat, cat[j])
if found:
buffer_stat = next_level_entry
continue
entry = self.add_new_sunburn_child_entry(root, cat[j])
buffer_stat['children'].append(entry)
buffer_stat = entry
continue
else:
# at the last level of the category hierarchy the size has to be
# added, or updated if a size entry is already available
updated_entry, updated = self.update_sunburn_size_entry(buffer_stat, cat[j])
if updated:
buffer_stat['children'] = updated_entry
continue
entry = self.initiate_new_sunburn_size_entry(root, cat[j])
buffer_stat['children'].append(entry)
def sort(self, data):
data = sorted(data, key=lambda k: k['name'])
for d in data:
if 'children' in d:
d['children'] = self.sort(d['children'])
else:
continue
return data
def order_for_sunburn(self):
# order categories by name so that a category with only one level is not separated
# in the list from the same root category that has more than one level
# e.g. ['team:red'] and ['team:red', 'topic:ip']
all_categories = self.data['achievement-results-sunburn-chart']['children']
self.data['achievement-results-sunburn-chart']['children'] = self.sort(all_categories)
cache_db = self.app['DB_CACHE_PATH']
achievements_cache_path = os.path.join(cache_db, 'cache-achievements.db')
data = json.dumps(self.data, sort_keys=True,indent=4, separators=(',', ': '))
with open(achievements_cache_path, 'w') as f:
f.write(data)
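if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module: it calls
    # update_sunburn_data() directly to build the chart structure described
    # in the comments above. The app dict with only DB_CACHE_PATH and the
    # temporary cache directory are assumptions made purely for this demo.
    import tempfile
    demo_cache = Cache({'DB_CACHE_PATH': tempfile.mkdtemp()}, 'daily')
    demo_cache.update_sunburn_data("['team:bar']", 'passed')
    demo_cache.update_sunburn_data("['team:foo', 'topic:ip']", 'passed')
    demo_cache.order_for_sunburn()
    pp.pprint(demo_cache.data['achievement-results-sunburn-chart'])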
|
hgn/hippod
|
hippod/cache_achievements.py
|
Python
|
mit
| 8,131
|
# Simple test program to debug + play with assassination models.
from os import path
import sys
from pprint import pprint
sys.path.append(path.abspath(path.join(path.dirname(__file__), '..')))
from shadowcraft.calcs.rogue.Aldriana import AldrianasRogueDamageCalculator
from shadowcraft.calcs.rogue.Aldriana import settings
from shadowcraft.objects import buffs
from shadowcraft.objects import race
from shadowcraft.objects import stats
from shadowcraft.objects import procs
from shadowcraft.objects import talents
from shadowcraft.objects import artifact
from shadowcraft.core import i18n
# Set up language. Use 'en_US', 'es_ES', 'fr' for specific languages.
test_language = 'local'
i18n.set_language(test_language)
# Set up level/class/race
test_level = 110
test_race = race.Race('pandaren')
test_class = 'rogue'
test_spec = 'outlaw'
# Set up buffs.
test_buffs = buffs.Buffs('short_term_haste_buff',
'flask_wod_agi',
'food_wod_versatility')
# Set up weapons. mark_of_the_frostwolf mark_of_the_shattered_hand
test_mh = stats.Weapon(4821.0, 2.6, 'sword', None)
test_oh = stats.Weapon(4821.0, 2.6, 'sword', None)
# Set up procs.
#test_procs = procs.ProcsList(('assurance_of_consequence', 588),
#('draenic_philosophers_stone', 620), 'virmens_bite', 'virmens_bite_prepot',
#'archmages_incandescence') #trinkets, other things (legendary procs)
test_procs = procs.ProcsList()
# Set up gear buffs.
test_gear_buffs = stats.GearBuffs('gear_specialization') #tier buffs located here
# Set up a calcs object..
test_stats = stats.Stats(test_mh, test_oh, test_procs, test_gear_buffs,
agi=21122,
stam=28367,
crit=6306,
haste=3260,
mastery=3706,
versatility=3486,)
# Initialize talents..
test_talents = talents.Talents('1010022', test_spec, test_class, level=test_level)
#initialize artifact traits..
test_traits = artifact.Artifact(test_spec, test_class, '100000000000000000')
# Set up settings.
test_cycle = settings.OutlawCycle(blade_flurry=False,
jolly_roger_reroll=1,
grand_melee_reroll=1,
shark_reroll=1,
true_bearing_reroll=1,
buried_treasure_reroll=1,
broadsides_reroll=1,
between_the_eyes_policy='never'
)
test_settings = settings.Settings(test_cycle, response_time=.5, duration=360,
adv_params="", is_demon=True, num_boss_adds=0,
finisher_threshold=5)
# Build a DPS object.
calculator = AldrianasRogueDamageCalculator(test_stats, test_talents, test_traits, test_buffs, test_race, test_spec, test_settings, test_level)
# Compute DPS Breakdown.
dps_breakdown = calculator.get_dps_breakdown()
total_dps = sum(entry[1] for entry in dps_breakdown.items())
# Compute EP values.
#ep_values = calculator.get_ep(baseline_dps=total_dps)
#tier_ep_values = calculator.get_other_ep(['rogue_t16_2pc', 'rogue_t16_4pc'])
#mh_enchants_and_dps_ep_values, oh_enchants_and_dps_ep_values =
#calculator.get_weapon_ep(dps=True, enchants=True)
#talent_ranks = calculator.get_talents_ranking()
#trait_ranks = calculator.get_trait_ranking()
def max_length(dict_list):
max_len = 0
for i in dict_list:
dict_values = i.items()
if max_len < max(len(entry[0]) for entry in dict_values):
max_len = max(len(entry[0]) for entry in dict_values)
return max_len
def pretty_print(dict_list, total_sum=1., show_percent=False):
max_len = max_length(dict_list)
for i in dict_list:
dict_values = i.items()
dict_values.sort(key=lambda entry: entry[1], reverse=True)
for value in dict_values:
#print value[0] + ':' + ' ' * (max_len - len(value[0])),
#str(value[1])
if show_percent and ("{0:.2f}".format(float(value[1]) / total_sum)) != '0.00':
print value[0] + ':' + ' ' * (max_len - len(value[0])), str(value[1]) + ' (' + str("{0:.2f}".format(100 * float(value[1]) / total_sum)) + '%)'
else:
print value[0] + ':' + ' ' * (max_len - len(value[0])), str(value[1])
print '-' * (max_len + 15)
dicts_for_pretty_print = [#ep_values,
#tier_ep_values,
#talent_ranks,
#trinkets_ep_value,
dps_breakdown,
#trait_ranks
]
pretty_print(dicts_for_pretty_print)
print ' ' * (max_length(dicts_for_pretty_print) + 1), total_dps, ("total damage per second.")
#pprint(talent_ranks)
|
Mystler/ShadowCraft-Engine
|
scripts/outlaw.py
|
Python
|
lgpl-3.0
| 4,715
|
import re
from valideer import *
class where(Pattern):
name = "where"
regexp = re.compile(r"[\w&<>|*/\+\-\(\)]+")
rp = re.compile(r"(\w+)")
def validate(self, value, adapt=True):
super(where, self).validate(value)
keys = self.rp.findall(value)
value = self.rp.sub(r'%(\1)s', value)
value = value.replace(' ', ' and ')
value = value.replace('|', ' or ')
value = value.replace('&', ' and ')
return keys, '('+value+')'
class boolean(Validator):
name = "bool"
true = ("y", "yes", "1", "t", "true", "on")
false = ("n", "no", "0", "f", "false", "off")
def validate(self, value, adapt=True):
if type(value) is bool:
return value
_value = str(value).lower()
if _value in self.true:
return True if adapt else value
elif _value in self.false:
return False if adapt else value
else:
self.error("bool is not valid")
# class figure(Validator):
# name = "figure"
# tables = HomogeneousSequence(Pattern(r"(from|(inner|outer|full|right) join) .*"))
# seed = None # todo
# arguments = Mapping(Pattern(r"(\&\-)\w+(\[\])?", seed))
# validate = Object(required=dict(tables=tables,
# arguments=arguments,
# outline=Mapping("string", Type(dict))),
# optional=dict(help="string"),
# additional=False)
|
stevepeak/inquiry
|
inquiry/validators.py
|
Python
|
apache-2.0
| 1,497
|
#!/usr/bin/env python
'''
For Python 2.7
'''
import re, sys, os, glob, time
from twisted.web import server, resource
from twisted.internet import reactor
timestamp_re = re.compile("<!\[CDATA\[(\d+)\]\]>")
timestamp_format = "<![CDATA[{timestamp}]]>"
class BicingSimResource(resource.Resource):
isLeaf = True
def __init__(self, keep_timestamp = False):
data_files_dir = os.path.dirname(os.path.realpath(__file__))
data_files_paths = glob.glob(os.path.join(data_files_dir, "bicing*_UTC.xml"))
# whether to keep the original timestamp when serving files
# or using the current time
self._keep_timestamp = keep_timestamp
self._files = []
# take all the files
for data_file_path in data_files_paths:
with open(data_file_path, 'r') as data_file:
self._files.append(data_file.read())
print "Loaded files", data_files_paths
# -1 to start at 0 in first request
self._current_file = -1
def render_GET(self, request):
request.setHeader("content-type", "text/xml")
self._current_file = (self._current_file + 1) % len(self._files)
print "Using file", self._current_file
if self._keep_timestamp:
return self._files[self._current_file]
else:
now_timestamp = int(time.time())
return timestamp_re.sub(timestamp_format.format(timestamp = now_timestamp), self._files[self._current_file])
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage:', sys.argv[0], '<port>', '[keep]'
print '\texample:', sys.argv[0], '9999'
sys.exit(1)
port = int(sys.argv[1])
keep_timestamp = False
if len(sys.argv) >= 3:
print "Keeping original timestamps"
keep_timestamp = True
print 'Running service at http://localhost:' + str(port)
reactor.listenTCP(port, server.Site(BicingSimResource(keep_timestamp)))
reactor.run()
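# Example session (a sketch, not part of the original script; the port is arbitrary):
#   $ python bicing_test_service.py 9999
#   $ curl http://localhost:9999/
# Each GET returns the next bicing*_UTC.xml file, with the CDATA timestamp
# rewritten to the current time unless 'keep' is passed on the command line.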
|
juanrh/bicing-bcn
|
storm/ingestion/src/test/resources/bicing_test_service.py
|
Python
|
apache-2.0
| 1,975
|
import datetime
import numpy as np
# Class which can have attributes set.
class expando(object): pass
class Dataserver:
'''
Loads in and serves data between the dates indicated.
Forecast is returned for firstday for the period from
firstday + day_ahead_delta until day_ahead_period later.
'''
def __init__(self,firstday=datetime.datetime(2012,01,01,00),lastday=datetime.datetime(2012,01,03,00),day_ahead_delta=24,day_ahead_period=24, forecastdir='../../Data/nodal_fc', signaldir='../../Data/nodal_ts', \
windforecastfilename='WNDpower_onshore-V90_3MW_offshore-V164_7MW_offshore.npz', \
solarforecastfilename='PVpower_Scheuten215IG.npz', \
windsignalfilename='WNDpower_onshore-V90_3MW_offshore-V164_7MW_offshore-{0:04d}{1:02d}.npz', \
solarsignalfilename='PVpower_Scheuten215IG-{0:04d}{1:02d}.npz', \
loadsignalfilename='load-{0:04d}{1:02d}.npz'):
self.firstdate = firstday
self.curdate = firstday
self.lastdate = lastday
self.dadelta = day_ahead_delta
self.datimedelta = datetime.timedelta(hours=day_ahead_delta)
self.period = day_ahead_period
self.timeperiod = datetime.timedelta(hours=day_ahead_period)
self.files = expando()
self.files.fcdir = forecastdir
self.files.tsdir = signaldir
self.files.windfc = windforecastfilename
self.files.solarfc = solarforecastfilename
self.files.windts = windsignalfilename
self.files.solarts = solarsignalfilename
self.files.loadts = loadsignalfilename
self.files.fcdirformat = '{0:04d}{1:02d}{2:02d}{3:02d}'
self.cache = expando()
def next(self):
    if self.curdate > self.lastdate:
        raise StopIteration
    # serve the data for the current date first, then advance, so that the
    # first call returns the forecast for firstday as documented above
    data = self.load_current_data()
    self.curdate += self.timeperiod
    return data
def __reset__(self):
self.curdate = self.firstdate
def load_current_data(self):
'''
Returns data for the current date/time.
Out: ((times_fc,wind_fc, solar_fc),(times_ts,wind_ts, solar_ts, load_ts))
'''
return (self.load_current_forecasts(),self.load_current_signals())
def load_current_forecasts(self):
curfcdir = self.files.fcdirformat.format(self.curdate.year, self.curdate.month, self.curdate.day, self.curdate.hour)
wind_fc = np.load(self.files.fcdir + '/' + curfcdir +'/' + self.files.windfc)
times_fc, wind_fc = wind_fc['dates'],wind_fc['data']
solar_fc = np.load(self.files.fcdir + '/' + curfcdir + '/' + self.files.solarfc)
solar_fc = solar_fc['data']
return (times_fc[self.dadelta:self.dadelta + self.period],wind_fc[self.dadelta:self.dadelta + self.period], solar_fc[self.dadelta:self.dadelta + self.period])
def load_current_signals(self):
tsdate = self.curdate + self.datimedelta
wind_ts = np.load(self.files.tsdir + '/' + self.files.windts.format(tsdate.year,tsdate.month))
times_ts, wind_ts = wind_ts['dates'],wind_ts['data']
indx = times_ts.tolist().index(tsdate)
solar_ts = np.load(self.files.tsdir + '/' + self.files.solarts.format(tsdate.year,tsdate.month))
solar_ts = solar_ts['data']
load_ts = np.load(self.files.tsdir + '/' + self.files.loadts.format(tsdate.year,tsdate.month))
load_ts = load_ts['data']
return (times_ts[indx:indx+ self.period],wind_ts[indx:indx+ self.period],solar_ts[indx:indx+ self.period],load_ts[indx:indx+ self.period])
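if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module. It assumes the
    # default forecast/signal directories and file names exist on disk;
    # adjust the constructor arguments for other data locations.
    server = Dataserver(firstday=datetime.datetime(2012, 1, 1, 0),
                        lastday=datetime.datetime(2012, 1, 2, 0))
    try:
        while True:
            forecasts, signals = server.next()
            times_fc, wind_fc, solar_fc = forecasts
            print(times_fc[0])
    except StopIteration:
        pass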
|
DTU-ELMA/European_Dataset
|
Scripts/Dataserver/data_server.py
|
Python
|
apache-2.0
| 3,189
|
import math
def main():
"""Main program"""
for n in xrange(1, 500):
for m in xrange(n + 1, 500):
a = int(math.pow(m, 2) - math.pow(n, 2))
b = 2 * m * n
c = int(math.pow(m, 2) + math.pow(n, 2))
if a + b + c == 1000:
product = a * b * c
print "a = %d, b = %d, c = %d" % (a, b, c)
print "Product = %d" % (product)
if __name__ == '__main__':
main()
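# Worked instance of Euclid's formula for checking the output: m = 20, n = 5
# gives a = 375, b = 200, c = 425, whose sum is 1000 and whose product is 31875000.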
|
SenFuerte/dt211-3-cloud
|
Euler/Sol_nr9.py
|
Python
|
mit
| 376
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '0006_require_contenttypes_0002'),
('gallery', '0010_auto_20150911_1549'),
]
operations = [
migrations.RenameModel('GalleryGroup', 'GalleryGroupPermission'),
migrations.RenameModel('GalleryUser', 'GalleryUserPermission'),
]
|
picobyte/photo-gallery
|
gallery/gallery/migrations/0011_auto_20150911_1700.py
|
Python
|
gpl-3.0
| 539
|
#!/usr/bin/env python3
# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import argparse
import os
import subprocess
import sys
def setup():
global args, workdir
programs = ['ruby', 'git', 'make', 'wget', 'curl']
if args.kvm:
programs += ['apt-cacher-ng', 'python-vm-builder', 'qemu-kvm', 'qemu-utils']
elif args.docker:
if not os.path.isfile('/lib/systemd/system/docker.service'):
dockers = ['docker.io', 'docker-ce']
for i in dockers:
return_code = subprocess.call(['sudo', 'apt-get', 'install', '-qq', i])
if return_code == 0:
break
if return_code != 0:
print('Cannot find any way to install Docker.', file=sys.stderr)
sys.exit(1)
else:
programs += ['apt-cacher-ng', 'lxc', 'debootstrap']
subprocess.check_call(['sudo', 'apt-get', 'install', '-qq'] + programs)
if not os.path.isdir('gitian.sigs'):
subprocess.check_call(['git', 'clone', 'https://github.com/bitcoin-core/gitian.sigs.git'])
if not os.path.isdir('bitcoin-detached-sigs'):
subprocess.check_call(['git', 'clone', 'https://github.com/bitcoin-core/bitcoin-detached-sigs.git'])
if not os.path.isdir('gitian-builder'):
subprocess.check_call(['git', 'clone', 'https://github.com/devrandom/gitian-builder.git'])
if not os.path.isdir('bitcoin'):
subprocess.check_call(['git', 'clone', 'https://github.com/bitcoin/bitcoin.git'])
os.chdir('gitian-builder')
make_image_prog = ['bin/make-base-vm', '--suite', 'focal', '--arch', 'amd64']
if args.docker:
make_image_prog += ['--docker']
elif not args.kvm:
make_image_prog += ['--lxc', '--disksize', '13000']
subprocess.check_call(make_image_prog)
os.chdir(workdir)
if args.is_focal and not args.kvm and not args.docker:
subprocess.check_call(['sudo', 'sed', '-i', 's/lxcbr0/br0/', '/etc/default/lxc-net'])
print('Reboot is required')
sys.exit(0)
def build():
global args, workdir
os.makedirs('bitcoin-binaries/' + args.version, exist_ok=True)
print('\nBuilding Dependencies\n')
os.chdir('gitian-builder')
os.makedirs('inputs', exist_ok=True)
subprocess.check_call(['wget', '-O', 'inputs/osslsigncode-2.0.tar.gz', 'https://github.com/mtrojnar/osslsigncode/archive/2.0.tar.gz'])
subprocess.check_call(["echo '5a60e0a4b3e0b4d655317b2f12a810211c50242138322b16e7e01c6fbb89d92f inputs/osslsigncode-2.0.tar.gz' | sha256sum -c"], shell=True)
subprocess.check_call(['make', '-C', '../bitcoin/depends', 'download', 'SOURCES_PATH=' + os.getcwd() + '/cache/common'])
if args.linux:
print('\nCompiling ' + args.version + ' Linux')
subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, '--url', 'bitcoin='+args.url, '../bitcoin/contrib/gitian-descriptors/gitian-linux.yml'])
subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-linux', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-linux.yml'])
subprocess.check_call('mv build/out/bitcoin-*.tar.gz build/out/src/bitcoin-*.tar.gz ../bitcoin-binaries/'+args.version, shell=True)
if args.windows:
print('\nCompiling ' + args.version + ' Windows')
subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, '--url', 'bitcoin='+args.url, '../bitcoin/contrib/gitian-descriptors/gitian-win.yml'])
subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-unsigned', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-win.yml'])
subprocess.check_call('mv build/out/bitcoin-*-win-unsigned.tar.gz inputs/', shell=True)
subprocess.check_call('mv build/out/bitcoin-*.zip build/out/bitcoin-*.exe build/out/src/bitcoin-*.tar.gz ../bitcoin-binaries/'+args.version, shell=True)
if args.macos:
print('\nCompiling ' + args.version + ' MacOS')
subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, '--url', 'bitcoin='+args.url, '../bitcoin/contrib/gitian-descriptors/gitian-osx.yml'])
subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-unsigned', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-osx.yml'])
subprocess.check_call('mv build/out/bitcoin-*-osx-unsigned.tar.gz inputs/', shell=True)
subprocess.check_call('mv build/out/bitcoin-*.tar.gz build/out/bitcoin-*.dmg build/out/src/bitcoin-*.tar.gz ../bitcoin-binaries/'+args.version, shell=True)
os.chdir(workdir)
if args.commit_files:
print('\nCommitting '+args.version+' Unsigned Sigs\n')
os.chdir('gitian.sigs')
subprocess.check_call(['git', 'add', args.version+'-linux/'+args.signer])
subprocess.check_call(['git', 'add', args.version+'-win-unsigned/'+args.signer])
subprocess.check_call(['git', 'add', args.version+'-osx-unsigned/'+args.signer])
subprocess.check_call(['git', 'commit', '-m', 'Add '+args.version+' unsigned sigs for '+args.signer])
os.chdir(workdir)
def sign():
global args, workdir
os.chdir('gitian-builder')
if args.windows:
print('\nSigning ' + args.version + ' Windows')
subprocess.check_call('cp inputs/bitcoin-' + args.version + '-win-unsigned.tar.gz inputs/bitcoin-win-unsigned.tar.gz', shell=True)
subprocess.check_call(['bin/gbuild', '--skip-image', '--upgrade', '--commit', 'signature='+args.commit, '../bitcoin/contrib/gitian-descriptors/gitian-win-signer.yml'])
subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-signed', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-win-signer.yml'])
subprocess.check_call('mv build/out/bitcoin-*win64-setup.exe ../bitcoin-binaries/'+args.version, shell=True)
if args.macos:
print('\nSigning ' + args.version + ' MacOS')
subprocess.check_call('cp inputs/bitcoin-' + args.version + '-osx-unsigned.tar.gz inputs/bitcoin-osx-unsigned.tar.gz', shell=True)
subprocess.check_call(['bin/gbuild', '--skip-image', '--upgrade', '--commit', 'signature='+args.commit, '../bitcoin/contrib/gitian-descriptors/gitian-osx-signer.yml'])
subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-signed', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-osx-signer.yml'])
subprocess.check_call('mv build/out/bitcoin-osx-signed.dmg ../bitcoin-binaries/'+args.version+'/bitcoin-'+args.version+'-osx.dmg', shell=True)
os.chdir(workdir)
if args.commit_files:
print('\nCommitting '+args.version+' Signed Sigs\n')
os.chdir('gitian.sigs')
subprocess.check_call(['git', 'add', args.version+'-win-signed/'+args.signer])
subprocess.check_call(['git', 'add', args.version+'-osx-signed/'+args.signer])
subprocess.check_call(['git', 'commit', '-a', '-m', 'Add '+args.version+' signed binary sigs for '+args.signer])
os.chdir(workdir)
def verify():
global args, workdir
rc = 0
os.chdir('gitian-builder')
print('\nVerifying v'+args.version+' Linux\n')
if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-linux', '../bitcoin/contrib/gitian-descriptors/gitian-linux.yml']):
print('Verifying v'+args.version+' Linux FAILED\n')
rc = 1
print('\nVerifying v'+args.version+' Windows\n')
if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-win-unsigned', '../bitcoin/contrib/gitian-descriptors/gitian-win.yml']):
print('Verifying v'+args.version+' Windows FAILED\n')
rc = 1
print('\nVerifying v'+args.version+' MacOS\n')
if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-osx-unsigned', '../bitcoin/contrib/gitian-descriptors/gitian-osx.yml']):
print('Verifying v'+args.version+' MacOS FAILED\n')
rc = 1
print('\nVerifying v'+args.version+' Signed Windows\n')
if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-win-signed', '../bitcoin/contrib/gitian-descriptors/gitian-win-signer.yml']):
print('Verifying v'+args.version+' Signed Windows FAILED\n')
rc = 1
print('\nVerifying v'+args.version+' Signed MacOS\n')
if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-osx-signed', '../bitcoin/contrib/gitian-descriptors/gitian-osx-signer.yml']):
print('Verifying v'+args.version+' Signed MacOS FAILED\n')
rc = 1
os.chdir(workdir)
return rc
def main():
global args, workdir
parser = argparse.ArgumentParser(description='Script for running full Gitian builds.')
parser.add_argument('-c', '--commit', action='store_true', dest='commit', help='Indicate that the version argument is for a commit or branch')
parser.add_argument('-p', '--pull', action='store_true', dest='pull', help='Indicate that the version argument is the number of a github repository pull request')
parser.add_argument('-u', '--url', dest='url', default='https://github.com/bitcoin/bitcoin', help='Specify the URL of the repository. Default is %(default)s')
parser.add_argument('-v', '--verify', action='store_true', dest='verify', help='Verify the Gitian build')
parser.add_argument('-b', '--build', action='store_true', dest='build', help='Do a Gitian build')
parser.add_argument('-s', '--sign', action='store_true', dest='sign', help='Make signed binaries for Windows and MacOS')
parser.add_argument('-B', '--buildsign', action='store_true', dest='buildsign', help='Build both signed and unsigned binaries')
parser.add_argument('-o', '--os', dest='os', default='lwm', help='Specify which Operating Systems the build is for. Default is %(default)s. l for Linux, w for Windows, m for MacOS')
parser.add_argument('-j', '--jobs', dest='jobs', default='2', help='Number of processes to use. Default %(default)s')
parser.add_argument('-m', '--memory', dest='memory', default='2000', help='Memory to allocate in MiB. Default %(default)s')
parser.add_argument('-k', '--kvm', action='store_true', dest='kvm', help='Use KVM instead of LXC')
parser.add_argument('-d', '--docker', action='store_true', dest='docker', help='Use Docker instead of LXC')
parser.add_argument('-S', '--setup', action='store_true', dest='setup', help='Set up the Gitian building environment. Only works on Debian-based systems (Ubuntu, Debian)')
parser.add_argument('-D', '--detach-sign', action='store_true', dest='detach_sign', help='Create the assert file for detached signing. Will not commit anything.')
parser.add_argument('-n', '--no-commit', action='store_false', dest='commit_files', help='Do not commit anything to git')
parser.add_argument('signer', nargs='?', help='GPG signer to sign each build assert file')
parser.add_argument('version', nargs='?', help='Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified')
args = parser.parse_args()
workdir = os.getcwd()
args.is_focal = b'focal' in subprocess.check_output(['lsb_release', '-cs'])
if args.kvm and args.docker:
raise Exception('Error: cannot have both kvm and docker')
# Ensure no more than one environment variable for gitian-builder (USE_LXC, USE_VBOX, USE_DOCKER) is set as they
# can interfere (e.g., USE_LXC being set shadows USE_DOCKER; for details see gitian-builder/libexec/make-clean-vm).
os.environ['USE_LXC'] = ''
os.environ['USE_VBOX'] = ''
os.environ['USE_DOCKER'] = ''
if args.docker:
os.environ['USE_DOCKER'] = '1'
elif not args.kvm:
os.environ['USE_LXC'] = '1'
if 'GITIAN_HOST_IP' not in os.environ.keys():
os.environ['GITIAN_HOST_IP'] = '10.0.3.1'
if 'LXC_GUEST_IP' not in os.environ.keys():
os.environ['LXC_GUEST_IP'] = '10.0.3.5'
if args.setup:
setup()
if args.buildsign:
args.build = True
args.sign = True
if not args.build and not args.sign and not args.verify:
sys.exit(0)
args.linux = 'l' in args.os
args.windows = 'w' in args.os
args.macos = 'm' in args.os
# Disable for MacOS if no SDK found
if args.macos and not os.path.isfile('gitian-builder/inputs/Xcode-11.3.1-11C505-extracted-SDK-with-libcxx-headers.tar.gz'):
print('Cannot build for MacOS, SDK does not exist. Will build for other OSes')
args.macos = False
args.sign_prog = 'true' if args.detach_sign else 'gpg --detach-sign'
script_name = os.path.basename(sys.argv[0])
if not args.signer:
print(script_name+': Missing signer')
print('Try '+script_name+' --help for more information')
sys.exit(1)
if not args.version:
print(script_name+': Missing version')
print('Try '+script_name+' --help for more information')
sys.exit(1)
# Add leading 'v' for tags
if args.commit and args.pull:
raise Exception('Cannot have both commit and pull')
args.commit = ('' if args.commit else 'v') + args.version
os.chdir('bitcoin')
if args.pull:
subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge'])
os.chdir('../gitian-builder/inputs/bitcoin')
subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge'])
args.commit = subprocess.check_output(['git', 'show', '-s', '--format=%H', 'FETCH_HEAD'], universal_newlines=True, encoding='utf8').strip()
args.version = 'pull-' + args.version
print(args.commit)
subprocess.check_call(['git', 'fetch'])
subprocess.check_call(['git', 'checkout', args.commit])
os.chdir(workdir)
os.chdir('gitian-builder')
subprocess.check_call(['git', 'pull'])
os.chdir(workdir)
if args.build:
build()
if args.sign:
sign()
if args.verify:
os.chdir('gitian.sigs')
subprocess.check_call(['git', 'pull'])
os.chdir(workdir)
sys.exit(verify())
if __name__ == '__main__':
main()
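# Example invocations (a sketch, not part of the original script; the signer
# name and version below are placeholders):
#   ./gitian-build.py --setup satoshi 22.0
#   ./gitian-build.py --detach-sign --no-commit --build satoshi 22.0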
|
n1bor/bitcoin
|
contrib/gitian-build.py
|
Python
|
mit
| 14,742
|
#!/Users/saz/Workspace/spaceapps/spaceapps/bin/python3.5
#
# The Python Imaging Library.
# $Id$
#
# convert image files
#
# History:
# 0.1 96-04-20 fl Created
# 0.2 96-10-04 fl Use draft mode when converting images
# 0.3 96-12-30 fl Optimize output (PNG, JPEG)
# 0.4 97-01-18 fl Made optimize an option (PNG, JPEG)
# 0.5 98-12-30 fl Fixed -f option (from Anthony Baxter)
#
from __future__ import print_function
import getopt
import string
import sys
from PIL import Image
def usage():
print("PIL Convert 0.5/1998-12-30 -- convert image files")
print("Usage: pilconvert [option] infile outfile")
print()
print("Options:")
print()
print(" -c <format> convert to format (default is given by extension)")
print()
print(" -g convert to greyscale")
print(" -p convert to palette image (using standard palette)")
print(" -r convert to rgb")
print()
print(" -o optimize output (trade speed for size)")
print(" -q <value> set compression quality (0-100, JPEG only)")
print()
print(" -f list supported file formats")
sys.exit(1)
if len(sys.argv) == 1:
usage()
try:
opt, argv = getopt.getopt(sys.argv[1:], "c:dfgopq:r")
except getopt.error as v:
print(v)
sys.exit(1)
output_format = None
convert = None
options = {}
for o, a in opt:
if o == "-f":
Image.init()
id = sorted(Image.ID)
print("Supported formats (* indicates output format):")
for i in id:
if i in Image.SAVE:
print(i+"*", end=' ')
else:
print(i, end=' ')
sys.exit(1)
elif o == "-c":
output_format = a
if o == "-g":
convert = "L"
elif o == "-p":
convert = "P"
elif o == "-r":
convert = "RGB"
elif o == "-o":
options["optimize"] = 1
elif o == "-q":
options["quality"] = string.atoi(a)
if len(argv) != 2:
usage()
try:
im = Image.open(argv[0])
if convert and im.mode != convert:
im.draft(convert, im.size)
im = im.convert(convert)
if output_format:
im.save(argv[1], output_format, **options)
else:
im.save(argv[1], **options)
except:
print("cannot convert image", end=' ')
print("(%s:%s)" % (sys.exc_info()[0], sys.exc_info()[1]))
|
saaqibz/SpaceApps2017
|
bin/pilconvert.py
|
Python
|
mit
| 2,396
|
from dmpr import DMPR
from dmpr.path import Path, LinkAttributes
class TestPath:
@staticmethod
def get_path():
return Path(path='A>[1]>B>[2]>C',
attributes=LinkAttributes({
'1': {'loss': 10},
'2': {'loss': 20}
}),
next_hop='B',
next_hop_interface='wlan0')
def test_correct_splitting(self):
path = self.get_path()
assert path.links == ['1', '2']
assert path.nodes == ['A', 'B', 'C']
assert path.next_hop_interface == 'wlan0'
def test_correct_appending(self):
path = self.get_path()
path.append('D', 'tetra', {'loss': 30})
assert path.next_hop_interface == 'tetra'
assert path.links[0] == '3'
assert path.attributes['3']['loss'] == 30
def test_correct_applying_to_new(self):
path = self.get_path()
attributes = LinkAttributes()
path.apply_attributes(attributes)
assert {'loss': 20} in attributes.values()
assert {'loss': 10} in attributes.values()
def test_correct_applying_to_others(self):
path = self.get_path()
attributes = LinkAttributes({'1': {'loss': 30}})
path.apply_attributes(attributes)
assert {'loss': 20} in attributes.values()
assert {'loss': 10} in attributes.values()
assert attributes['1'] == {'loss': 30}
def test_str(self):
path = self.get_path()
attributes = LinkAttributes()
path.apply_attributes(attributes)
expected = "A>[{}]>B>[{}]>C"
loss_10 = list(attributes.keys())[
list(attributes.values()).index({'loss': 10})]
loss_20 = list(attributes.keys())[
list(attributes.values()).index({'loss': 20})]
expected = expected.format(loss_10, loss_20)
assert str(path) == expected
def test_eq(self):
path1 = self.get_path()
path2 = self.get_path()
path3 = self.get_path()
path4 = self.get_path()
path5 = self.get_path()
path6 = self.get_path()
path7 = self.get_path()
path3.append('X', 'wlan', {'loss': 2})
path4.append('X', 'tetra', {'loss': 2})
path5.append('X', 'wlan', {'loss': 3})
path6.append('Y', 'wlan', {'loss': 2})
path7.append('X', 'wlan', {'loss': 2})
assert path1 is not path2
assert path2 is not path3
assert path1 is not path3
assert path3 is not path4
assert path3 is not path7
assert path1 == path2
assert path1 != path3
assert path1 == path1
assert path3 == path3
assert path3 != path4
assert path3 != path5
assert path3 != path6
assert path3 == path7
class TestMergeNetworks:
def test_simple(self):
networks = [{'1': {}, '2': {}, '3': {'retracted': True}}]
expected = {'1': {}, '2': {}, '3': {'retracted': True}}
result = DMPR._merge_networks(networks)
assert expected == result
def test_overwrite(self):
networks = [{'1': {}, '2': {}, '3': {'retracted': True}},
{'1': {}, '2': {}, '3': {'retracted': False}}]
expected = {'1': {}, '2': {}, '3': {'retracted': True}}
result = DMPR._merge_networks(networks)
assert expected == result
def test_multi_overwrite(self):
networks = [{'1': {}, '2': {}, '3': {'retracted': True}},
{'1': {}, '2': {}, '3': {'retracted': False}},
{'1': {}, '2': {'retracted': True}, '3': {}}]
expected = {'1': {}, '2': {'retracted': True}, '3': {'retracted': True}}
result = DMPR._merge_networks(networks)
assert expected == result
def test_init_dmpr():
dmpr = DMPR(log=True)
|
protocollabs/dmpr-core
|
test_dmpr.py
|
Python
|
mit
| 3,838
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os
import shutil
import sys
import tempfile
from optparse import OptionParser
tmpeggs = tempfile.mkdtemp()
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --find-links to point to local resources, you can keep
this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("-v", "--version", help="use a specific zc.buildout version")
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true", default=False,
help=("Normally, if you do not specify a --version, the "
"bootstrap script and buildout gets the newest "
"*final* versions of zc.buildout and its recipes and "
"extensions for you. If you use this flag, "
"bootstrap and buildout will get the newest releases "
"even if they are alphas or betas."))
parser.add_option("-c", "--config-file",
help=("Specify the path to the buildout configuration "
"file to be used."))
parser.add_option("-f", "--find-links",
help=("Specify a URL to search for buildout releases"))
options, args = parser.parse_args()
######################################################################
# load/install setuptools
to_reload = False
try:
import pkg_resources
import setuptools
except ImportError:
ez = {}
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
# XXX use a more permanent ez_setup.py URL when available.
exec(urlopen('https://bitbucket.org/pypa/setuptools/raw/0.7.2/ez_setup.py'
).read(), ez)
setup_args = dict(to_dir=tmpeggs, download_delay=0)
ez['use_setuptools'](**setup_args)
if to_reload:
reload(pkg_resources)
import pkg_resources
# This does not (always?) update the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
######################################################################
# Install buildout
ws = pkg_resources.working_set
cmd = [sys.executable, '-c',
'from setuptools.command.easy_install import main; main()',
'-mZqNxd', tmpeggs]
find_links = os.environ.get(
'bootstrap-testing-find-links',
options.find_links or
('http://downloads.buildout.org/'
if options.accept_buildout_test_releases else None)
)
if find_links:
cmd.extend(['-f', find_links])
setuptools_path = ws.find(
pkg_resources.Requirement.parse('setuptools')).location
requirement = 'zc.buildout'
version = options.version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
index = setuptools.package_index.PackageIndex(
search_path=[setuptools_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
import subprocess
if subprocess.call(cmd, env=dict(os.environ, PYTHONPATH=setuptools_path)) != 0:
raise Exception(
    "Failed to execute command:\n%s" % repr(cmd)[1:-1])
######################################################################
# Import and run buildout
ws.add_entry(tmpeggs)
ws.require(requirement)
import zc.buildout.buildout
if not [a for a in args if '=' not in a]:
args.append('bootstrap')
# if -c was provided, we push it back into args for buildout' main function
if options.config_file is not None:
args[0:0] = ['-c', options.config_file]
zc.buildout.buildout.main(args)
shutil.rmtree(tmpeggs)
|
mfelsche/crate-django
|
bootstrap.py
|
Python
|
apache-2.0
| 6,073
|
# -*- coding: utf-8 -*-
#
# Python BloomFilter documentation build configuration file, created by
# sphinx-quickstart on Wed Mar 31 16:25:58 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinxtogithub']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Python BloomFilter'
copyright = u'2010-2012, Michael Axiak'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3.2'
# The full version, including alpha/beta/rc tags.
release = '0.3.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build','html']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'PythonBloomFilterdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'PythonBloomFilter.tex', u'Python BloomFilter Documentation',
u'Michael Axiak', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
raven47git/pybloomfiltermmap
|
docs/conf.py
|
Python
|
mit
| 6,423
|
from __future__ import print_function, division
from sympy.core import Function, S, sympify
from sympy.core.add import Add
from sympy.core.containers import Tuple
from sympy.core.operations import LatticeOp, ShortCircuit
from sympy.core.function import (Application, Lambda,
ArgumentIndexError)
from sympy.core.expr import Expr
from sympy.core.mod import Mod
from sympy.core.mul import Mul
from sympy.core.numbers import Rational
from sympy.core.power import Pow
from sympy.core.relational import Eq, Relational
from sympy.core.singleton import Singleton
from sympy.core.symbol import Dummy
from sympy.core.rules import Transform
from sympy.core.compatibility import with_metaclass, range
from sympy.core.logic import fuzzy_and, fuzzy_or, _torf
from sympy.logic.boolalg import And, Or
def _minmax_as_Piecewise(op, *args):
# helper for Min/Max rewrite as Piecewise
from sympy.functions.elementary.piecewise import Piecewise
ec = []
for i, a in enumerate(args):
c = []
for j in range(i + 1, len(args)):
c.append(Relational(a, args[j], op))
ec.append((a, And(*c)))
return Piecewise(*ec)
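# For illustration (a sketch, not part of the original source): with symbols x, y,
#   _minmax_as_Piecewise('>', x, y)  ->  Piecewise((x, x > y), (y, True))
# which is the Piecewise form one gets when rewriting Max(x, y).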
class IdentityFunction(with_metaclass(Singleton, Lambda)):
"""
The identity function
Examples
========
>>> from sympy import Id, Symbol
>>> x = Symbol('x')
>>> Id(x)
x
"""
def __new__(cls):
x = Dummy('x')
#construct "by hand" to avoid infinite loop
return Expr.__new__(cls, Tuple(x), x)
Id = S.IdentityFunction
###############################################################################
############################# ROOT and SQUARE ROOT FUNCTION ###################
###############################################################################
def sqrt(arg, evaluate=None):
"""The square root function
sqrt(x) -> Returns the principal square root of x.
The parameter evaluate determines if the expression should be evaluated.
If None, its value is taken from global_evaluate
Examples
========
>>> from sympy import sqrt, Symbol
>>> x = Symbol('x')
>>> sqrt(x)
sqrt(x)
>>> sqrt(x)**2
x
Note that sqrt(x**2) does not simplify to x.
>>> sqrt(x**2)
sqrt(x**2)
This is because the two are not equal to each other in general.
For example, consider x == -1:
>>> from sympy import Eq
>>> Eq(sqrt(x**2), x).subs(x, -1)
False
This is because sqrt computes the principal square root, so the square may
put the argument in a different branch. This identity does hold if x is
positive:
>>> y = Symbol('y', positive=True)
>>> sqrt(y**2)
y
You can force this simplification by using the powdenest() function with
the force option set to True:
>>> from sympy import powdenest
>>> sqrt(x**2)
sqrt(x**2)
>>> powdenest(sqrt(x**2), force=True)
x
To get both branches of the square root you can use the rootof function:
>>> from sympy import rootof
>>> [rootof(x**2-3,i) for i in (0,1)]
[-sqrt(3), sqrt(3)]
See Also
========
sympy.polys.rootoftools.rootof, root, real_root
References
==========
.. [1] https://en.wikipedia.org/wiki/Square_root
.. [2] https://en.wikipedia.org/wiki/Principal_value
"""
# arg = sympify(arg) is handled by Pow
return Pow(arg, S.Half, evaluate=evaluate)
def cbrt(arg, evaluate=None):
"""This function computes the principal cube root of `arg`, so
it's just a shortcut for `arg**Rational(1, 3)`.
The parameter evaluate determines if the expression should be evaluated.
If None, its value is taken from global_evaluate.
Examples
========
>>> from sympy import cbrt, Symbol
>>> x = Symbol('x')
>>> cbrt(x)
x**(1/3)
>>> cbrt(x)**3
x
Note that cbrt(x**3) does not simplify to x.
>>> cbrt(x**3)
(x**3)**(1/3)
This is because the two are not equal to each other in general.
For example, consider `x == -1`:
>>> from sympy import Eq
>>> Eq(cbrt(x**3), x).subs(x, -1)
False
This is because cbrt computes the principal cube root; this
identity does hold if `x` is positive:
>>> y = Symbol('y', positive=True)
>>> cbrt(y**3)
y
See Also
========
sympy.polys.rootoftools.rootof, root, real_root
References
==========
* https://en.wikipedia.org/wiki/Cube_root
* https://en.wikipedia.org/wiki/Principal_value
"""
return Pow(arg, Rational(1, 3), evaluate=evaluate)
def root(arg, n, k=0, evaluate=None):
"""root(x, n, k) -> Returns the k-th n-th root of x, defaulting to the
principal root (k=0).
The parameter evaluate determines if the expression should be evaluated.
If None, its value is taken from global_evaluate.
Examples
========
>>> from sympy import root, Rational
>>> from sympy.abc import x, n
>>> root(x, 2)
sqrt(x)
>>> root(x, 3)
x**(1/3)
>>> root(x, n)
x**(1/n)
>>> root(x, -Rational(2, 3))
x**(-3/2)
To get the k-th n-th root, specify k:
>>> root(-2, 3, 2)
-(-1)**(2/3)*2**(1/3)
To get all n n-th roots you can use the rootof function.
The following examples show the roots of unity for n
equal to 2, 3 and 4:
>>> from sympy import rootof, I
>>> [rootof(x**2 - 1, i) for i in range(2)]
[-1, 1]
>>> [rootof(x**3 - 1,i) for i in range(3)]
[1, -1/2 - sqrt(3)*I/2, -1/2 + sqrt(3)*I/2]
>>> [rootof(x**4 - 1,i) for i in range(4)]
[-1, 1, -I, I]
SymPy, like other symbolic algebra systems, returns the
complex root of negative numbers. This is the principal
root and differs from the text-book result that one might
be expecting. For example, the cube root of -8 does not
come back as -2:
>>> root(-8, 3)
2*(-1)**(1/3)
The real_root function can be used to either make the principal
result real (or simply to return the real root directly):
>>> from sympy import real_root
>>> real_root(_)
-2
>>> real_root(-32, 5)
-2
Alternatively, the n//2-th n-th root of a negative number can be
computed with root:
>>> root(-32, 5, 5//2)
-2
See Also
========
sympy.polys.rootoftools.rootof
sympy.core.power.integer_nthroot
sqrt, real_root
References
==========
* https://en.wikipedia.org/wiki/Square_root
* https://en.wikipedia.org/wiki/Real_root
* https://en.wikipedia.org/wiki/Root_of_unity
* https://en.wikipedia.org/wiki/Principal_value
* http://mathworld.wolfram.com/CubeRoot.html
"""
n = sympify(n)
if k:
return Mul(Pow(arg, S.One/n, evaluate=evaluate), S.NegativeOne**(2*k/n), evaluate=evaluate)
return Pow(arg, 1/n, evaluate=evaluate)
def real_root(arg, n=None, evaluate=None):
"""Return the real nth-root of arg if possible. If n is omitted then
all instances of (-n)**(1/odd) will be changed to -n**(1/odd); this
will only create a real root of a principal root -- the presence of
other factors may cause the result to not be real.
The parameter evaluate determines if the expression should be evaluated.
If None, its value is taken from global_evaluate.
Examples
========
>>> from sympy import root, real_root, Rational
>>> from sympy.abc import x, n
>>> real_root(-8, 3)
-2
>>> root(-8, 3)
2*(-1)**(1/3)
>>> real_root(_)
-2
If one creates a non-principal root and applies real_root, the
result will not be real (so use with caution):
>>> root(-8, 3, 2)
-2*(-1)**(2/3)
>>> real_root(_)
-2*(-1)**(2/3)
See Also
========
sympy.polys.rootoftools.rootof
sympy.core.power.integer_nthroot
root, sqrt
"""
from sympy.functions.elementary.complexes import Abs, im, sign
from sympy.functions.elementary.piecewise import Piecewise
if n is not None:
return Piecewise(
(root(arg, n, evaluate=evaluate), Or(Eq(n, S.One), Eq(n, S.NegativeOne))),
(Mul(sign(arg), root(Abs(arg), n, evaluate=evaluate), evaluate=evaluate),
And(Eq(im(arg), S.Zero), Eq(Mod(n, 2), S.One))),
(root(arg, n, evaluate=evaluate), True))
rv = sympify(arg)
n1pow = Transform(lambda x: -(-x.base)**x.exp,
lambda x:
x.is_Pow and
x.base.is_negative and
x.exp.is_Rational and
x.exp.p == 1 and x.exp.q % 2)
return rv.xreplace(n1pow)
###############################################################################
############################# MINIMUM and MAXIMUM #############################
###############################################################################
class MinMaxBase(Expr, LatticeOp):
def __new__(cls, *args, **assumptions):
evaluate = assumptions.pop('evaluate', True)
args = (sympify(arg) for arg in args)
# first standard filter, for cls.zero and cls.identity
# also reshape Max(a, Max(b, c)) to Max(a, b, c)
if evaluate:
try:
args = frozenset(cls._new_args_filter(args))
except ShortCircuit:
return cls.zero
else:
args = frozenset(args)
if evaluate:
# remove redundant args that are easily identified
args = cls._collapse_arguments(args, **assumptions)
# find local zeros
args = cls._find_localzeros(args, **assumptions)
if not args:
return cls.identity
if len(args) == 1:
return list(args).pop()
# base creation
_args = frozenset(args)
obj = Expr.__new__(cls, _args, **assumptions)
obj._argset = _args
return obj
@classmethod
def _collapse_arguments(cls, args, **assumptions):
"""Remove redundant args.
Examples
========
>>> from sympy import Min, Max
>>> from sympy.abc import a, b, c, d, e
Any arg in parent that appears in any
parent-like function in any of the flat args
of parent can be removed from that sub-arg:
>>> Min(a, Max(b, Min(a, c, d)))
Min(a, Max(b, Min(c, d)))
If the arg of parent appears in an opposite-than parent
function in any of the flat args of parent that function
can be replaced with the arg:
>>> Min(a, Max(b, Min(c, d, Max(a, e))))
Min(a, Max(b, Min(a, c, d)))
"""
from sympy.utilities.iterables import ordered
from sympy.simplify.simplify import walk
if not args:
return args
args = list(ordered(args))
if cls == Min:
other = Max
else:
other = Min
# find global comparable max of Max and min of Min if a new
# value is being introduced in these args at position 0 of
# the ordered args
if args[0].is_number:
sifted = mins, maxs = [], []
for i in args:
for v in walk(i, Min, Max):
if v.args[0].is_comparable:
sifted[isinstance(v, Max)].append(v)
small = Min.identity
for i in mins:
v = i.args[0]
if v.is_number and (v < small) == True:
small = v
big = Max.identity
for i in maxs:
v = i.args[0]
if v.is_number and (v > big) == True:
big = v
# at the point when this function is called from __new__,
# there may be more than one numeric arg present since
# local zeros have not been handled yet, so look through
# more than the first arg
if cls == Min:
for i in range(len(args)):
if not args[i].is_number:
break
if (args[i] < small) == True:
small = args[i]
elif cls == Max:
for i in range(len(args)):
if not args[i].is_number:
break
if (args[i] > big) == True:
big = args[i]
T = None
if cls == Min:
if small != Min.identity:
other = Max
T = small
elif big != Max.identity:
other = Min
T = big
if T is not None:
# remove numerical redundancy
for i in range(len(args)):
a = args[i]
if isinstance(a, other):
a0 = a.args[0]
if ((a0 > T) if other == Max else (a0 < T)) == True:
args[i] = cls.identity
# remove redundant symbolic args
def do(ai, a):
if not isinstance(ai, (Min, Max)):
return ai
cond = a in ai.args
if not cond:
return ai.func(*[do(i, a) for i in ai.args],
evaluate=False)
if isinstance(ai, cls):
return ai.func(*[do(i, a) for i in ai.args if i != a],
evaluate=False)
return a
for i, a in enumerate(args):
args[i + 1:] = [do(ai, a) for ai in args[i + 1:]]
# factor out common elements as for
# Min(Max(x, y), Max(x, z)) -> Max(x, Min(y, z))
# and vice versa when swapping Min/Max -- do this only for the
# easy case where all functions contain something in common;
# trying to find some optimal subset of args to modify takes
# too long
if len(args) > 1:
common = None
remove = []
sets = []
for i in range(len(args)):
a = args[i]
if not isinstance(a, other):
continue
s = set(a.args)
common = s if common is None else (common & s)
if not common:
break
sets.append(s)
remove.append(i)
if common:
sets = filter(None, [s - common for s in sets])
sets = [other(*s, evaluate=False) for s in sets]
for i in reversed(remove):
args.pop(i)
oargs = [cls(*sets)] if sets else []
oargs.extend(common)
args.append(other(*oargs, evaluate=False))
return args
@classmethod
def _new_args_filter(cls, arg_sequence):
"""
Generator filtering args.
first standard filter, for cls.zero and cls.identity.
Also reshape Max(a, Max(b, c)) to Max(a, b, c),
and check arguments for comparability
"""
for arg in arg_sequence:
# pre-filter, checking comparability of arguments
if not isinstance(arg, Expr) or arg.is_extended_real is False or (
arg.is_number and
not arg.is_comparable):
raise ValueError("The argument '%s' is not comparable." % arg)
if arg == cls.zero:
raise ShortCircuit(arg)
elif arg == cls.identity:
continue
elif arg.func == cls:
for x in arg.args:
yield x
else:
yield arg
@classmethod
def _find_localzeros(cls, values, **options):
"""
Sequentially allocate values to localzeros.
When a value is identified as being more extreme than another member it
replaces that member; if this is never true, then the value is simply
appended to the localzeros.
"""
localzeros = set()
for v in values:
is_newzero = True
localzeros_ = list(localzeros)
for z in localzeros_:
if id(v) == id(z):
is_newzero = False
else:
con = cls._is_connected(v, z)
if con:
is_newzero = False
if con is True or con == cls:
localzeros.remove(z)
localzeros.update([v])
if is_newzero:
localzeros.update([v])
return localzeros
@classmethod
def _is_connected(cls, x, y):
"""
Check if x and y are connected somehow.
"""
from sympy.core.exprtools import factor_terms
def hit(v, t, f):
if not v.is_Relational:
return t if v else f
for i in range(2):
if x == y:
return True
r = hit(x >= y, Max, Min)
if r is not None:
return r
r = hit(y <= x, Max, Min)
if r is not None:
return r
r = hit(x <= y, Min, Max)
if r is not None:
return r
r = hit(y >= x, Min, Max)
if r is not None:
return r
# simplification can be expensive, so be conservative
# in what is attempted
x = factor_terms(x - y)
y = S.Zero
return False
def _eval_derivative(self, s):
# f(x).diff(s) -> x.diff(s) * f.fdiff(1)(s)
i = 0
l = []
for a in self.args:
i += 1
da = a.diff(s)
if da.is_zero:
continue
try:
df = self.fdiff(i)
except ArgumentIndexError:
df = Function.fdiff(self, i)
l.append(df * da)
return Add(*l)
def _eval_rewrite_as_Abs(self, *args, **kwargs):
from sympy.functions.elementary.complexes import Abs
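        # Uses max(a, b) = (a + b)/2 + |a - b|/2 and min(a, b) = (a + b)/2 - |a - b|/2,
        # with a = args[0] and b = Max/Min of the remaining arguments.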
s = (args[0] + self.func(*args[1:]))/2
d = abs(args[0] - self.func(*args[1:]))/2
return (s + d if isinstance(self, Max) else s - d).rewrite(Abs)
def evalf(self, prec=None, **options):
return self.func(*[a.evalf(prec, **options) for a in self.args])
n = evalf
_eval_is_algebraic = lambda s: _torf(i.is_algebraic for i in s.args)
_eval_is_antihermitian = lambda s: _torf(i.is_antihermitian for i in s.args)
_eval_is_commutative = lambda s: _torf(i.is_commutative for i in s.args)
_eval_is_complex = lambda s: _torf(i.is_complex for i in s.args)
_eval_is_composite = lambda s: _torf(i.is_composite for i in s.args)
_eval_is_even = lambda s: _torf(i.is_even for i in s.args)
_eval_is_finite = lambda s: _torf(i.is_finite for i in s.args)
_eval_is_hermitian = lambda s: _torf(i.is_hermitian for i in s.args)
_eval_is_imaginary = lambda s: _torf(i.is_imaginary for i in s.args)
_eval_is_infinite = lambda s: _torf(i.is_infinite for i in s.args)
_eval_is_integer = lambda s: _torf(i.is_integer for i in s.args)
_eval_is_irrational = lambda s: _torf(i.is_irrational for i in s.args)
_eval_is_negative = lambda s: _torf(i.is_negative for i in s.args)
_eval_is_noninteger = lambda s: _torf(i.is_noninteger for i in s.args)
_eval_is_nonnegative = lambda s: _torf(i.is_nonnegative for i in s.args)
_eval_is_nonpositive = lambda s: _torf(i.is_nonpositive for i in s.args)
_eval_is_nonzero = lambda s: _torf(i.is_nonzero for i in s.args)
_eval_is_odd = lambda s: _torf(i.is_odd for i in s.args)
_eval_is_polar = lambda s: _torf(i.is_polar for i in s.args)
_eval_is_positive = lambda s: _torf(i.is_positive for i in s.args)
_eval_is_prime = lambda s: _torf(i.is_prime for i in s.args)
_eval_is_rational = lambda s: _torf(i.is_rational for i in s.args)
_eval_is_real = lambda s: _torf(i.is_real for i in s.args)
_eval_is_extended_real = lambda s: _torf(i.is_extended_real for i in s.args)
_eval_is_transcendental = lambda s: _torf(i.is_transcendental for i in s.args)
_eval_is_zero = lambda s: _torf(i.is_zero for i in s.args)
class Max(MinMaxBase, Application):
"""
Return, if possible, the maximum value of the list.
    When there is only one argument, that argument is returned.
    When there are two arguments, return, if possible, the value from (a, b)
    that is >= the other.
    In the general case, with more than two arguments, the task is more
    complicated. Return only those arguments that are greater than the
    others, when such a directional relation can be determined.
    If it is not possible to determine such a relation, return a partially
    evaluated result.
    Assumptions are also used to make the decision.
    Only comparable arguments are permitted.
It is named ``Max`` and not ``max`` to avoid conflicts
with the built-in function ``max``.
Examples
========
>>> from sympy import Max, Symbol, oo
>>> from sympy.abc import x, y, z
>>> p = Symbol('p', positive=True)
>>> n = Symbol('n', negative=True)
>>> Max(x, -2)
Max(-2, x)
>>> Max(x, -2).subs(x, 3)
3
>>> Max(p, -2)
p
>>> Max(x, y)
Max(x, y)
>>> Max(x, y) == Max(y, x)
True
>>> Max(x, Max(y, z))
Max(x, y, z)
>>> Max(n, 8, p, 7, -oo)
Max(8, p)
>>> Max (1, x, oo)
oo
* Algorithm
    The task can be viewed as a search for suprema in directed-complete
    partial orders [1]_.
    The source values are sequentially allocated to isolated subsets in which
    suprema are searched for and returned as the Max arguments.
    If the resulting supremum is a single value, that value is returned.
    The isolated subsets are sets of values that are comparable only with
    each other within the current set. E.g. natural numbers are comparable with
    each other, but not comparable with the symbol `x`. Another example: the
    symbol `x` with a negative assumption is comparable with a natural number.
    There are also "least" elements, which are comparable with all others and
    have a zero property (a maximum or minimum for all elements), e.g. `oo`.
    When such an element is encountered, the allocation is terminated and only
    this value is returned.
    Assumptions:
- if A > B > C then A > C
- if A == B then B can be removed
References
==========
.. [1] https://en.wikipedia.org/wiki/Directed_complete_partial_order
.. [2] https://en.wikipedia.org/wiki/Lattice_%28order%29
See Also
========
Min : find minimum values
"""
zero = S.Infinity
identity = S.NegativeInfinity
    def fdiff(self, argindex):
from sympy import Heaviside
n = len(self.args)
        if 0 < argindex <= n:
argindex -= 1
if n == 2:
return Heaviside(self.args[argindex] - self.args[1 - argindex])
newargs = tuple([self.args[i] for i in range(n) if i != argindex])
return Heaviside(self.args[argindex] - Max(*newargs))
else:
raise ArgumentIndexError(self, argindex)
def _eval_rewrite_as_Heaviside(self, *args, **kwargs):
from sympy import Heaviside
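        # Max(a_1, ..., a_n) -> sum_j a_j * prod_{i != j} Heaviside(a_j - a_i);
        # for distinct real arguments only the maximum's product is nonzero.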
return Add(*[j*Mul(*[Heaviside(j - i) for i in args if i!=j]) \
for j in args])
def _eval_rewrite_as_Piecewise(self, *args, **kwargs):
return _minmax_as_Piecewise('>=', *args)
def _eval_is_positive(self):
return fuzzy_or(a.is_positive for a in self.args)
def _eval_is_nonnegative(self):
return fuzzy_or(a.is_nonnegative for a in self.args)
def _eval_is_negative(self):
return fuzzy_and(a.is_negative for a in self.args)
class Min(MinMaxBase, Application):
"""
Return, if possible, the minimum value of the list.
It is named ``Min`` and not ``min`` to avoid conflicts
with the built-in function ``min``.
Examples
========
>>> from sympy import Min, Symbol, oo
>>> from sympy.abc import x, y
>>> p = Symbol('p', positive=True)
>>> n = Symbol('n', negative=True)
>>> Min(x, -2)
Min(-2, x)
>>> Min(x, -2).subs(x, 3)
-2
>>> Min(p, -3)
-3
>>> Min(x, y)
Min(x, y)
>>> Min(n, 8, p, -7, p, oo)
Min(-7, n)
See Also
========
Max : find maximum values
"""
zero = S.NegativeInfinity
identity = S.Infinity
    def fdiff(self, argindex):
from sympy import Heaviside
n = len(self.args)
        if 0 < argindex <= n:
argindex -= 1
if n == 2:
                return Heaviside(self.args[1 - argindex] - self.args[argindex])
            newargs = tuple([self.args[i] for i in range(n) if i != argindex])
            return Heaviside(Min(*newargs) - self.args[argindex])
else:
raise ArgumentIndexError(self, argindex)
def _eval_rewrite_as_Heaviside(self, *args, **kwargs):
from sympy import Heaviside
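        # Min(a_1, ..., a_n) -> sum_j a_j * prod_{i != j} Heaviside(a_i - a_j);
        # for distinct real arguments only the minimum's product is nonzero.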
return Add(*[j*Mul(*[Heaviside(i-j) for i in args if i!=j]) \
for j in args])
def _eval_rewrite_as_Piecewise(self, *args, **kwargs):
return _minmax_as_Piecewise('<=', *args)
def _eval_is_positive(self):
return fuzzy_and(a.is_positive for a in self.args)
def _eval_is_nonnegative(self):
return fuzzy_and(a.is_nonnegative for a in self.args)
def _eval_is_negative(self):
return fuzzy_or(a.is_negative for a in self.args)
|
kaushik94/sympy
|
sympy/functions/elementary/miscellaneous.py
|
Python
|
bsd-3-clause
| 25,501
|
#
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bisect
import logbook
import datetime
import pandas as pd
import numpy as np
from sqlalchemy import create_engine
from zipline.data.loader import load_market_data
from zipline.utils import tradingcalendar
from zipline.assets import AssetFinder
from zipline.assets.asset_writer import (
AssetDBWriterFromList,
AssetDBWriterFromDictionary,
AssetDBWriterFromDataFrame)
from zipline.errors import (
NoFurtherDataError
)
log = logbook.Logger('Trading')
# The financial simulations in zipline depend on information
# about the benchmark index and the risk free rates of return.
# The benchmark index defines the benchmark returns used in
# the calculation of performance metrics such as alpha/beta. Many
# components, including risk, performance, transforms, and
# batch_transforms, need access to a calendar of trading days and
# market hours. The TradingEnvironment maintains two time keeping
# facilities:
# - a DatetimeIndex of trading days for calendar calculations
# - a timezone name, which should be local to the exchange
# hosting the benchmark index. All dates are normalized to UTC
# for serialization and storage, and the timezone is used to
# ensure proper rollover through daylight saving time and so on.
#
# User code will not normally need to use TradingEnvironment
# directly. If you are extending zipline's core financial
# components and need to use the environment, you must import the module and
# build a new TradingEnvironment object, then pass that TradingEnvironment as
# the 'env' arg to your TradingAlgorithm.
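#
# A minimal, illustrative sketch (``MyAlgo`` and ``data`` are hypothetical and
# not part of this module):
#
#     from zipline.finance.trading import TradingEnvironment
#     env = TradingEnvironment(bm_symbol='^GSPC', exchange_tz='US/Eastern')
#     algo = MyAlgo(env=env)   # pass the environment to the algorithm
#     results = algo.run(data)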
class TradingEnvironment(object):
# Token used as a substitute for pickling objects that contain a
# reference to a TradingEnvironment
PERSISTENT_TOKEN = "<TradingEnvironment>"
def __init__(
self,
load=None,
bm_symbol='^GSPC',
exchange_tz="US/Eastern",
max_date=None,
env_trading_calendar=tradingcalendar,
asset_db_path=':memory:'
):
"""
        @load is a function that returns benchmark_returns and treasury_curves
The treasury_curves are expected to be a DataFrame with an index of
dates and columns of the curve names, e.g. '10year', '1month', etc.
"""
self.trading_day = env_trading_calendar.trading_day.copy()
# `tc_td` is short for "trading calendar trading days"
tc_td = env_trading_calendar.trading_days
if max_date:
self.trading_days = tc_td[tc_td <= max_date].copy()
else:
self.trading_days = tc_td.copy()
self.first_trading_day = self.trading_days[0]
self.last_trading_day = self.trading_days[-1]
self.early_closes = env_trading_calendar.get_early_closes(
self.first_trading_day, self.last_trading_day)
self.open_and_closes = env_trading_calendar.open_and_closes.loc[
self.trading_days]
self.prev_environment = self
self.bm_symbol = bm_symbol
if not load:
load = load_market_data
self.benchmark_returns, self.treasury_curves = \
load(self.trading_day, self.trading_days, self.bm_symbol)
if max_date:
tr_c = self.treasury_curves
# Mask the treasury curves down to the current date.
# In the case of live trading, the last date in the treasury
# curves would be the day before the date considered to be
# 'today'.
self.treasury_curves = tr_c[tr_c.index <= max_date]
self.exchange_tz = exchange_tz
self.engine = engine = create_engine('sqlite:///%s' % asset_db_path)
AssetDBWriterFromDictionary().init_db(engine)
self.asset_finder = AssetFinder(engine)
def write_data(self,
engine=None,
equities_data=None,
futures_data=None,
exchanges_data=None,
root_symbols_data=None,
equities_df=None,
futures_df=None,
exchanges_df=None,
root_symbols_df=None,
equities_identifiers=None,
futures_identifiers=None,
exchanges_identifiers=None,
root_symbols_identifiers=None,
allow_sid_assignment=True):
""" Write the supplied data to the database.
Parameters
----------
equities_data: dict, optional
A dictionary of equity metadata
futures_data: dict, optional
A dictionary of futures metadata
exchanges_data: dict, optional
A dictionary of exchanges metadata
root_symbols_data: dict, optional
A dictionary of root symbols metadata
equities_df: pandas.DataFrame, optional
A pandas.DataFrame of equity metadata
futures_df: pandas.DataFrame, optional
A pandas.DataFrame of futures metadata
exchanges_df: pandas.DataFrame, optional
A pandas.DataFrame of exchanges metadata
root_symbols_df: pandas.DataFrame, optional
A pandas.DataFrame of root symbols metadata
equities_identifiers: list, optional
A list of equities identifiers (sids, symbols, Assets)
futures_identifiers: list, optional
A list of futures identifiers (sids, symbols, Assets)
exchanges_identifiers: list, optional
A list of exchanges identifiers (ids or names)
root_symbols_identifiers: list, optional
A list of root symbols identifiers (ids or symbols)
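        Notes
        -----
        Illustrative sketch only (the symbols are hypothetical); any
        combination of the inputs above may be supplied:
            env.write_data(equities_identifiers=['AAPL', 'MSFT'])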
"""
if engine:
self.engine = engine
# If any pandas.DataFrame data has been provided,
# write it to the database.
if (equities_df is not None or futures_df is not None or
exchanges_df is not None or root_symbols_df is not None):
self._write_data_dataframes(equities_df, futures_df,
exchanges_df, root_symbols_df)
if (equities_data is not None or futures_data is not None or
exchanges_data is not None or root_symbols_data is not None):
self._write_data_dicts(equities_data, futures_data,
exchanges_data, root_symbols_data)
# These could be lists or other iterables such as a pandas.Index.
# For simplicity, don't check whether data has been provided.
self._write_data_lists(equities_identifiers,
futures_identifiers,
exchanges_identifiers,
root_symbols_identifiers,
allow_sid_assignment=allow_sid_assignment)
def _write_data_lists(self, equities=None, futures=None, exchanges=None,
root_symbols=None, allow_sid_assignment=True):
AssetDBWriterFromList(equities, futures, exchanges, root_symbols)\
.write_all(self.engine, allow_sid_assignment=allow_sid_assignment)
def _write_data_dicts(self, equities=None, futures=None, exchanges=None,
root_symbols=None, allow_sid_assignment=True):
AssetDBWriterFromDictionary(equities, futures, exchanges, root_symbols)\
.write_all(self.engine)
def _write_data_dataframes(self, equities=None, futures=None,
exchanges=None, root_symbols=None):
AssetDBWriterFromDataFrame(equities, futures, exchanges, root_symbols)\
.write_all(self.engine)
def normalize_date(self, test_date):
test_date = pd.Timestamp(test_date, tz='UTC')
return pd.tseries.tools.normalize_date(test_date)
def utc_dt_in_exchange(self, dt):
return pd.Timestamp(dt).tz_convert(self.exchange_tz)
def exchange_dt_in_utc(self, dt):
return pd.Timestamp(dt, tz=self.exchange_tz).tz_convert('UTC')
def is_market_hours(self, test_date):
if not self.is_trading_day(test_date):
return False
mkt_open, mkt_close = self.get_open_and_close(test_date)
return test_date >= mkt_open and test_date <= mkt_close
def is_trading_day(self, test_date):
dt = self.normalize_date(test_date)
return (dt in self.trading_days)
def next_trading_day(self, test_date):
dt = self.normalize_date(test_date)
delta = datetime.timedelta(days=1)
while dt <= self.last_trading_day:
dt += delta
if dt in self.trading_days:
return dt
return None
def previous_trading_day(self, test_date):
dt = self.normalize_date(test_date)
delta = datetime.timedelta(days=-1)
while self.first_trading_day < dt:
dt += delta
if dt in self.trading_days:
return dt
return None
def add_trading_days(self, n, date):
"""
Adds n trading days to date. If this would fall outside of the
trading calendar, a NoFurtherDataError is raised.
:Arguments:
n : int
The number of days to add to date, this can be positive or
negative.
date : datetime
The date to add to.
:Returns:
new_date : datetime
n trading days added to date.
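        Illustrative example (hypothetical date)::
            env.add_trading_days(5, '2014-01-06')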
"""
if n == 1:
return self.next_trading_day(date)
if n == -1:
return self.previous_trading_day(date)
idx = self.get_index(date) + n
if idx < 0 or idx >= len(self.trading_days):
raise NoFurtherDataError(
msg='Cannot add %d days to %s' % (n, date)
)
return self.trading_days[idx]
def days_in_range(self, start, end):
mask = ((self.trading_days >= start) &
(self.trading_days <= end))
return self.trading_days[mask]
def opens_in_range(self, start, end):
return self.open_and_closes.market_open.loc[start:end]
def closes_in_range(self, start, end):
return self.open_and_closes.market_close.loc[start:end]
def minutes_for_days_in_range(self, start, end):
"""
Get all market minutes for the days between start and end, inclusive.
"""
start_date = self.normalize_date(start)
end_date = self.normalize_date(end)
all_minutes = []
for day in self.days_in_range(start_date, end_date):
day_minutes = self.market_minutes_for_day(day)
all_minutes.append(day_minutes)
# Concatenate all minutes and truncate minutes before start/after end.
return pd.DatetimeIndex(
np.concatenate(all_minutes), copy=False, tz='UTC',
)
def next_open_and_close(self, start_date):
"""
Given the start_date, returns the next open and close of
the market.
"""
next_open = self.next_trading_day(start_date)
if next_open is None:
raise NoFurtherDataError(
msg=("Attempt to backtest beyond available history. "
"Last known date: %s" % self.last_trading_day)
)
return self.get_open_and_close(next_open)
def previous_open_and_close(self, start_date):
"""
Given the start_date, returns the previous open and close of the
market.
"""
previous = self.previous_trading_day(start_date)
if previous is None:
raise NoFurtherDataError(
msg=("Attempt to backtest beyond available history. "
"First known date: %s" % self.first_trading_day)
)
return self.get_open_and_close(previous)
def next_market_minute(self, start):
"""
Get the next market minute after @start. This is either the immediate
next minute, or the open of the next market day after start.
"""
next_minute = start + datetime.timedelta(minutes=1)
if self.is_market_hours(next_minute):
return next_minute
return self.next_open_and_close(start)[0]
def previous_market_minute(self, start):
"""
        Get the previous market minute before @start. This is either the immediate
previous minute, or the close of the market day before start.
"""
prev_minute = start - datetime.timedelta(minutes=1)
if self.is_market_hours(prev_minute):
return prev_minute
return self.previous_open_and_close(start)[1]
def get_open_and_close(self, day):
index = self.open_and_closes.index.get_loc(day.date())
todays_minutes = self.open_and_closes.values[index]
return todays_minutes[0], todays_minutes[1]
def market_minutes_for_day(self, stamp):
market_open, market_close = self.get_open_and_close(stamp)
return pd.date_range(market_open, market_close, freq='T')
def open_close_window(self, start, count, offset=0, step=1):
"""
Return a DataFrame containing `count` market opens and closes,
        beginning with `start` + `offset` days and continuing `step` days at
        a time.
"""
# TODO: Correctly handle end of data.
start_idx = self.get_index(start) + offset
stop_idx = start_idx + (count * step)
index = np.arange(start_idx, stop_idx, step)
return self.open_and_closes.iloc[index]
def market_minute_window(self, start, count, step=1):
"""
Return a DatetimeIndex containing `count` market minutes, starting with
`start` and continuing `step` minutes at a time.
"""
if not self.is_market_hours(start):
raise ValueError("market_minute_window starting at "
"non-market time {minute}".format(minute=start))
all_minutes = []
current_day_minutes = self.market_minutes_for_day(start)
first_minute_idx = current_day_minutes.searchsorted(start)
minutes_in_range = current_day_minutes[first_minute_idx::step]
# Build up list of lists of days' market minutes until we have count
# minutes stored altogether.
while True:
if len(minutes_in_range) >= count:
# Truncate off extra minutes
minutes_in_range = minutes_in_range[:count]
all_minutes.append(minutes_in_range)
count -= len(minutes_in_range)
if count <= 0:
break
if step > 0:
start, _ = self.next_open_and_close(start)
current_day_minutes = self.market_minutes_for_day(start)
else:
_, start = self.previous_open_and_close(start)
current_day_minutes = self.market_minutes_for_day(start)
minutes_in_range = current_day_minutes[::step]
# Concatenate all the accumulated minutes.
return pd.DatetimeIndex(
np.concatenate(all_minutes), copy=False, tz='UTC',
)
def trading_day_distance(self, first_date, second_date):
first_date = self.normalize_date(first_date)
second_date = self.normalize_date(second_date)
# TODO: May be able to replace the following with searchsorted.
# Find leftmost item greater than or equal to day
i = bisect.bisect_left(self.trading_days, first_date)
if i == len(self.trading_days): # nothing found
return None
j = bisect.bisect_left(self.trading_days, second_date)
if j == len(self.trading_days):
return None
return j - i
def get_index(self, dt):
"""
Return the index of the given @dt, or the index of the preceding
trading day if the given dt is not in the trading calendar.
"""
ndt = self.normalize_date(dt)
if ndt in self.trading_days:
return self.trading_days.searchsorted(ndt)
else:
return self.trading_days.searchsorted(ndt) - 1
class SimulationParameters(object):
def __init__(self, period_start, period_end,
capital_base=10e3,
emission_rate='daily',
data_frequency='daily',
env=None):
self.period_start = period_start
self.period_end = period_end
self.capital_base = capital_base
self.emission_rate = emission_rate
self.data_frequency = data_frequency
# copied to algorithm's environment for runtime access
self.arena = 'backtest'
if env is not None:
self.update_internal_from_env(env=env)
def update_internal_from_env(self, env):
assert self.period_start <= self.period_end, \
"Period start falls after period end."
assert self.period_start <= env.last_trading_day, \
"Period start falls after the last known trading day."
assert self.period_end >= env.first_trading_day, \
"Period end falls before the first known trading day."
self.first_open = self._calculate_first_open(env)
self.last_close = self._calculate_last_close(env)
start_index = env.get_index(self.first_open)
end_index = env.get_index(self.last_close)
# take an inclusive slice of the environment's
# trading_days.
self.trading_days = env.trading_days[start_index:end_index + 1]
def _calculate_first_open(self, env):
"""
Finds the first trading day on or after self.period_start.
"""
first_open = self.period_start
one_day = datetime.timedelta(days=1)
while not env.is_trading_day(first_open):
first_open = first_open + one_day
mkt_open, _ = env.get_open_and_close(first_open)
return mkt_open
def _calculate_last_close(self, env):
"""
Finds the last trading day on or before self.period_end
"""
last_close = self.period_end
one_day = datetime.timedelta(days=1)
while not env.is_trading_day(last_close):
last_close = last_close - one_day
_, mkt_close = env.get_open_and_close(last_close)
return mkt_close
@property
def days_in_period(self):
"""return the number of trading days within the period [start, end)"""
return len(self.trading_days)
def __repr__(self):
return """
{class_name}(
period_start={period_start},
period_end={period_end},
capital_base={capital_base},
data_frequency={data_frequency},
emission_rate={emission_rate},
first_open={first_open},
last_close={last_close})\
""".format(class_name=self.__class__.__name__,
period_start=self.period_start,
period_end=self.period_end,
capital_base=self.capital_base,
data_frequency=self.data_frequency,
emission_rate=self.emission_rate,
first_open=self.first_open,
last_close=self.last_close)
|
MonoCloud/zipline
|
zipline/finance/trading.py
|
Python
|
apache-2.0
| 19,626
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer.functions import vae
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
if cuda.available:
cuda.init()
class TestGaussianKLDivergence(unittest.TestCase):
def setUp(self):
self.mean = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
self.ln_var = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
# Refer to Appendix B in the original paper
# Auto-Encoding Variational Bayes (http://arxiv.org/abs/1312.6114)
J = self.mean.size
self.expect = -(J + numpy.sum(self.ln_var)
- numpy.sum(self.mean * self.mean)
- numpy.sum(numpy.exp(self.ln_var))) * 0.5
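        # i.e. the closed form KL(N(mean, var) || N(0, 1))
        #   = -1/2 * sum_j (1 + ln(var_j) - mean_j**2 - var_j), with var = exp(ln_var)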
def check_gaussian_kl_divergence(self, mean, ln_var):
m = chainer.Variable(mean)
v = chainer.Variable(ln_var)
actual = cuda.to_cpu(vae.gaussian_kl_divergence(m, v).data)
gradient_check.assert_allclose(self.expect, actual)
@condition.retry(3)
def test_gaussian_kl_divergence_cpu(self):
self.check_gaussian_kl_divergence(self.mean, self.ln_var)
@attr.gpu
@condition.retry(3)
def test_gaussian_kl_divergence_gpu(self):
self.check_gaussian_kl_divergence(cuda.to_gpu(self.mean),
cuda.to_gpu(self.ln_var))
class TestBernoulliNLL(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
self.y = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
# Refer to Appendix C.1 in the original paper
# Auto-Encoding Variational Bayes (http://arxiv.org/abs/1312.6114)
p = 1 / (1 + numpy.exp(-self.y))
self.expect = - (numpy.sum(self.x * numpy.log(p))
+ numpy.sum((1 - self.x) * numpy.log(1 - p)))
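        # i.e. the negative Bernoulli log-likelihood
        #   -sum_j [x_j * log(p_j) + (1 - x_j) * log(1 - p_j)], with p = sigmoid(y)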
def check_bernoulli_nll(self, x_data, y_data):
x = chainer.Variable(x_data)
y = chainer.Variable(y_data)
actual = cuda.to_cpu(vae.bernoulli_nll(x, y).data)
gradient_check.assert_allclose(self.expect, actual)
@condition.retry(3)
def test_bernoulli_nll_cpu(self):
self.check_bernoulli_nll(self.x, self.y)
@attr.gpu
@condition.retry(3)
def test_bernoulli_nll_gpu(self):
self.check_bernoulli_nll(cuda.to_gpu(self.x),
cuda.to_gpu(self.y))
class TestGaussianNLL(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
self.mean = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
self.ln_var = numpy.random.uniform(-1, 1, (3,)).astype(numpy.float32)
# Refer to Appendix C.2 in the original paper
# Auto-Encoding Variational Bayes (http://arxiv.org/abs/1312.6114)
D = self.x.size
x_d = self.x - self.mean
var = numpy.exp(self.ln_var)
self.expect = (0.5 * D * numpy.log(2 * numpy.pi)
+ 0.5 * numpy.sum(self.ln_var)
+ numpy.sum(x_d * x_d / var) * 0.5)
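        # i.e. the negative log-density of N(mean, var) evaluated at x:
        #   D/2 * log(2*pi) + 1/2 * sum_j [ln(var_j) + (x_j - mean_j)**2 / var_j]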
def check_gaussian_nll(self, x_data, mean_data, ln_var_data):
x = chainer.Variable(x_data)
mean = chainer.Variable(mean_data)
ln_var = chainer.Variable(ln_var_data)
actual = cuda.to_cpu(vae.gaussian_nll(x, mean, ln_var).data)
gradient_check.assert_allclose(self.expect, actual)
@condition.retry(3)
def test_gaussian_nll_cpu(self):
self.check_gaussian_nll(self.x, self.mean, self.ln_var)
@attr.gpu
@condition.retry(3)
def test_gaussian_nll_gpu(self):
self.check_gaussian_nll(cuda.to_gpu(self.x),
cuda.to_gpu(self.mean),
cuda.to_gpu(self.ln_var))
testing.run_module(__name__, __file__)
|
woodshop/chainer
|
tests/functions_tests/test_vae.py
|
Python
|
mit
| 3,930
|
"""Utilities to help convert mp4s to fmp4s."""
import io
from typing import Generator
def find_box(segment: io.BytesIO, target_type: bytes, box_start: int = 0) -> Generator[int, None, None]:
"""Find location of first box (or sub_box if box_start provided) of given type."""
if box_start == 0:
box_end = segment.seek(0, io.SEEK_END)
segment.seek(0)
index = 0
else:
segment.seek(box_start)
box_end = box_start + int.from_bytes(segment.read(4), byteorder="big")
index = box_start + 8
while 1:
if index > box_end - 8: # End of box, not found
break
segment.seek(index)
box_header = segment.read(8)
if box_header[4:8] == target_type:
yield index
segment.seek(index)
index += int.from_bytes(box_header[0:4], byteorder="big")
def get_init(segment: io.BytesIO) -> bytes:
"""Get init section from fragmented mp4."""
moof_location = next(find_box(segment, b"moof"))
segment.seek(0)
return segment.read(moof_location)
def get_m4s(segment: io.BytesIO, sequence: int) -> bytes:
"""Get m4s section from fragmented mp4."""
moof_location = next(find_box(segment, b"moof"))
mfra_location = next(find_box(segment, b"mfra"))
segment.seek(moof_location)
return segment.read(mfra_location - moof_location)
def get_codec_string(segment: io.BytesIO) -> str:
"""Get RFC 6381 codec string."""
codecs = []
# Find moov
moov_location = next(find_box(segment, b"moov"))
# Find tracks
for trak_location in find_box(segment, b"trak", moov_location):
# Drill down to media info
mdia_location = next(find_box(segment, b"mdia", trak_location))
minf_location = next(find_box(segment, b"minf", mdia_location))
stbl_location = next(find_box(segment, b"stbl", minf_location))
stsd_location = next(find_box(segment, b"stsd", stbl_location))
# Get stsd box
segment.seek(stsd_location)
stsd_length = int.from_bytes(segment.read(4), byteorder="big")
segment.seek(stsd_location)
stsd_box = segment.read(stsd_length)
# Base Codec
codec = stsd_box[20:24].decode("utf-8")
# Handle H264
if (
codec in ("avc1", "avc2", "avc3", "avc4")
and stsd_length > 110
and stsd_box[106:110] == b"avcC"
):
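            # RFC 6381 AVC form: avc1.PPCCLL, where PP is profile_idc, CC is the
            # constraint/compatibility flags byte and LL is level_idc, each as
            # two hex digits taken from the avcC box.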
profile = stsd_box[111:112].hex()
compatibility = stsd_box[112:113].hex()
# Cap level at 4.1 for compatibility with some Google Cast devices
level = hex(min(stsd_box[113], 41))[2:]
codec += "." + profile + compatibility + level
# Handle H265
elif (
codec in ("hev1", "hvc1")
and stsd_length > 110
and stsd_box[106:110] == b"hvcC"
):
tmp_byte = int.from_bytes(stsd_box[111:112], byteorder="big")
# Profile Space
codec += "."
profile_space_map = {0: "", 1: "A", 2: "B", 3: "C"}
profile_space = tmp_byte >> 6
codec += profile_space_map[profile_space]
general_profile_idc = tmp_byte & 31
codec += str(general_profile_idc)
# Compatibility
codec += "."
general_profile_compatibility = int.from_bytes(
stsd_box[112:116], byteorder="big"
)
reverse = 0
for i in range(0, 32):
reverse |= general_profile_compatibility & 1
if i == 31:
break
reverse <<= 1
general_profile_compatibility >>= 1
codec += hex(reverse)[2:]
# Tier Flag
if (tmp_byte & 32) >> 5 == 0:
codec += ".L"
else:
codec += ".H"
codec += str(int.from_bytes(stsd_box[122:123], byteorder="big"))
# Constraint String
has_byte = False
constraint_string = ""
for i in range(121, 115, -1):
gci = int.from_bytes(stsd_box[i : i + 1], byteorder="big")
if gci or has_byte:
constraint_string = "." + hex(gci)[2:] + constraint_string
has_byte = True
codec += constraint_string
# Handle Audio
elif codec == "mp4a":
oti = None
dsi = None
# Parse ES Descriptors
oti_loc = stsd_box.find(b"\x04\x80\x80\x80")
if oti_loc > 0:
oti = stsd_box[oti_loc + 5 : oti_loc + 6].hex()
codec += f".{oti}"
dsi_loc = stsd_box.find(b"\x05\x80\x80\x80")
if dsi_loc > 0:
dsi_length = int.from_bytes(
stsd_box[dsi_loc + 4 : dsi_loc + 5], byteorder="big"
)
dsi_data = stsd_box[dsi_loc + 5 : dsi_loc + 5 + dsi_length]
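                # AudioSpecificConfig: the top 5 bits are the audio object type;
                # the escape value 31 means the real type is 32 plus the next 6 bits.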
dsi0 = int.from_bytes(dsi_data[0:1], byteorder="big")
dsi = (dsi0 & 248) >> 3
if dsi == 31 and len(dsi_data) >= 2:
dsi1 = int.from_bytes(dsi_data[1:2], byteorder="big")
dsi = 32 + ((dsi0 & 7) << 3) + ((dsi1 & 224) >> 5)
codec += f".{dsi}"
codecs.append(codec)
return ",".join(codecs)
|
tboyce021/home-assistant
|
homeassistant/components/stream/fmp4utils.py
|
Python
|
apache-2.0
| 5,317
|
"""
@package mi.instrument.nortek.driver
@file mi/instrument/nortek/driver.py
@author Bill Bollenbacher
@author Steve Foley
@author Ronald Ronquillo
@brief Base class for Nortek instruments
"""
__author__ = 'Rachel Manoni, Ronald Ronquillo'
__license__ = 'Apache 2.0'
import re
import time
import base64
from mi.core.log import get_logger, get_logging_metaclass
log = get_logger()
from mi.core.instrument.instrument_fsm import InstrumentFSM
from mi.core.instrument.chunker import StringChunker
from mi.core.instrument.data_particle import DataParticle, DataParticleKey, DataParticleValue
from mi.core.instrument.data_particle import CommonDataParticleType
from mi.core.instrument.instrument_protocol import CommandResponseInstrumentProtocol, DEFAULT_WRITE_DELAY
from mi.core.instrument.driver_dict import DriverDict, DriverDictKey
from mi.core.instrument.protocol_cmd_dict import ProtocolCommandDict
from mi.core.instrument.protocol_param_dict import ParameterDictVisibility
from mi.core.instrument.protocol_param_dict import ProtocolParameterDict
from mi.core.instrument.protocol_param_dict import ParameterDictType
from mi.core.driver_scheduler import DriverSchedulerConfigKey, TriggerType
from mi.core.instrument.instrument_driver import DriverEvent
from mi.core.instrument.instrument_driver import DriverConfigKey
from mi.core.instrument.instrument_driver import SingleConnectionInstrumentDriver
from mi.core.instrument.instrument_driver import DriverAsyncEvent
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.instrument.instrument_driver import DriverParameter
from mi.core.instrument.instrument_driver import ResourceAgentState
from mi.core.exceptions import InstrumentCommandException
from mi.core.exceptions import InstrumentStateException
from mi.core.exceptions import InstrumentTimeoutException
from mi.core.exceptions import InstrumentProtocolException
from mi.core.exceptions import InstrumentParameterException
from mi.core.exceptions import SampleException
from mi.core.time import get_timestamp_delayed
from mi.core.common import BaseEnum
# newline.
NEWLINE = '\n\r'
# default timeout.
TIMEOUT = 15
# allowable time delay for sync the clock
TIME_DELAY = 2
# sample collection is ~60 seconds, add padding
SAMPLE_TIMEOUT = 70
# set up the 'structure' lengths (in bytes) and sync/id/size constants
USER_CONFIG_LEN = 512
USER_CONFIG_SYNC_BYTES = '\xa5\x00\x00\x01'
HW_CONFIG_LEN = 48
HW_CONFIG_SYNC_BYTES = '\xa5\x05\x18\x00'
HEAD_CONFIG_LEN = 224
HEAD_CONFIG_SYNC_BYTES = '\xa5\x04\x70\x00'
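# Checksums are computed as this seed plus the 16-bit word sum of the structure,
# modulo 2**16 (see NortekProtocolParameterDict.calculate_checksum).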
CHECK_SUM_SEED = 0xb58c
HARDWARE_CONFIG_DATA_PATTERN = r'(%s)(.{14})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{12})(.{4})(.{2})(\x06\x06)' \
% HW_CONFIG_SYNC_BYTES
HARDWARE_CONFIG_DATA_REGEX = re.compile(HARDWARE_CONFIG_DATA_PATTERN, re.DOTALL)
HEAD_CONFIG_DATA_PATTERN = r'(%s)(.{2})(.{2})(.{2})(.{12})(.{176})(.{22})(.{2})(.{2})(\x06\x06)' % HEAD_CONFIG_SYNC_BYTES
HEAD_CONFIG_DATA_REGEX = re.compile(HEAD_CONFIG_DATA_PATTERN, re.DOTALL)
USER_CONFIG_DATA_PATTERN = r'(%s)(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})' \
r'(.{2})(.{2})(.{2})(.{2})(.{2})(.{6})(.{2})(.{6})(.{4})(.{2})(.{2})(.{2})(.{2})(.{2})' \
r'(.{2})(.{2})(.{2})(.{2})(.{180})(.{180})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})' \
r'(.{2})(.{2})(.{2})(.{2})(.{2})(.{2})(.{30})(.{16})(.{2})(\x06\x06)' % USER_CONFIG_SYNC_BYTES
USER_CONFIG_DATA_REGEX = re.compile(USER_CONFIG_DATA_PATTERN, re.DOTALL)
# min, sec, day, hour, year, month
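# (the clock bytes are BCD encoded, which is why each byte below is limited
# to a narrow hex range)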
CLOCK_DATA_PATTERN = r'([\x00-\x60])([\x00-\x60])([\x01-\x31])([\x00-\x24])([\x00-\x99])([\x01-\x12])\x06\x06'
CLOCK_DATA_REGEX = re.compile(CLOCK_DATA_PATTERN, re.DOTALL)
# Special combined regex to give battery voltage a "unique sync byte" to search for (non-unique regex workaround)
ID_BATTERY_DATA_PATTERN = r'(?:AQD|VEC) [0-9]{4} {0,6}\x06\x06([\x00-\xFF][\x13-\x46])\x06\x06'
ID_BATTERY_DATA_REGEX = re.compile(ID_BATTERY_DATA_PATTERN, re.DOTALL)
# ~5000 mV (0x1388) minimum to ~18000 mV (0x4650) maximum
BATTERY_DATA_PATTERN = r'([\x00-\xFF][\x13-\x46])\x06\x06'
BATTERY_DATA_REGEX = re.compile(BATTERY_DATA_PATTERN, re.DOTALL)
# [\x00, \x01, \x02, \x04, and \x05]
MODE_DATA_PATTERN = r'([\x00-\x02,\x04,\x05]\x00)(\x06\x06)'
MODE_DATA_REGEX = re.compile(MODE_DATA_PATTERN, re.DOTALL)
# ["VEC 8181", "AQD 8493 "]
ID_DATA_PATTERN = r'((?:AQD|VEC) [0-9]{4}) {0,6}\x06\x06'
ID_DATA_REGEX = re.compile(ID_DATA_PATTERN, re.DOTALL)
NORTEK_COMMON_REGEXES = [USER_CONFIG_DATA_REGEX,
HARDWARE_CONFIG_DATA_REGEX,
HEAD_CONFIG_DATA_REGEX,
ID_BATTERY_DATA_REGEX,
CLOCK_DATA_REGEX]
INTERVAL_TIME_REGEX = r"([0-9][0-9]:[0-9][0-9]:[0-9][0-9])"
class ParameterUnits(BaseEnum):
MILLIMETERS = 'mm'
CENTIMETERS = 'cm'
METERS = 'm'
HERTZ = 'Hz'
SECONDS = 's'
TIME_INTERVAL = 'HH:MM:SS'
METERS_PER_SECOND = 'm/s'
PARTS_PER_TRILLION = 'ppt'
COUNTS = 'counts'
class ScheduledJob(BaseEnum):
"""
List of schedulable events
"""
CLOCK_SYNC = 'clock_sync'
ACQUIRE_STATUS = 'acquire_status'
class NortekDataParticleType(BaseEnum):
"""
List of particles
"""
RAW = CommonDataParticleType.RAW
HARDWARE_CONFIG = 'vel3d_cd_hardware_configuration'
HEAD_CONFIG = 'vel3d_cd_head_configuration'
USER_CONFIG = 'vel3d_cd_user_configuration'
CLOCK = 'vel3d_clock_data'
BATTERY = 'vel3d_cd_battery_voltage'
ID_STRING = 'vel3d_cd_identification_string'
class InstrumentPrompts(BaseEnum):
"""
Device prompts.
"""
AWAKE_NACKS = '\x15\x15\x15\x15\x15\x15'
COMMAND_MODE = 'Command mode'
CONFIRMATION = 'Confirm:'
Z_ACK = '\x06\x06' # attach a 'Z' to the front of these two items to force them to the end of the list
Z_NACK = '\x15\x15' # so the other responses will have priority to be detected if they are present
class InstrumentCmds(BaseEnum):
"""
List of instrument commands
"""
CONFIGURE_INSTRUMENT = 'CC' # sets the user configuration
SOFT_BREAK_FIRST_HALF = '@@@@@@'
SOFT_BREAK_SECOND_HALF = 'K1W%!Q'
AUTOSAMPLE_BREAK = '@'
READ_REAL_TIME_CLOCK = 'RC'
SET_REAL_TIME_CLOCK = 'SC'
CMD_WHAT_MODE = 'II' # to determine the mode of the instrument
READ_USER_CONFIGURATION = 'GC'
READ_HW_CONFIGURATION = 'GP'
READ_HEAD_CONFIGURATION = 'GH'
READ_BATTERY_VOLTAGE = 'BV'
READ_ID = 'ID'
START_MEASUREMENT_WITHOUT_RECORDER = 'ST'
ACQUIRE_DATA = 'AD'
CONFIRMATION = 'MC' # confirm a break request
SAMPLE_WHAT_MODE = 'I'
class InstrumentModes(BaseEnum):
"""
List of possible modes the instrument can be in
"""
FIRMWARE_UPGRADE = '\x00\x00\x06\x06'
MEASUREMENT = '\x01\x00\x06\x06'
COMMAND = '\x02\x00\x06\x06'
DATA_RETRIEVAL = '\x04\x00\x06\x06'
CONFIRMATION = '\x05\x00\x06\x06'
class ProtocolState(BaseEnum):
"""
List of protocol states
"""
UNKNOWN = DriverProtocolState.UNKNOWN
COMMAND = DriverProtocolState.COMMAND
AUTOSAMPLE = DriverProtocolState.AUTOSAMPLE
DIRECT_ACCESS = DriverProtocolState.DIRECT_ACCESS
class ProtocolEvent(BaseEnum):
"""
List of protocol events
"""
# common events from base class
ENTER = DriverEvent.ENTER
EXIT = DriverEvent.EXIT
GET = DriverEvent.GET
SET = DriverEvent.SET
DISCOVER = DriverEvent.DISCOVER
ACQUIRE_SAMPLE = DriverEvent.ACQUIRE_SAMPLE
ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS
START_AUTOSAMPLE = DriverEvent.START_AUTOSAMPLE
STOP_AUTOSAMPLE = DriverEvent.STOP_AUTOSAMPLE
START_DIRECT = DriverEvent.START_DIRECT
STOP_DIRECT = DriverEvent.STOP_DIRECT
EXECUTE_DIRECT = DriverEvent.EXECUTE_DIRECT
CLOCK_SYNC = DriverEvent.CLOCK_SYNC
SCHEDULED_CLOCK_SYNC = DriverEvent.SCHEDULED_CLOCK_SYNC
RESET = DriverEvent.RESET
# instrument specific events
SET_CONFIGURATION = "PROTOCOL_EVENT_CMD_SET_CONFIGURATION"
READ_CLOCK = "PROTOCOL_EVENT_CMD_READ_CLOCK"
READ_MODE = "PROTOCOL_EVENT_CMD_READ_MODE"
POWER_DOWN = "PROTOCOL_EVENT_CMD_POWER_DOWN"
READ_BATTERY_VOLTAGE = "PROTOCOL_EVENT_CMD_READ_BATTERY_VOLTAGE"
READ_ID = "PROTOCOL_EVENT_CMD_READ_ID"
GET_HW_CONFIGURATION = "PROTOCOL_EVENT_CMD_GET_HW_CONFIGURATION"
GET_HEAD_CONFIGURATION = "PROTOCOL_EVENT_CMD_GET_HEAD_CONFIGURATION"
GET_USER_CONFIGURATION = "PROTOCOL_EVENT_GET_USER_CONFIGURATION"
SCHEDULED_ACQUIRE_STATUS = "PROTOCOL_EVENT_SCHEDULED_ACQUIRE_STATUS"
class Capability(BaseEnum):
"""
Capabilities that are exposed to the user (subset of above)
"""
GET = ProtocolEvent.GET
SET = ProtocolEvent.SET
ACQUIRE_SAMPLE = ProtocolEvent.ACQUIRE_SAMPLE
START_AUTOSAMPLE = ProtocolEvent.START_AUTOSAMPLE
STOP_AUTOSAMPLE = ProtocolEvent.STOP_AUTOSAMPLE
CLOCK_SYNC = ProtocolEvent.CLOCK_SYNC
START_DIRECT = DriverEvent.START_DIRECT
STOP_DIRECT = DriverEvent.STOP_DIRECT
ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS
def hw_config_to_dict(input):
"""
Translate a hardware configuration string into a dictionary, keys being
from the NortekHardwareConfigDataParticleKey class.
@param input The incoming string of characters of the correct length.
Should be the result of a GP command
@retval A dictionary with the translated values
@throws SampleException If there is a problem with sample creation
"""
if str(input[-2:]) == InstrumentPrompts.Z_ACK:
if len(input) != HW_CONFIG_LEN + 2:
raise SampleException("Invalid input when parsing user config. Got input of size %s with an ACK" % len(input))
else:
if len(input) != HW_CONFIG_LEN:
raise SampleException("Invalid input when parsing user config. Got input of size %s with no ACK" % len(input))
parsed = {}
parsed[NortekHardwareConfigDataParticleKey.SERIAL_NUM] = input[4:18]
parsed[NortekHardwareConfigDataParticleKey.CONFIG] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[18:20])
parsed[NortekHardwareConfigDataParticleKey.BOARD_FREQUENCY] = NortekProtocolParameterDict.convert_word_to_int(input[20:22])
parsed[NortekHardwareConfigDataParticleKey.PIC_VERSION] = NortekProtocolParameterDict.convert_word_to_int(input[22:24])
parsed[NortekHardwareConfigDataParticleKey.HW_REVISION] = NortekProtocolParameterDict.convert_word_to_int(input[24:26])
parsed[NortekHardwareConfigDataParticleKey.RECORDER_SIZE] = NortekProtocolParameterDict.convert_word_to_int(input[26:28])
parsed[NortekHardwareConfigDataParticleKey.STATUS] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[28:30])
parsed[NortekHardwareConfigDataParticleKey.FW_VERSION] = input[42:46]
parsed[NortekHardwareConfigDataParticleKey.CHECKSUM] = NortekProtocolParameterDict.convert_word_to_int(input[46:48])
return parsed
class NortekHardwareConfigDataParticleKey(BaseEnum):
"""
Particle key for the hw config
"""
SERIAL_NUM = 'instrmt_type_serial_number'
RECORDER_INSTALLED = 'recorder_installed'
COMPASS_INSTALLED = 'compass_installed'
BOARD_FREQUENCY = 'board_frequency'
PIC_VERSION = 'pic_version'
HW_REVISION = 'hardware_revision'
RECORDER_SIZE = 'recorder_size'
VELOCITY_RANGE = 'velocity_range'
FW_VERSION = 'firmware_version'
STATUS = 'status'
CONFIG = 'config'
CHECKSUM = 'checksum'
class NortekHardwareConfigDataParticle(DataParticle):
"""
Routine for parsing hardware config data into a data particle structure for the Nortek sensor.
"""
_data_particle_type = NortekDataParticleType.HARDWARE_CONFIG
def _build_parsed_values(self):
"""
Take the hardware config data and parse it into
values with appropriate tags.
"""
working_value = hw_config_to_dict(self.raw_data)
for key in working_value.keys():
if working_value[key] is None:
raise SampleException("No %s value parsed" % key)
working_value[NortekHardwareConfigDataParticleKey.RECORDER_INSTALLED] = working_value[NortekHardwareConfigDataParticleKey.CONFIG][-1]
working_value[NortekHardwareConfigDataParticleKey.COMPASS_INSTALLED] = working_value[NortekHardwareConfigDataParticleKey.CONFIG][-2]
working_value[NortekHardwareConfigDataParticleKey.VELOCITY_RANGE] = working_value[NortekHardwareConfigDataParticleKey.STATUS][-1]
# report values
result = [{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.SERIAL_NUM, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.SERIAL_NUM]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.RECORDER_INSTALLED, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.RECORDER_INSTALLED]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.COMPASS_INSTALLED, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.COMPASS_INSTALLED]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.BOARD_FREQUENCY, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.BOARD_FREQUENCY]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.PIC_VERSION, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.PIC_VERSION]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.HW_REVISION, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.HW_REVISION]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.RECORDER_SIZE, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.RECORDER_SIZE]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.VELOCITY_RANGE, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.VELOCITY_RANGE]},
{DataParticleKey.VALUE_ID: NortekHardwareConfigDataParticleKey.FW_VERSION, DataParticleKey.VALUE: working_value[NortekHardwareConfigDataParticleKey.FW_VERSION]}]
calculated_checksum = NortekProtocolParameterDict.calculate_checksum(self.raw_data, HW_CONFIG_LEN)
if working_value[NortekHardwareConfigDataParticleKey.CHECKSUM] != calculated_checksum:
log.warn("Calculated checksum: %s did not match packet checksum: %s",
calculated_checksum, working_value[NortekHardwareConfigDataParticleKey.CHECKSUM])
self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED
log.debug('NortekHardwareConfigDataParticle: particle=%s', result)
return result
def head_config_to_dict(input):
"""
Translate a head configuration string into a dictionary, keys being
from the NortekHeadConfigDataParticleKey class.
@param input The incoming string of characters of the correct length.
Should be the result of a GH command
@retval A dictionary with the translated values
@throws SampleException If there is a problem with sample creation
"""
if str(input[-2:]) == InstrumentPrompts.Z_ACK:
if len(input) != HEAD_CONFIG_LEN + 2:
raise SampleException("Invalid input when parsing user config. Got input of size %s with an ACK" % len(input))
else:
if len(input) != HEAD_CONFIG_LEN:
raise SampleException("Invalid input when parsing user config. Got input of size %s with no ACK" % len(input))
parsed = {}
parsed[NortekHeadConfigDataParticleKey.CONFIG] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[4:6])
parsed[NortekHeadConfigDataParticleKey.HEAD_FREQ] = NortekProtocolParameterDict.convert_word_to_int(input[6:8])
parsed[NortekHeadConfigDataParticleKey.HEAD_TYPE] = NortekProtocolParameterDict.convert_word_to_int(input[8:10])
parsed[NortekHeadConfigDataParticleKey.HEAD_SERIAL] = NortekProtocolParameterDict.convert_bytes_to_string(input[10:22])
parsed[NortekHeadConfigDataParticleKey.SYSTEM_DATA] = base64.b64encode(input[22:198])
parsed[NortekHeadConfigDataParticleKey.NUM_BEAMS] = NortekProtocolParameterDict.convert_word_to_int(input[220:222])
parsed[NortekHeadConfigDataParticleKey.CHECKSUM] = NortekProtocolParameterDict.convert_word_to_int(input[222:224])
return parsed
class NortekHeadConfigDataParticleKey(BaseEnum):
"""
Particle key for the head config
"""
PRESSURE_SENSOR = 'pressure_sensor'
MAG_SENSOR = 'magnetometer_sensor'
TILT_SENSOR = 'tilt_sensor'
TILT_SENSOR_MOUNT = 'tilt_sensor_mounting'
HEAD_FREQ = 'head_frequency'
HEAD_TYPE = 'head_type'
HEAD_SERIAL = 'head_serial_number'
SYSTEM_DATA = 'system_data'
NUM_BEAMS = 'number_beams'
CONFIG = 'config'
CHECKSUM = 'checksum'
class NortekHeadConfigDataParticle(DataParticle):
"""
Routine for parsing head config data into a data particle structure for the Nortek sensor.
"""
_data_particle_type = NortekDataParticleType.HEAD_CONFIG
def _build_parsed_values(self):
"""
Take the head config data and parse it into
values with appropriate tags.
@throws SampleException If there is a problem with sample creation
"""
working_value = head_config_to_dict(self.raw_data)
for key in working_value.keys():
if working_value[key] is None:
raise SampleException("No %s value parsed" % key)
working_value[NortekHeadConfigDataParticleKey.PRESSURE_SENSOR] = working_value[NortekHeadConfigDataParticleKey.CONFIG][-1]
working_value[NortekHeadConfigDataParticleKey.MAG_SENSOR] = working_value[NortekHeadConfigDataParticleKey.CONFIG][-2]
working_value[NortekHeadConfigDataParticleKey.TILT_SENSOR] = working_value[NortekHeadConfigDataParticleKey.CONFIG][-3]
working_value[NortekHeadConfigDataParticleKey.TILT_SENSOR_MOUNT] = working_value[NortekHeadConfigDataParticleKey.CONFIG][-4]
# report values
result = [{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.PRESSURE_SENSOR, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.PRESSURE_SENSOR]},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.MAG_SENSOR, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.MAG_SENSOR]},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.TILT_SENSOR, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.TILT_SENSOR]},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.TILT_SENSOR_MOUNT, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.TILT_SENSOR_MOUNT]},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.HEAD_FREQ, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.HEAD_FREQ]},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.HEAD_TYPE, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.HEAD_TYPE]},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.HEAD_SERIAL, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.HEAD_SERIAL]},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.SYSTEM_DATA, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.SYSTEM_DATA],
DataParticleKey.BINARY: True},
{DataParticleKey.VALUE_ID: NortekHeadConfigDataParticleKey.NUM_BEAMS, DataParticleKey.VALUE: working_value[NortekHeadConfigDataParticleKey.NUM_BEAMS]}]
calculated_checksum = NortekProtocolParameterDict.calculate_checksum(self.raw_data, HEAD_CONFIG_LEN)
if working_value[NortekHeadConfigDataParticleKey.CHECKSUM] != calculated_checksum:
log.warn("Calculated checksum: %s did not match packet checksum: %s",
calculated_checksum, working_value[NortekHeadConfigDataParticleKey.CHECKSUM])
self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED
log.debug('NortekHeadConfigDataParticle: particle=%s', result)
return result
def user_config_to_dict(input):
"""
Translate a user configuration string into a dictionary, keys being
from the NortekUserConfigDataParticleKey class.
@param input The incoming string of characters of the correct length.
Should be the result of a GC command
@retval A dictionary with the translated values
@throws SampleException If there is a problem with sample creation
"""
#check for the ACK and the response is the correct length
if str(input[-2:]) == InstrumentPrompts.Z_ACK:
if len(input) != USER_CONFIG_LEN + 2:
raise SampleException("Invalid input when parsing user config. Got input of size %s with an ACK" % len(input))
else:
if len(input) != USER_CONFIG_LEN:
raise SampleException("Invalid input when parsing user config. Got input of size %s with no ACK" % len(input))
parsed = {}
parsed[NortekUserConfigDataParticleKey.TX_LENGTH] = NortekProtocolParameterDict.convert_word_to_int(input[4:6])
parsed[NortekUserConfigDataParticleKey.BLANK_DIST] = NortekProtocolParameterDict.convert_word_to_int(input[6:8])
parsed[NortekUserConfigDataParticleKey.RX_LENGTH] = NortekProtocolParameterDict.convert_word_to_int(input[8:10])
parsed[NortekUserConfigDataParticleKey.TIME_BETWEEN_PINGS] = NortekProtocolParameterDict.convert_word_to_int(input[10:12])
parsed[NortekUserConfigDataParticleKey.TIME_BETWEEN_BURSTS] = NortekProtocolParameterDict.convert_word_to_int(input[12:14])
parsed[NortekUserConfigDataParticleKey.NUM_PINGS] = NortekProtocolParameterDict.convert_word_to_int(input[14:16])
parsed[NortekUserConfigDataParticleKey.AVG_INTERVAL] = NortekProtocolParameterDict.convert_word_to_int(input[16:18])
parsed[NortekUserConfigDataParticleKey.NUM_BEAMS] = NortekProtocolParameterDict.convert_word_to_int(input[18:20])
parsed[NortekUserConfigDataParticleKey.TCR] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[20:22])
parsed[NortekUserConfigDataParticleKey.PCR] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[22:24])
parsed[NortekUserConfigDataParticleKey.COMPASS_UPDATE_RATE] = NortekProtocolParameterDict.convert_word_to_int(input[30:32])
parsed[NortekUserConfigDataParticleKey.COORDINATE_SYSTEM] = NortekProtocolParameterDict.convert_word_to_int(input[32:34])
parsed[NortekUserConfigDataParticleKey.NUM_CELLS] = NortekProtocolParameterDict.convert_word_to_int(input[34:36])
parsed[NortekUserConfigDataParticleKey.CELL_SIZE] = NortekProtocolParameterDict.convert_word_to_int(input[36:38])
parsed[NortekUserConfigDataParticleKey.MEASUREMENT_INTERVAL] = NortekProtocolParameterDict.convert_word_to_int(input[38:40])
parsed[NortekUserConfigDataParticleKey.DEPLOYMENT_NAME] = NortekProtocolParameterDict.convert_bytes_to_string(input[40:46])
parsed[NortekUserConfigDataParticleKey.WRAP_MODE] = NortekProtocolParameterDict.convert_word_to_int(input[46:48])
parsed[NortekUserConfigDataParticleKey.DEPLOY_START_TIME] = NortekProtocolParameterDict.convert_words_to_datetime(input[48:54])
parsed[NortekUserConfigDataParticleKey.DIAG_INTERVAL] = NortekProtocolParameterDict.convert_double_word_to_int(input[54:58])
parsed[NortekUserConfigDataParticleKey.MODE] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[58:60])
parsed[NortekUserConfigDataParticleKey.SOUND_SPEED_ADJUST] = NortekProtocolParameterDict.convert_word_to_int(input[60:62])
parsed[NortekUserConfigDataParticleKey.NUM_DIAG_SAMPLES] = NortekProtocolParameterDict.convert_word_to_int(input[62:64])
parsed[NortekUserConfigDataParticleKey.NUM_BEAMS_PER_CELL] = NortekProtocolParameterDict.convert_word_to_int(input[64:66])
parsed[NortekUserConfigDataParticleKey.NUM_PINGS_DIAG] = NortekProtocolParameterDict.convert_word_to_int(input[66:68])
parsed[NortekUserConfigDataParticleKey.MODE_TEST] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[68:70])
parsed[NortekUserConfigDataParticleKey.ANALOG_INPUT_ADDR] = NortekProtocolParameterDict.convert_word_to_int(input[70:72])
parsed[NortekUserConfigDataParticleKey.SW_VER] = NortekProtocolParameterDict.convert_word_to_int(input[72:74])
parsed[NortekUserConfigDataParticleKey.VELOCITY_ADJ_FACTOR] = base64.b64encode(input[76:256])
parsed[NortekUserConfigDataParticleKey.FILE_COMMENTS] = NortekProtocolParameterDict.convert_bytes_to_string(input[256:436])
parsed[NortekUserConfigDataParticleKey.WAVE_MODE] = NortekProtocolParameterDict.convert_bytes_to_bit_field(input[436:438])
parsed[NortekUserConfigDataParticleKey.PERCENT_WAVE_CELL_POS] = NortekProtocolParameterDict.convert_word_to_int(input[438:440])
parsed[NortekUserConfigDataParticleKey.WAVE_TX_PULSE] = NortekProtocolParameterDict.convert_word_to_int(input[440:442])
parsed[NortekUserConfigDataParticleKey.FIX_WAVE_BLANK_DIST] = NortekProtocolParameterDict.convert_word_to_int(input[442:444])
parsed[NortekUserConfigDataParticleKey.WAVE_CELL_SIZE] = NortekProtocolParameterDict.convert_word_to_int(input[444:446])
parsed[NortekUserConfigDataParticleKey.NUM_DIAG_PER_WAVE] = NortekProtocolParameterDict.convert_word_to_int(input[446:448])
parsed[NortekUserConfigDataParticleKey.NUM_SAMPLE_PER_BURST] = NortekProtocolParameterDict.convert_word_to_int(input[452:454])
parsed[NortekUserConfigDataParticleKey.ANALOG_SCALE_FACTOR] = NortekProtocolParameterDict.convert_word_to_int(input[456:458])
parsed[NortekUserConfigDataParticleKey.CORRELATION_THRS] = NortekProtocolParameterDict.convert_word_to_int(input[458:460])
parsed[NortekUserConfigDataParticleKey.TX_PULSE_LEN_2ND] = NortekProtocolParameterDict.convert_word_to_int(input[462:464])
parsed[NortekUserConfigDataParticleKey.FILTER_CONSTANTS] = base64.b64encode(input[494:510])
parsed[NortekUserConfigDataParticleKey.CHECKSUM] = NortekProtocolParameterDict.convert_word_to_int(input[510:512])
return parsed
class NortekUserConfigDataParticleKey(BaseEnum):
"""
User Config particle keys
"""
TX_LENGTH = 'transmit_pulse_length'
BLANK_DIST = 'blanking_distance'
RX_LENGTH = 'receive_length'
TIME_BETWEEN_PINGS = 'time_between_pings'
TIME_BETWEEN_BURSTS = 'time_between_bursts'
NUM_PINGS = 'number_pings'
AVG_INTERVAL = 'average_interval'
NUM_BEAMS = 'number_beams'
PROFILE_TYPE = 'profile_type'
MODE_TYPE = 'mode_type'
TCR = 'tcr'
PCR = 'pcr'
POWER_TCM1 = 'power_level_tcm1'
POWER_TCM2 = 'power_level_tcm2'
SYNC_OUT_POSITION = 'sync_out_position'
SAMPLE_ON_SYNC = 'sample_on_sync'
START_ON_SYNC = 'start_on_sync'
POWER_PCR1 = 'power_level_pcr1'
POWER_PCR2 = 'power_level_pcr2'
COMPASS_UPDATE_RATE = 'compass_update_rate'
COORDINATE_SYSTEM = 'coordinate_system'
NUM_CELLS = 'number_cells'
CELL_SIZE = 'cell_size'
MEASUREMENT_INTERVAL = 'measurement_interval'
DEPLOYMENT_NAME = 'deployment_name'
WRAP_MODE = 'wrap_moder'
DEPLOY_START_TIME = 'deployment_start_time'
DIAG_INTERVAL = 'diagnostics_interval'
MODE = 'mode'
USE_SPEC_SOUND_SPEED = 'use_specified_sound_speed'
DIAG_MODE_ON = 'diagnostics_mode_enable'
ANALOG_OUTPUT_ON = 'analog_output_enable'
OUTPUT_FORMAT = 'output_format_nortek'
SCALING = 'scaling'
SERIAL_OUT_ON = 'serial_output_enable'
STAGE_ON = 'stage_enable'
ANALOG_POWER_OUTPUT = 'analog_power_output'
SOUND_SPEED_ADJUST = 'sound_speed_adjust_factor'
NUM_DIAG_SAMPLES = 'number_diagnostics_samples'
NUM_BEAMS_PER_CELL = 'number_beams_per_cell'
NUM_PINGS_DIAG = 'number_pings_diagnostic'
MODE_TEST = 'mode_test'
USE_DSP_FILTER = 'use_dsp_filter'
FILTER_DATA_OUTPUT = 'filter_data_output'
ANALOG_INPUT_ADDR = 'analog_input_address'
SW_VER = 'software_version'
VELOCITY_ADJ_FACTOR = 'velocity_adjustment_factor'
FILE_COMMENTS = 'file_comments'
WAVE_MODE = 'wave_mode'
WAVE_DATA_RATE = 'wave_data_rate'
WAVE_CELL_POS = 'wave_cell_position'
DYNAMIC_POS_TYPE = 'dynamic_position_type'
PERCENT_WAVE_CELL_POS = 'percent_wave_cell_position'
WAVE_TX_PULSE = 'wave_transmit_pulse'
FIX_WAVE_BLANK_DIST = 'fixed_wave_blanking_distance'
WAVE_CELL_SIZE = 'wave_measurement_cell_size'
NUM_DIAG_PER_WAVE = 'number_diagnostics_per_wave'
NUM_SAMPLE_PER_BURST = 'number_samples_per_burst'
ANALOG_SCALE_FACTOR = 'analog_scale_factor'
CORRELATION_THRS = 'correlation_threshold'
TX_PULSE_LEN_2ND = 'transmit_pulse_length_2nd'
FILTER_CONSTANTS = 'filter_constants'
CHECKSUM = 'checksum'
class Parameter(DriverParameter):
"""
Device parameters
"""
# user configuration
TRANSMIT_PULSE_LENGTH = NortekUserConfigDataParticleKey.TX_LENGTH
BLANKING_DISTANCE = NortekUserConfigDataParticleKey.BLANK_DIST # T2
RECEIVE_LENGTH = NortekUserConfigDataParticleKey.RX_LENGTH # T3
TIME_BETWEEN_PINGS = NortekUserConfigDataParticleKey.TIME_BETWEEN_PINGS # T4
TIME_BETWEEN_BURST_SEQUENCES = NortekUserConfigDataParticleKey.TIME_BETWEEN_BURSTS # T5
NUMBER_PINGS = NortekUserConfigDataParticleKey.NUM_PINGS # number of beam sequences per burst
AVG_INTERVAL = NortekUserConfigDataParticleKey.AVG_INTERVAL
USER_NUMBER_BEAMS = NortekUserConfigDataParticleKey.NUM_BEAMS
TIMING_CONTROL_REGISTER = NortekUserConfigDataParticleKey.TCR
POWER_CONTROL_REGISTER = NortekUserConfigDataParticleKey.PCR
A1_1_SPARE = 'a1_1spare'
B0_1_SPARE = 'b0_1spare'
B1_1_SPARE = 'b1_1spare'
COMPASS_UPDATE_RATE = NortekUserConfigDataParticleKey.COMPASS_UPDATE_RATE
COORDINATE_SYSTEM = NortekUserConfigDataParticleKey.COORDINATE_SYSTEM
NUMBER_BINS = NortekUserConfigDataParticleKey.NUM_CELLS
BIN_LENGTH = NortekUserConfigDataParticleKey.CELL_SIZE
MEASUREMENT_INTERVAL = NortekUserConfigDataParticleKey.MEASUREMENT_INTERVAL
DEPLOYMENT_NAME = NortekUserConfigDataParticleKey.DEPLOYMENT_NAME
WRAP_MODE = NortekUserConfigDataParticleKey.WRAP_MODE
CLOCK_DEPLOY = NortekUserConfigDataParticleKey.DEPLOY_START_TIME
DIAGNOSTIC_INTERVAL = NortekUserConfigDataParticleKey.DIAG_INTERVAL
MODE = NortekUserConfigDataParticleKey.MODE
ADJUSTMENT_SOUND_SPEED = NortekUserConfigDataParticleKey.SOUND_SPEED_ADJUST
NUMBER_SAMPLES_DIAGNOSTIC = NortekUserConfigDataParticleKey.NUM_DIAG_SAMPLES
NUMBER_BEAMS_CELL_DIAGNOSTIC = NortekUserConfigDataParticleKey.NUM_BEAMS_PER_CELL
NUMBER_PINGS_DIAGNOSTIC = NortekUserConfigDataParticleKey.NUM_PINGS_DIAG
MODE_TEST = NortekUserConfigDataParticleKey.MODE_TEST
ANALOG_INPUT_ADDR = NortekUserConfigDataParticleKey.ANALOG_INPUT_ADDR
SW_VERSION = NortekUserConfigDataParticleKey.SW_VER
USER_1_SPARE = 'spare_1'
VELOCITY_ADJ_TABLE = NortekUserConfigDataParticleKey.VELOCITY_ADJ_FACTOR
COMMENTS = NortekUserConfigDataParticleKey.FILE_COMMENTS
WAVE_MEASUREMENT_MODE = NortekUserConfigDataParticleKey.WAVE_MODE
DYN_PERCENTAGE_POSITION = NortekUserConfigDataParticleKey.PERCENT_WAVE_CELL_POS
WAVE_TRANSMIT_PULSE = NortekUserConfigDataParticleKey.WAVE_TX_PULSE
WAVE_BLANKING_DISTANCE = NortekUserConfigDataParticleKey.FIX_WAVE_BLANK_DIST
WAVE_CELL_SIZE = NortekUserConfigDataParticleKey.WAVE_CELL_SIZE
NUMBER_DIAG_SAMPLES = NortekUserConfigDataParticleKey.NUM_DIAG_PER_WAVE
A1_2_SPARE = 'a1_2spare'
B0_2_SPARE = 'b0_2spare'
NUMBER_SAMPLES_PER_BURST = NortekUserConfigDataParticleKey.NUM_SAMPLE_PER_BURST
USER_2_SPARE = 'spare_2'
ANALOG_OUTPUT_SCALE = NortekUserConfigDataParticleKey.ANALOG_SCALE_FACTOR
CORRELATION_THRESHOLD = NortekUserConfigDataParticleKey.CORRELATION_THRS
USER_3_SPARE = 'spare_3'
TRANSMIT_PULSE_LENGTH_SECOND_LAG = NortekUserConfigDataParticleKey.TX_PULSE_LEN_2ND
USER_4_SPARE = 'spare_4'
QUAL_CONSTANTS = NortekUserConfigDataParticleKey.FILTER_CONSTANTS
class EngineeringParameter(DriverParameter):
"""
Driver Parameters (aka, engineering parameters)
"""
CLOCK_SYNC_INTERVAL = 'ClockSyncInterval'
ACQUIRE_STATUS_INTERVAL = 'AcquireStatusInterval'
class NortekUserConfigDataParticle(DataParticle):
"""
Routine for parsing user config data into a data particle structure for the Nortek sensor.
"""
_data_particle_type = NortekDataParticleType.USER_CONFIG
def _build_parsed_values(self):
"""
Take the user config data and parse it into
values with appropriate tags.
@throws SampleException If there is a problem with sample creation
"""
working_value = user_config_to_dict(self.raw_data)
for key in working_value.keys():
if working_value[key] is None:
raise SampleException("No %s value parsed" % key)
# Break down the byte data to its bits and apply to particle keys
working_value[NortekUserConfigDataParticleKey.PROFILE_TYPE] = working_value[NortekUserConfigDataParticleKey.TCR][-2]
working_value[NortekUserConfigDataParticleKey.MODE_TYPE] = working_value[NortekUserConfigDataParticleKey.TCR][-3]
working_value[NortekUserConfigDataParticleKey.POWER_TCM1] = working_value[NortekUserConfigDataParticleKey.TCR][-6]
working_value[NortekUserConfigDataParticleKey.POWER_TCM2] = working_value[NortekUserConfigDataParticleKey.TCR][-7]
working_value[NortekUserConfigDataParticleKey.SYNC_OUT_POSITION] = working_value[NortekUserConfigDataParticleKey.TCR][-8]
working_value[NortekUserConfigDataParticleKey.SAMPLE_ON_SYNC] = working_value[NortekUserConfigDataParticleKey.TCR][-9]
working_value[NortekUserConfigDataParticleKey.START_ON_SYNC] = working_value[NortekUserConfigDataParticleKey.TCR][-10]
working_value[NortekUserConfigDataParticleKey.POWER_PCR1] = working_value[NortekUserConfigDataParticleKey.PCR][-6]
working_value[NortekUserConfigDataParticleKey.POWER_PCR2] = working_value[NortekUserConfigDataParticleKey.PCR][-7]
working_value[NortekUserConfigDataParticleKey.USE_SPEC_SOUND_SPEED] = working_value[NortekUserConfigDataParticleKey.MODE][-1]
working_value[NortekUserConfigDataParticleKey.DIAG_MODE_ON] = working_value[NortekUserConfigDataParticleKey.MODE][-2]
working_value[NortekUserConfigDataParticleKey.ANALOG_OUTPUT_ON] = working_value[NortekUserConfigDataParticleKey.MODE][-3]
working_value[NortekUserConfigDataParticleKey.OUTPUT_FORMAT] = working_value[NortekUserConfigDataParticleKey.MODE][-4]
working_value[NortekUserConfigDataParticleKey.SCALING] = working_value[NortekUserConfigDataParticleKey.MODE][-5]
working_value[NortekUserConfigDataParticleKey.SERIAL_OUT_ON] = working_value[NortekUserConfigDataParticleKey.MODE][-6]
working_value[NortekUserConfigDataParticleKey.STAGE_ON] = working_value[NortekUserConfigDataParticleKey.MODE][-8]
working_value[NortekUserConfigDataParticleKey.ANALOG_POWER_OUTPUT] = working_value[NortekUserConfigDataParticleKey.MODE][-9]
working_value[NortekUserConfigDataParticleKey.USE_DSP_FILTER] = working_value[NortekUserConfigDataParticleKey.MODE_TEST][-1]
working_value[NortekUserConfigDataParticleKey.FILTER_DATA_OUTPUT] = working_value[NortekUserConfigDataParticleKey.MODE_TEST][-2]
working_value[NortekUserConfigDataParticleKey.WAVE_DATA_RATE] = working_value[NortekUserConfigDataParticleKey.WAVE_MODE][-1]
working_value[NortekUserConfigDataParticleKey.WAVE_CELL_POS] = working_value[NortekUserConfigDataParticleKey.WAVE_MODE][-2]
working_value[NortekUserConfigDataParticleKey.DYNAMIC_POS_TYPE] = working_value[NortekUserConfigDataParticleKey.WAVE_MODE][-3]
# report values
result = [{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.TX_LENGTH, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.TX_LENGTH]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.BLANK_DIST, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.BLANK_DIST]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.RX_LENGTH, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.RX_LENGTH]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.TIME_BETWEEN_PINGS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.TIME_BETWEEN_PINGS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.TIME_BETWEEN_BURSTS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.TIME_BETWEEN_BURSTS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_PINGS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_PINGS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.AVG_INTERVAL, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.AVG_INTERVAL]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_BEAMS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_BEAMS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.PROFILE_TYPE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.PROFILE_TYPE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.MODE_TYPE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.MODE_TYPE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.POWER_TCM1, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.POWER_TCM1]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.POWER_TCM2, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.POWER_TCM2]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.SYNC_OUT_POSITION, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.SYNC_OUT_POSITION]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.SAMPLE_ON_SYNC, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.SAMPLE_ON_SYNC]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.START_ON_SYNC, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.START_ON_SYNC]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.POWER_PCR1, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.POWER_PCR1]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.POWER_PCR2, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.POWER_PCR2]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.COMPASS_UPDATE_RATE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.COMPASS_UPDATE_RATE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.COORDINATE_SYSTEM, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.COORDINATE_SYSTEM]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_CELLS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_CELLS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.CELL_SIZE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.CELL_SIZE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.MEASUREMENT_INTERVAL, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.MEASUREMENT_INTERVAL]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.DEPLOYMENT_NAME, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.DEPLOYMENT_NAME]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.WRAP_MODE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.WRAP_MODE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.DEPLOY_START_TIME, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.DEPLOY_START_TIME]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.DIAG_INTERVAL, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.DIAG_INTERVAL]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.USE_SPEC_SOUND_SPEED, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.USE_SPEC_SOUND_SPEED]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.DIAG_MODE_ON, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.DIAG_MODE_ON]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.ANALOG_OUTPUT_ON, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.ANALOG_OUTPUT_ON]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.OUTPUT_FORMAT, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.OUTPUT_FORMAT]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.SCALING, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.SCALING]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.SERIAL_OUT_ON, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.SERIAL_OUT_ON]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.STAGE_ON, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.STAGE_ON]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.ANALOG_POWER_OUTPUT, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.ANALOG_POWER_OUTPUT]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.SOUND_SPEED_ADJUST, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.SOUND_SPEED_ADJUST]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_DIAG_SAMPLES, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_DIAG_SAMPLES]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_BEAMS_PER_CELL, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_BEAMS_PER_CELL]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_PINGS_DIAG, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_PINGS_DIAG]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.USE_DSP_FILTER, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.USE_DSP_FILTER]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.FILTER_DATA_OUTPUT, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.FILTER_DATA_OUTPUT]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.ANALOG_INPUT_ADDR, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.ANALOG_INPUT_ADDR]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.SW_VER, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.SW_VER]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.VELOCITY_ADJ_FACTOR, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.VELOCITY_ADJ_FACTOR]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.FILE_COMMENTS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.FILE_COMMENTS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.WAVE_DATA_RATE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.WAVE_DATA_RATE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.WAVE_CELL_POS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.WAVE_CELL_POS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.DYNAMIC_POS_TYPE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.DYNAMIC_POS_TYPE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.PERCENT_WAVE_CELL_POS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.PERCENT_WAVE_CELL_POS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.WAVE_TX_PULSE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.WAVE_TX_PULSE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.FIX_WAVE_BLANK_DIST, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.FIX_WAVE_BLANK_DIST]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.WAVE_CELL_SIZE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.WAVE_CELL_SIZE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_DIAG_PER_WAVE, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_DIAG_PER_WAVE]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.NUM_SAMPLE_PER_BURST, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.NUM_SAMPLE_PER_BURST]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.ANALOG_SCALE_FACTOR, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.ANALOG_SCALE_FACTOR]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.CORRELATION_THRS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.CORRELATION_THRS]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.TX_PULSE_LEN_2ND, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.TX_PULSE_LEN_2ND]},
{DataParticleKey.VALUE_ID: NortekUserConfigDataParticleKey.FILTER_CONSTANTS, DataParticleKey.VALUE: working_value[NortekUserConfigDataParticleKey.FILTER_CONSTANTS]}]
calculated_checksum = NortekProtocolParameterDict.calculate_checksum(self.raw_data, USER_CONFIG_LEN)
if working_value[NortekUserConfigDataParticleKey.CHECKSUM] != calculated_checksum:
log.warn("Calculated checksum: %s did not match packet checksum: %s",
calculated_checksum, working_value[NortekUserConfigDataParticleKey.CHECKSUM])
self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED
log.debug('NortekUserConfigDataParticle: particle=%s', result)
return result
class NortekEngClockDataParticleKey(BaseEnum):
"""
Particles for the clock data
"""
DATE_TIME_ARRAY = "date_time_array"
DATE_TIME_STAMP = "date_time_stamp"
class NortekEngClockDataParticle(DataParticle):
"""
Routine for parsing clock engineering data into a data particle structure
for the Nortek sensor.
"""
_data_particle_type = NortekDataParticleType.CLOCK
def _build_parsed_values(self):
"""
Take the clock data and parse it into
values with appropriate tags.
@throws SampleException If there is a problem with sample creation
"""
match = CLOCK_DATA_REGEX.match(self.raw_data)
if not match:
raise SampleException("NortekEngClockDataParticle: No regex match of parsed sample data: [%s]" %
self.raw_data)
date_time_array = [int((match.group(1)).encode("hex")),
int((match.group(2)).encode("hex")),
int((match.group(3)).encode("hex")),
int((match.group(4)).encode("hex")),
int((match.group(5)).encode("hex")),
int((match.group(6)).encode("hex"))]
if date_time_array is None:
raise SampleException("No date/time array value parsed")
# report values
result = [{DataParticleKey.VALUE_ID: NortekEngClockDataParticleKey.DATE_TIME_ARRAY,
DataParticleKey.VALUE: date_time_array}]
log.debug('NortekEngClockDataParticle: particle=%s', result)
return result
class NortekEngBatteryDataParticleKey(BaseEnum):
"""
Particles for the battery data
"""
BATTERY_VOLTAGE = "battery_voltage_mv"
class NortekEngBatteryDataParticle(DataParticle):
"""
Routine for parsing battery engineering data into a data particle
structure for the Nortek sensor.
"""
_data_particle_type = NortekDataParticleType.BATTERY
def _build_parsed_values(self):
"""
Take the battery data and parse it into
values with appropriate tags.
@throws SampleException If there is a problem with sample creation
"""
match = BATTERY_DATA_REGEX.search(self.raw_data)
if not match:
raise SampleException("NortekEngBatteryDataParticle: No regex match of parsed sample data: [%r]" % self.raw_data)
# Calculate value
battery_voltage = NortekProtocolParameterDict.convert_word_to_int(match.group(1))
if battery_voltage is None:
raise SampleException("No battery_voltage value parsed")
# report values
result = [{DataParticleKey.VALUE_ID: NortekEngBatteryDataParticleKey.BATTERY_VOLTAGE,
DataParticleKey.VALUE: battery_voltage}]
log.debug('NortekEngBatteryDataParticle: particle=%s', result)
return result
class NortekEngIdDataParticleKey(BaseEnum):
"""
Particles for identification data
"""
ID = "identification_string"
class NortekEngIdDataParticle(DataParticle):
"""
Routine for parsing id engineering data into a data particle
structure for the Nortek sensor.
"""
_data_particle_type = NortekDataParticleType.ID_STRING
def _build_parsed_values(self):
"""
Take the id data and parse it into
values with appropriate tags.
@throws SampleException If there is a problem with sample creation
"""
match = ID_DATA_REGEX.match(self.raw_data)
if not match:
raise SampleException("NortekEngIdDataParticle: No regex match of parsed sample data: [%s]" % self.raw_data)
id_str = NortekProtocolParameterDict.convert_bytes_to_string(match.group(1))
if id_str is None:
raise SampleException("No ID value parsed")
# report values
result = [{DataParticleKey.VALUE_ID: NortekEngIdDataParticleKey.ID, DataParticleKey.VALUE: id_str}]
log.debug('NortekEngIdDataParticle: particle=%r', result)
return result
###############################################################################
# Param dictionary helpers
###############################################################################
class NortekProtocolParameterDict(ProtocolParameterDict):
def get_config(self):
"""
Retrieve the configuration as a dictionary of parameter names mapped to their current values.
"""
config = {}
for (key, val) in self._param_dict.iteritems():
config[key] = val.get_value()
return config
def set_from_value(self, name, value):
"""
Set a parameter value in the dictionary.
@param name The parameter name.
@param value The parameter value.
@raises InstrumentParameterException if the name is invalid.
"""
#log.debug("NortekProtocolParameterDict.set_from_value(): name=%s, value=%s", name, value)
retval = False
if name not in self._param_dict:
raise InstrumentParameterException('Unable to set parameter %s to %s: parameter %s is not in the dictionary' % (name, value, name))
if ((self._param_dict[name].value.f_format == NortekProtocolParameterDict.word_to_string) or
(self._param_dict[name].value.f_format == NortekProtocolParameterDict.double_word_to_string)):
if not isinstance(value, int):
raise InstrumentParameterException('Unable to set parameter %s to %s: value not an integer' % (name, value))
elif self._param_dict[name].value.f_format == NortekProtocolParameterDict.convert_datetime_to_words:
if not isinstance(value, list):
raise InstrumentParameterException('Unable to set parameter %s to %s: value not a list' % (name, value))
if value != self._param_dict[name].value.get_value():
log.debug("old value: %s, new value: %s", self._param_dict[name].value.get_value(), value)
retval = True
self._param_dict[name].value.set_value(value)
return retval
@staticmethod
def word_to_string(value):
"""
Converts a word into a string field
"""
low_byte = value & 0xff
high_byte = (value & 0xff00) >> 8
return chr(low_byte) + chr(high_byte)
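# Illustrative note (not part of the driver logic): words are packed little-endian,
# low byte first, so this helper and convert_word_to_int below are inverses:
#   word_to_string(0x0102)          -> '\x02\x01'
#   convert_word_to_int('\x02\x01') -> 0x0102 (258)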
@staticmethod
def convert_word_to_int(word):
"""
Converts a word into an integer field
"""
if len(word) != 2:
raise SampleException("Invalid number of bytes in word input! Found %s with input %s" % (len(word), word))
low_byte = ord(word[0])
high_byte = 0x100 * ord(word[1])
log.trace('w=%s, l=%d, h=%d, v=%d' % (word.encode('hex'), low_byte, high_byte, low_byte + high_byte))
return low_byte + high_byte
@staticmethod
def double_word_to_string(value):
"""
Converts 2 words into a string field
"""
result = NortekProtocolParameterDict.word_to_string(value & 0xffff)
result += NortekProtocolParameterDict.word_to_string((value & 0xffff0000) >> 16)
return result
@staticmethod
def convert_double_word_to_int(dword):
"""
Converts 2 words into an integer field
"""
if len(dword) != 4:
raise SampleException("Invalid number of bytes in double word input! Found %s" % len(dword))
low_word = NortekProtocolParameterDict.convert_word_to_int(dword[0:2])
high_word = NortekProtocolParameterDict.convert_word_to_int(dword[2:4])
log.trace('dw=%s, lw=%d, hw=%d, v=%d' %(dword.encode('hex'), low_word, high_word, low_word + (0x10000 * high_word)))
return low_word + (0x10000 * high_word)
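# Illustrative note: double words use the same little-endian layout, low word first:
#   double_word_to_string(0x01020304)              -> '\x04\x03\x02\x01'
#   convert_double_word_to_int('\x04\x03\x02\x01') -> 0x01020304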
@staticmethod
def convert_bytes_to_bit_field(bytes):
"""
Convert bytes to a bit field, reversing bytes in the process.
ie '\x05\x01' becomes [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1]
@param bytes an array of string literal bytes.
@retval a list of 1s and 0s, one per input bit, in order
"""
byte_list = list(bytes)
byte_list.reverse()
result = []
for byte in byte_list:
bin_string = bin(ord(byte))[2:].rjust(8, '0')
result.extend([int(x) for x in list(bin_string)])
log.trace("Returning a bitfield of %s for input string: [%s]", result, bytes)
return result
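# Illustrative note: the returned bit field covers every input byte, with the byte
# order reversed so the last input byte contributes the first eight bits, e.g.:
#   convert_bytes_to_bit_field('\x05\x01')
#       -> [0, 0, 0, 0, 0, 0, 0, 1,   # bits of '\x01'
#           0, 0, 0, 0, 0, 1, 0, 1]   # bits of '\x05'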
@staticmethod
def convert_words_to_datetime(bytes):
"""
Convert a block of 6 BCD-encoded bytes into a date/time structure for the
instrument family
@param bytes 6 bytes
@retval An array of 6 ints corresponding to the date/time structure
@raise SampleException If the date/time cannot be found
"""
if len(bytes) != 6:
raise SampleException("Invalid number of bytes in input! Found %s" % len(bytes))
list = NortekProtocolParameterDict.convert_to_array(bytes, 1)
for i in range(0, len(list)):
list[i] = int(list[i].encode("hex"))
return list
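# Illustrative note: each byte is BCD-encoded (its hex digits are read as a decimal
# number), so a raw 6-byte block decodes like this:
#   convert_words_to_datetime('\x30\x45\x21\x13\x14\x05') -> [30, 45, 21, 13, 14, 5]
# (For RC clock responses the byte order is minute, second, day, hour, year, month;
# see convert_time below.)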
@staticmethod
def convert_datetime_to_words(int_array):
"""
Convert an array of integers into a block of 6 BCD bytes that could be fed
back to the instrument as a timestamp. The 6-element array typically comes
from convert_words_to_datetime in the first place.
@param int_array An array of 6 hex values corresponding to a vector
date/time stamp.
@retval A string of 6 binary characters
"""
if len(int_array) != 6:
raise SampleException("Invalid number of bytes in date/time input! Found %s" % len(int_array))
list = [chr(int(str(n), 16)) for n in int_array]
return "".join(list)
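# Illustrative note: this is the inverse of convert_words_to_datetime, e.g.:
#   convert_datetime_to_words([30, 45, 21, 13, 14, 5]) -> '\x30\x45\x21\x13\x14\x05'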
@staticmethod
def convert_to_array(bytes, item_size):
"""
Convert the byte stream into a array with each element being
item_size bytes. ie '\x01\x02\x03\x04' with item_size 2 becomes
['\x01\x02', '\x03\x04']
@param bytes byte stream to convert to an array
@param item_size the size in bytes to make each element
@retval An array with elements of the correct size
@raise SampleException if there are problems unpacking the bytes or
fitting them all in evenly.
"""
length = len(bytes)
if length % item_size != 0:
raise SampleException("Uneven number of bytes for size %s" % item_size)
l = list(bytes)
result = []
for i in range(0, length, item_size):
result.append("".join(l[i: i + item_size]))
return result
@staticmethod
def calculate_checksum(input, length=None):
"""
Calculate the checksum
"""
calculated_checksum = CHECK_SUM_SEED
if length is None:
length = len(input)
for word_index in range(0, length - 2, 2):
word_value = NortekProtocolParameterDict.convert_word_to_int(input[word_index:word_index + 2])
calculated_checksum = (calculated_checksum + word_value) % 0x10000
#log.trace('w_i=%d, c_c=%d', word_index, calculated_checksum)
return calculated_checksum
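# Illustrative note: the checksum is the 16-bit sum of all little-endian words in the
# block except the final (transmitted) checksum word, starting from CHECK_SUM_SEED.
# For example, assuming CHECK_SUM_SEED were 0xb58c, a 6-byte block
# '\x01\x00\x02\x00' + <checksum word> would yield
#   (0xb58c + 0x0001 + 0x0002) % 0x10000 = 0xb58f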
@staticmethod
def convert_bytes_to_string(bytes_in):
"""
Convert a list of bytes into a string, remove trailing nulls
ie. ['\x65', '\x66'] turns into "ef"
@param bytes_in The byte list to take in
@retval The string to return
"""
ba = bytearray(bytes_in)
return str(ba).split('\x00', 1)[0]
@staticmethod
def convert_time(response):
"""
Converts the BCD timestamp bytes to a DD/MM/YYYY HH:MM:SS string
"""
t = str(response[2].encode('hex')) # get day
t += '/' + str(response[5].encode('hex')) # get month
t += '/20' + str(response[4].encode('hex')) # get year
t += ' ' + str(response[3].encode('hex')) # get hours
t += ':' + str(response[0].encode('hex')) # get minutes
t += ':' + str(response[1].encode('hex')) # get seconds
return t
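# Illustrative note: the six BCD bytes are re-ordered into a DD/MM/YYYY HH:MM:SS string:
#   convert_time('\x30\x45\x21\x13\x14\x05') -> '21/05/2014 13:30:45'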
###############################################################################
# Driver
###############################################################################
class NortekInstrumentDriver(SingleConnectionInstrumentDriver):
"""
Base class for all Nortek instrument drivers.
"""
def __init__(self, evt_callback):
"""
Driver constructor.
@param evt_callback Driver process event callback.
"""
SingleConnectionInstrumentDriver.__init__(self, evt_callback)
def _build_protocol(self):
"""
Construct the driver protocol state machine.
"""
self._protocol = NortekInstrumentProtocol(InstrumentPrompts, NEWLINE, self._driver_event)
def get_resource_params(self):
"""
Return list of device parameters available.
"""
return Parameter.list()
###############################################################################
# Protocol
###############################################################################
class NortekInstrumentProtocol(CommandResponseInstrumentProtocol):
"""
Instrument protocol class for Nortek driver.
Subclasses CommandResponseInstrumentProtocol
"""
#logging level
__metaclass__ = get_logging_metaclass(log_level='debug')
velocity_data_regex = []
velocity_sync_bytes = ''
# user configuration order of params, this needs to match the configuration order for setting params
order_of_user_config = [
Parameter.TRANSMIT_PULSE_LENGTH,
Parameter.BLANKING_DISTANCE,
Parameter.RECEIVE_LENGTH,
Parameter.TIME_BETWEEN_PINGS,
Parameter.TIME_BETWEEN_BURST_SEQUENCES,
Parameter.NUMBER_PINGS,
Parameter.AVG_INTERVAL,
Parameter.USER_NUMBER_BEAMS,
Parameter.TIMING_CONTROL_REGISTER,
Parameter.POWER_CONTROL_REGISTER,
Parameter.A1_1_SPARE,
Parameter.B0_1_SPARE,
Parameter.B1_1_SPARE,
Parameter.COMPASS_UPDATE_RATE,
Parameter.COORDINATE_SYSTEM,
Parameter.NUMBER_BINS,
Parameter.BIN_LENGTH,
Parameter.MEASUREMENT_INTERVAL,
Parameter.DEPLOYMENT_NAME,
Parameter.WRAP_MODE,
Parameter.CLOCK_DEPLOY,
Parameter.DIAGNOSTIC_INTERVAL,
Parameter.MODE,
Parameter.ADJUSTMENT_SOUND_SPEED,
Parameter.NUMBER_SAMPLES_DIAGNOSTIC,
Parameter.NUMBER_BEAMS_CELL_DIAGNOSTIC,
Parameter.NUMBER_PINGS_DIAGNOSTIC,
Parameter.MODE_TEST,
Parameter.ANALOG_INPUT_ADDR,
Parameter.SW_VERSION,
Parameter.USER_1_SPARE,
Parameter.VELOCITY_ADJ_TABLE,
Parameter.COMMENTS,
Parameter.WAVE_MEASUREMENT_MODE,
Parameter.DYN_PERCENTAGE_POSITION,
Parameter.WAVE_TRANSMIT_PULSE,
Parameter.WAVE_BLANKING_DISTANCE,
Parameter.WAVE_CELL_SIZE,
Parameter.NUMBER_DIAG_SAMPLES,
Parameter.A1_2_SPARE,
Parameter.B0_2_SPARE,
Parameter.NUMBER_SAMPLES_PER_BURST,
Parameter.USER_2_SPARE,
Parameter.ANALOG_OUTPUT_SCALE,
Parameter.CORRELATION_THRESHOLD,
Parameter.USER_3_SPARE,
Parameter.TRANSMIT_PULSE_LENGTH_SECOND_LAG,
Parameter.USER_4_SPARE,
Parameter.QUAL_CONSTANTS]
def __init__(self, prompts, newline, driver_event):
"""
Protocol constructor.
@param prompts A BaseEnum class containing instrument prompts.
@param newline The newline.
@param driver_event Driver process event callback.
"""
CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event)
self._protocol_fsm = InstrumentFSM(ProtocolState,
ProtocolEvent,
ProtocolEvent.ENTER,
ProtocolEvent.EXIT)
# Add event handlers for protocol state machine.
self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.ENTER, self._handler_unknown_enter)
self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.DISCOVER, self._handler_unknown_discover)
self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.READ_MODE, self._handler_unknown_read_mode)
self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.EXIT, self._handler_unknown_exit)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ENTER, self._handler_command_enter)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.EXIT, self._handler_command_exit)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ACQUIRE_SAMPLE, self._handler_command_acquire_sample)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.START_AUTOSAMPLE, self._handler_command_start_autosample)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.SET, self._handler_command_set)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.GET, self._handler_get)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ACQUIRE_STATUS, self._handler_command_acquire_status)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.START_DIRECT, self._handler_command_start_direct)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.CLOCK_SYNC, self._handler_command_clock_sync)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.SCHEDULED_CLOCK_SYNC, self._handler_command_clock_sync)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.SCHEDULED_ACQUIRE_STATUS, self._handler_command_acquire_status)
self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.ENTER, self._handler_autosample_enter)
self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.EXIT, self._handler_autosample_exit)
self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.READ_MODE, self._handler_autosample_read_mode)
self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.STOP_AUTOSAMPLE, self._handler_autosample_stop_autosample)
self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.SCHEDULED_CLOCK_SYNC, self._handler_autosample_clock_sync)
self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.SCHEDULED_ACQUIRE_STATUS, self._handler_autosample_acquire_status)
self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.ENTER, self._handler_direct_access_enter)
self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.STOP_DIRECT, self._handler_direct_access_stop_direct)
self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.EXECUTE_DIRECT, self._handler_direct_access_execute_direct)
self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.READ_MODE, self._handler_unknown_read_mode)
self._protocol_fsm.add_handler(ProtocolState.DIRECT_ACCESS, ProtocolEvent.EXIT, self._handler_direct_access_exit)
# Start state machine in UNKNOWN state.
self._protocol_fsm.start(ProtocolState.UNKNOWN)
# Add build handlers for device commands.
self._add_build_handler(InstrumentCmds.SET_REAL_TIME_CLOCK, self._build_set_real_time_clock_command)
# Add response handlers for device commands.
self._add_response_handler(InstrumentCmds.ACQUIRE_DATA, self._parse_acquire_data_response)
self._add_response_handler(InstrumentCmds.CMD_WHAT_MODE, self._parse_what_mode_response)
self._add_response_handler(InstrumentCmds.SAMPLE_WHAT_MODE, self._parse_what_mode_response)
self._add_response_handler(InstrumentCmds.READ_REAL_TIME_CLOCK, self._parse_read_clock_response)
self._add_response_handler(InstrumentCmds.READ_HW_CONFIGURATION, self._parse_read_hw_config)
self._add_response_handler(InstrumentCmds.READ_HEAD_CONFIGURATION, self._parse_read_head_config)
self._add_response_handler(InstrumentCmds.READ_USER_CONFIGURATION, self._parse_read_user_config)
self._add_response_handler(InstrumentCmds.SOFT_BREAK_SECOND_HALF, self._parse_second_break_response)
# Construct the parameter dictionary containing device parameters,
# current parameter values, and set formatting functions.
self._build_param_dict()
self._build_cmd_dict()
self._build_driver_dict()
# create chunker for processing instrument samples.
self._chunker = StringChunker(self.sieve_function)
@classmethod
def sieve_function(cls, raw_data):
"""
The method that detects data sample structures from the instrument.
@retval A list of (start_index, end_index) tuples, one per chunk found in raw_data.
"""
return_list = []
sieve_matchers = NORTEK_COMMON_REGEXES + cls.velocity_data_regex
for matcher in sieve_matchers:
for match in matcher.finditer(raw_data):
return_list.append((match.start(), match.end()))
log.debug("sieve_function: regex found %r", raw_data[match.start():match.end()])
return return_list
def _got_chunk_base(self, structure, timestamp):
"""
The base class got_data has gotten a structure from the chunker. Pass it to extract_sample
with the appropriate particle objects and REGEXes.
"""
self._extract_sample(NortekUserConfigDataParticle, USER_CONFIG_DATA_REGEX, structure, timestamp)
self._extract_sample(NortekHardwareConfigDataParticle, HARDWARE_CONFIG_DATA_REGEX, structure, timestamp)
self._extract_sample(NortekHeadConfigDataParticle, HEAD_CONFIG_DATA_REGEX, structure, timestamp)
self._extract_sample(NortekEngClockDataParticle, CLOCK_DATA_REGEX, structure, timestamp)
self._extract_sample(NortekEngIdDataParticle, ID_DATA_REGEX, structure, timestamp)
# Note: This appears to be the same size and data structure as average interval & measurement interval
# need to copy over the exact value to match
self._extract_sample(NortekEngBatteryDataParticle, ID_BATTERY_DATA_REGEX, structure, timestamp)
########################################################################
# overridden superclass methods
########################################################################
def _filter_capabilities(self, events):
"""
Filters capabilities
"""
events_out = [x for x in events if Capability.has(x)]
return events_out
def set_init_params(self, config):
"""
over-ridden to handle binary block configuration
Set the initialization parameters to the given values in the protocol parameter dictionary.
@param config A driver configuration dict that should contain an
enclosed dict with key DriverConfigKey.PARAMETERS. This should include
either param_name/value pairs or
{DriverParameter.ALL: base64-encoded string of raw values as the
instrument would return them from a get config}. If the desired value
is false, nothing will happen.
@raise InstrumentParameterException If the config cannot be set
"""
if not isinstance(config, dict):
raise InstrumentParameterException("Invalid init config format")
param_config = config.get(DriverConfigKey.PARAMETERS)
log.debug('%s', param_config)
if DriverParameter.ALL in param_config:
binary_config = base64.b64decode(param_config[DriverParameter.ALL])
# make the configuration string look like it came from instrument to get all the methods to be happy
binary_config += InstrumentPrompts.Z_ACK
log.debug("binary_config len=%d, binary_config=%s",
len(binary_config), binary_config.encode('hex'))
if len(binary_config) == USER_CONFIG_LEN + 2:
if self._check_configuration(binary_config, USER_CONFIG_SYNC_BYTES, USER_CONFIG_LEN):
self._param_dict.update(binary_config)
else:
raise InstrumentParameterException("bad configuration")
else:
raise InstrumentParameterException("configuration not the correct length")
else:
for name in param_config.keys():
self._param_dict.set_init_value(name, param_config[name])
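# Illustrative sketch (hypothetical values): a startup config can carry the whole
# binary user configuration as a single base64 blob instead of per-parameter values:
#   config = {DriverConfigKey.PARAMETERS:
#                 {DriverParameter.ALL: base64.b64encode(raw_user_config)}}
#   protocol.set_init_params(config)
# where raw_user_config would be the USER_CONFIG_LEN-byte block returned by a GC command.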
def _set_params(self, *args, **kwargs):
"""
Issue commands to the instrument to set various parameters.
Also called when setting parameters during startup and direct access. If
startup is set to true that means we are setting startup values
and immutable parameters can be set. Otherwise only READ_WRITE
parameters can be set.
@param params dictionary containing parameter name and value
@param startup bool True is we are initializing, False otherwise
@raise InstrumentParameterException
"""
# Retrieve required parameter from args.
# Raise exception if no parameter provided, or not a dict.
try:
params = args[0]
except IndexError:
raise InstrumentParameterException('Set params requires a parameter dict.')
old_config = self._param_dict.get_config()
# For each key, value in the params list set the value in parameters copy.
try:
for name, value in params.iteritems():
log.debug('_set_params: setting %s to %s', name, value)
if self._param_dict.set_from_value(name, value):
log.debug('_set_params: a value was updated: %s', value)
except Exception as ex:
raise InstrumentParameterException('Unable to set parameter %s to %s: %s' % (name, value, ex))
output = self._create_set_output(self._param_dict)
# Clear the prompt buffer.
self._promptbuf = ''
self._linebuf = ''
log.debug('_set_params: writing instrument configuration to instrument')
self._connection.send(InstrumentCmds.CONFIGURE_INSTRUMENT)
self._connection.send(output)
result = self._get_response(timeout=30,
expected_prompt=[InstrumentPrompts.Z_ACK, InstrumentPrompts.Z_NACK])
log.debug('_set_params: result=%r', result)
if result[1] == InstrumentPrompts.Z_NACK:
raise InstrumentParameterException("NortekInstrumentProtocol._set_params(): Invalid configuration file! ")
self._update_params()
new_config = self._param_dict.get_config()
log.trace("_set_params: old_config: %s", old_config)
log.trace("_set_params: new_config: %s", new_config)
if old_config != new_config:
self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)
log.debug('_set_params: config updated!')
def _send_wakeup(self):
"""
Send a wakeup to the device. Overridden by device specific
subclasses.
"""
self._connection.send(InstrumentCmds.SOFT_BREAK_FIRST_HALF)
def _do_cmd_resp(self, cmd, *args, **kwargs):
"""
Perform a command-response on the device.
@param cmd The command to execute.
@param args positional arguments to pass to the build handler.
@param timeout=timeout optional command timeout.
@retval resp_result The (possibly parsed) response result.
@raises InstrumentTimeoutException if the response did not occur in time.
@raises InstrumentProtocolException if command could not be built or if response
was not recognized.
"""
# Get timeout and initialize response.
timeout = kwargs.get('timeout', TIMEOUT)
response_regex = kwargs.get('response_regex', None)
if response_regex is None:
expected_prompt = kwargs.get('expected_prompt', InstrumentPrompts.Z_ACK)
else:
expected_prompt = None
write_delay = kwargs.get('write_delay', DEFAULT_WRITE_DELAY)
# Get the build handler.
build_handler = self._build_handlers.get(cmd, None)
if not build_handler:
self._add_build_handler(cmd, self._build_command_default)
return super(NortekInstrumentProtocol, self)._do_cmd_resp(cmd, timeout=timeout,
expected_prompt=expected_prompt,
response_regex=response_regex,
write_delay=write_delay,
*args)
########################################################################
# Unknown handlers.
########################################################################
def _handler_unknown_enter(self, *args, **kwargs):
"""
Enter unknown state.
"""
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_unknown_discover(self, *args, **kwargs):
"""
Discover current state of instrument; can be COMMAND or AUTOSAMPLE.
@retval (next_state, next_agent_state)
"""
ret_mode = self._protocol_fsm.on_event(ProtocolEvent.READ_MODE)
prompt = ret_mode[1]
if prompt == 0:
log.debug('_handler_unknown_discover: FIRMWARE_UPGRADE')
raise InstrumentStateException('Firmware upgrade state.')
elif prompt == 1:
log.debug('_handler_unknown_discover: MEASUREMENT_MODE')
next_state = ProtocolState.AUTOSAMPLE
next_agent_state = ResourceAgentState.STREAMING
elif prompt == 2:
log.debug('_handler_unknown_discover: COMMAND_MODE')
next_state = ProtocolState.COMMAND
next_agent_state = ResourceAgentState.IDLE
elif prompt == 4:
log.debug('_handler_unknown_discover: DATA_RETRIEVAL_MODE')
next_state = ProtocolState.AUTOSAMPLE
next_agent_state = ResourceAgentState.STREAMING
elif prompt == 5:
log.debug('_handler_unknown_discover: CONFIRMATION_MODE')
next_state = ProtocolState.AUTOSAMPLE
next_agent_state = ResourceAgentState.STREAMING
else:
raise InstrumentStateException('Unknown state: %s' % ret_mode[1])
log.debug('_handler_unknown_discover: state=%s', next_state)
return next_state, next_agent_state
def _handler_unknown_exit(self, *args, **kwargs):
"""
Exiting Unknown state
"""
pass
########################################################################
# Command handlers.
########################################################################
def _handler_command_enter(self, *args, **kwargs):
"""
Enter command state. Configure the instrument and driver, sync the clock, and start scheduled events
if they are set
"""
# Command device to update parameters and send a config change event.
self._update_params()
self._init_params()
if self._param_dict.get(EngineeringParameter.CLOCK_SYNC_INTERVAL) is not None:
log.debug("Configuring the scheduler to sync clock %s", self._param_dict.get(EngineeringParameter.CLOCK_SYNC_INTERVAL))
if self._param_dict.get(EngineeringParameter.CLOCK_SYNC_INTERVAL) != '00:00:00':
self.start_scheduled_job(EngineeringParameter.CLOCK_SYNC_INTERVAL, ScheduledJob.CLOCK_SYNC, ProtocolEvent.CLOCK_SYNC)
if self._param_dict.get(EngineeringParameter.ACQUIRE_STATUS_INTERVAL) is not None:
log.debug("Configuring the scheduler to acquire status %s", self._param_dict.get(EngineeringParameter.ACQUIRE_STATUS_INTERVAL))
if self._param_dict.get(EngineeringParameter.ACQUIRE_STATUS_INTERVAL) != '00:00:00':
self.start_scheduled_job(EngineeringParameter.ACQUIRE_STATUS_INTERVAL, ScheduledJob.ACQUIRE_STATUS, ProtocolEvent.ACQUIRE_STATUS)
# Tell driver superclass to send a state change event.
# Superclass will query the state.
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_command_exit(self, *args, **kwargs):
"""
Exit command state.
"""
self.stop_scheduled_job(ScheduledJob.ACQUIRE_STATUS)
self.stop_scheduled_job(ScheduledJob.CLOCK_SYNC)
pass
def _handler_command_acquire_sample(self, *args, **kwargs):
"""
Command the instrument to acquire sample data. Instrument will enter Power Down mode when finished
"""
self._do_cmd_resp(InstrumentCmds.ACQUIRE_DATA, expected_prompt=self.velocity_sync_bytes,
timeout=SAMPLE_TIMEOUT)
return None, (None, None)
def _handler_autosample_acquire_status(self, *args, **kwargs):
"""
High level command for the operator to get all of the status from the instrument from autosample state:
Battery voltage, clock, hw configuration, head configuration, user configuration, and identification string
"""
# break out of measurement mode in order to issue the status related commands
self._handler_autosample_stop_autosample()
self._handler_command_acquire_status()
# return to measurement mode
self._handler_command_start_autosample()
return None, (None, None)
def _handler_command_acquire_status(self, *args, **kwargs):
"""
High level command for the operator to get all of the status from the instrument:
Battery voltage, clock, hw configuration, head configuration, user configuration, and identification string
"""
# ID + BV: issue the read id and read battery voltage commands together so their
# responses are combined (workaround for non-unique response regexes).
self._do_cmd_resp(InstrumentCmds.READ_ID + InstrumentCmds.READ_BATTERY_VOLTAGE,
response_regex=ID_BATTERY_DATA_REGEX, timeout=30)
#RC
self._do_cmd_resp(InstrumentCmds.READ_REAL_TIME_CLOCK, response_regex=CLOCK_DATA_REGEX)
#GP
self._do_cmd_resp(InstrumentCmds.READ_HW_CONFIGURATION, response_regex=HARDWARE_CONFIG_DATA_REGEX)
#GH
self._do_cmd_resp(InstrumentCmds.READ_HEAD_CONFIGURATION, response_regex=HEAD_CONFIG_DATA_REGEX)
#GC
self._do_cmd_resp(InstrumentCmds.READ_USER_CONFIGURATION, response_regex=USER_CONFIG_DATA_REGEX)
return None, (None, None)
def _handler_command_set(self, *args, **kwargs):
"""
Perform a set command.
@param args[0] parameter : value dict.
@retval (next_state=None, result)
@throws InstrumentParameterException if missing set parameters, if set parameters not ALL and
not a dict, or if parameter can't be properly formatted.
"""
self._verify_not_readonly(*args, **kwargs)
self._set_params(*args, **kwargs)
return None, None
def _handler_command_start_autosample(self, *args, **kwargs):
"""
Switch into autosample mode
@retval (next_state, next_resource_state, result) tuple
"""
result = self._do_cmd_resp(InstrumentCmds.START_MEASUREMENT_WITHOUT_RECORDER, timeout=SAMPLE_TIMEOUT, *args, **kwargs)
return ProtocolState.AUTOSAMPLE, (ResourceAgentState.STREAMING, result)
def _handler_command_start_direct(self):
return ProtocolState.DIRECT_ACCESS, (ResourceAgentState.DIRECT_ACCESS, None)
def _handler_command_read_mode(self):
"""
Issue read mode command.
"""
result = self._do_cmd_resp(InstrumentCmds.CMD_WHAT_MODE)
return None, (None, result)
def _handler_autosample_read_mode(self):
"""
Issue read mode command.
"""
self._connection.send(InstrumentCmds.AUTOSAMPLE_BREAK)
time.sleep(.1)
result = self._do_cmd_resp(InstrumentCmds.SAMPLE_WHAT_MODE)
return None, (None, result)
def _handler_unknown_read_mode(self):
"""
Issue read mode command.
"""
next_state = None
next_agent_state = None
try:
self._connection.send(InstrumentCmds.AUTOSAMPLE_BREAK)
time.sleep(.1)
result = self._do_cmd_resp(InstrumentCmds.SAMPLE_WHAT_MODE, timeout=0.6, response_regex=MODE_DATA_REGEX)
except InstrumentTimeoutException:
log.debug('_handler_unknown_read_mode: no response to "I", sending "II"')
# if there is no response, catch timeout exception and issue 'II' command instead
result = self._do_cmd_resp(InstrumentCmds.CMD_WHAT_MODE, response_regex=MODE_DATA_REGEX)
return next_state, (next_agent_state, result)
def _clock_sync(self, *args, **kwargs):
"""
The mechanics of synchronizing a clock
@throws InstrumentCommandException if the clock was not synchronized
"""
str_time = get_timestamp_delayed("%M %S %d %H %y %m")
byte_time = ''
for v in str_time.split():
byte_time += chr(int('0x' + v, base=16))
values = str_time.split()
log.info("_clock_sync: time set to %s:m %s:s %s:d %s:h %s:y %s:M (%s)",
values[0], values[1], values[2], values[3], values[4], values[5],
byte_time.encode('hex'))
self._do_cmd_resp(InstrumentCmds.SET_REAL_TIME_CLOCK, byte_time, **kwargs)
response = self._do_cmd_resp(InstrumentCmds.READ_REAL_TIME_CLOCK, *args, **kwargs)
log.debug('response = %r', response)
response = NortekProtocolParameterDict.convert_time(response)
log.debug('response converted = %r', response)
# verify that the dates match
date_str = get_timestamp_delayed('%d/%m/%Y %H:%M:%S')
if date_str[:10] != response[:10]:
raise InstrumentCommandException("Syncing the clock did not work!")
# verify that the times match closely
hours = int(date_str[11:13])
minutes = int(date_str[14:16])
seconds = int(date_str[17:19])
total_time = (hours * 3600) + (minutes * 60) + seconds
hours = int(response[11:13])
minutes = int(response[14:16])
seconds = int(response[17:19])
total_time2 = (hours * 3600) + (minutes * 60) + seconds
if total_time - total_time2 > TIME_DELAY:
raise InstrumentCommandException("Syncing the clock did not work! Off by %s seconds" %
(total_time - total_time2))
return response
def _handler_command_clock_sync(self, *args, **kwargs):
"""
sync clock close to a second edge
@retval (next_state, result) tuple, (None, None) if successful.
@throws InstrumentTimeoutException if device cannot be woken for command.
@throws InstrumentProtocolException if command could not be built or misunderstood.
"""
result = self._clock_sync()
return None, (None, result)
########################################################################
# Autosample handlers.
########################################################################
def _handler_autosample_clock_sync(self, *args, **kwargs):
"""
While in autosample, sync a clock close to a second edge
@retval next_state, (next_agent_state, result) tuple, AUTOSAMPLE, (STREAMING, None) if successful.
"""
next_state = None
next_agent_state = None
result = None
try:
self._protocol_fsm._on_event(ProtocolEvent.STOP_AUTOSAMPLE)
next_state = ProtocolState.COMMAND
next_agent_state = ResourceAgentState.COMMAND
self._clock_sync()
self._protocol_fsm._on_event(ProtocolEvent.START_AUTOSAMPLE)
next_state = ProtocolState.AUTOSAMPLE
next_agent_state = ResourceAgentState.STREAMING
finally:
return next_state, (next_agent_state, result)
def _handler_autosample_enter(self, *args, **kwargs):
"""
Enter autosample state.
"""
if self._param_dict.get(EngineeringParameter.CLOCK_SYNC_INTERVAL) is not None:
log.debug("Configuring the scheduler to sync clock %s", self._param_dict.get(EngineeringParameter.CLOCK_SYNC_INTERVAL))
if self._param_dict.get(EngineeringParameter.CLOCK_SYNC_INTERVAL) != '00:00:00':
self.start_scheduled_job(EngineeringParameter.CLOCK_SYNC_INTERVAL, ScheduledJob.CLOCK_SYNC, ProtocolEvent.CLOCK_SYNC)
if self._param_dict.get(EngineeringParameter.CLOCK_SYNC_INTERVAL) is not None:
log.debug("Configuring the scheduler to acquire status %s", self._param_dict.get(EngineeringParameter.ACQUIRE_STATUS_INTERVAL))
if self._param_dict.get(EngineeringParameter.ACQUIRE_STATUS_INTERVAL) != '00:00:00':
self.start_scheduled_job(EngineeringParameter.ACQUIRE_STATUS_INTERVAL, ScheduledJob.ACQUIRE_STATUS, ProtocolEvent.ACQUIRE_STATUS)
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_autosample_exit(self, *args, **kwargs):
"""
Exit autosample state.
"""
self.stop_scheduled_job(ScheduledJob.ACQUIRE_STATUS)
self.stop_scheduled_job(ScheduledJob.CLOCK_SYNC)
pass
def _handler_autosample_stop_autosample(self, *args, **kwargs):
"""
Stop autosample and switch back to command mode.
@retval ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None) if successful.
@throws InstrumentProtocolException if command misunderstood or incorrect prompt received.
"""
self._connection.send(InstrumentCmds.SOFT_BREAK_FIRST_HALF)
time.sleep(.1)
ret_prompt = self._do_cmd_resp(InstrumentCmds.SOFT_BREAK_SECOND_HALF,
expected_prompt=[InstrumentPrompts.CONFIRMATION, InstrumentPrompts.COMMAND_MODE],
*args, **kwargs)
log.debug('_handler_autosample_stop_autosample, ret_prompt: %s', ret_prompt)
if ret_prompt == InstrumentPrompts.CONFIRMATION:
# Issue the confirmation command.
self._do_cmd_resp(InstrumentCmds.CONFIRMATION, *args, **kwargs)
return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None)
def stop_scheduled_job(self, schedule_job):
"""
Remove the scheduled job
"""
log.debug("Attempting to remove the scheduler")
if self._scheduler is not None:
try:
self._remove_scheduler(schedule_job)
log.debug("successfully removed scheduler")
except KeyError:
log.debug("_remove_scheduler could not find %s", schedule_job)
def start_scheduled_job(self, param, schedule_job, protocol_event):
"""
Add a scheduled job
"""
interval = self._param_dict.get(param).split(':')
hours = interval[0]
minutes = interval[1]
seconds = interval[2]
log.debug("Setting scheduled interval to: %s %s %s", hours, minutes, seconds)
config = {DriverConfigKey.SCHEDULER: {
schedule_job: {
DriverSchedulerConfigKey.TRIGGER: {
DriverSchedulerConfigKey.TRIGGER_TYPE: TriggerType.INTERVAL,
DriverSchedulerConfigKey.HOURS: int(hours),
DriverSchedulerConfigKey.MINUTES: int(minutes),
DriverSchedulerConfigKey.SECONDS: int(seconds)
}
}
}
}
log.debug("Adding job %s", schedule_job)
try:
self._add_scheduler_event(schedule_job, protocol_event)
except KeyError:
log.debug("scheduler already exists for '%s'", schedule_job)
########################################################################
# Direct access handlers.
########################################################################
def _handler_direct_access_enter(self, *args, **kwargs):
"""
Enter direct access state.
"""
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
self._sent_cmds = []
def _handler_direct_access_exit(self, *args, **kwargs):
"""
Exit direct access state.
"""
pass
def _handler_direct_access_execute_direct(self, data):
"""
Execute Direct Access command(s)
"""
next_state = None
result = None
self._do_cmd_direct(data)
# add sent command to list for 'echo' filtering in callback
self._sent_cmds.append(data)
return next_state, result
def _handler_direct_access_stop_direct(self, *args, **kwargs):
"""
        Stop Direct Access and return the driver to a healthy state by reverting to the state it was in
        before Direct Access was started.
"""
#discover the state to go to next
next_state, next_agent_state = self._handler_unknown_discover()
if next_state == DriverProtocolState.COMMAND:
next_agent_state = ResourceAgentState.COMMAND
if next_state == DriverProtocolState.AUTOSAMPLE:
#go into command mode in order to set parameters
self._handler_autosample_stop_autosample()
#restore parameters
log.debug("da_param_restore = %s,", self._param_dict.get_direct_access_list())
self._init_params()
if next_state == DriverProtocolState.AUTOSAMPLE:
#go back into autosample mode
self._do_cmd_resp(InstrumentCmds.START_MEASUREMENT_WITHOUT_RECORDER, timeout=SAMPLE_TIMEOUT)
log.debug("Next_state = %s, Next_agent_state = %s", next_state, next_agent_state)
return next_state, (next_agent_state, None)
########################################################################
# Common handlers.
########################################################################
def _handler_get(self, *args, **kwargs):
"""
Get device parameters from the parameter dict.
@param args[0] list of parameters to retrieve, or DriverParameter.ALL.
@throws InstrumentParameterException if missing or invalid parameter.
"""
next_state = None
# Retrieve the required parameter, raise if not present.
try:
params = args[0]
except IndexError:
raise InstrumentParameterException('Get command requires a parameter list or tuple.')
# If all params requested, retrieve config.
if (params == DriverParameter.ALL) or (params == [DriverParameter.ALL]):
result = self._param_dict.get_config()
# If not all params, confirm a list or tuple of params to retrieve.
# Raise if not a list or tuple.
# Retrieve each key in the list, raise if any are invalid.
else:
if not isinstance(params, (list, tuple)):
raise InstrumentParameterException('Get argument not a list or tuple.')
result = {}
for key in params:
try:
val = self._param_dict.get(key)
result[key] = val
except KeyError:
raise InstrumentParameterException(('%s is not a valid parameter.' % key))
return next_state, result
def _build_driver_dict(self):
"""
Build a driver dictionary structure, load the strings for the metadata
from a file if present.
"""
self._driver_dict = DriverDict()
self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, False)
def _build_cmd_dict(self):
"""
Build a command dictionary structure, load the strings for the metadata
from a file if present.
"""
self._cmd_dict = ProtocolCommandDict()
self._cmd_dict.add(Capability.SET, display_name='set')
self._cmd_dict.add(Capability.GET, display_name='get')
self._cmd_dict.add(Capability.ACQUIRE_SAMPLE, display_name='acquire sample')
self._cmd_dict.add(Capability.START_AUTOSAMPLE, display_name='start autosample')
self._cmd_dict.add(Capability.STOP_AUTOSAMPLE, display_name='stop autosample')
self._cmd_dict.add(Capability.CLOCK_SYNC, display_name='clock sync')
self._cmd_dict.add(Capability.START_DIRECT, display_name='start direct access')
self._cmd_dict.add(Capability.STOP_DIRECT, display_name='stop direct access')
self._cmd_dict.add(Capability.ACQUIRE_STATUS, display_name='acquire status')
def _build_param_dict(self):
"""
Populate the parameter dictionary with parameters.
For each parameter key, add match string, match lambda function,
and value formatting function for set commands.
"""
self._param_dict = NortekProtocolParameterDict()
############################################################################
# ENGINEERING PARAMETERS
###########################################################################
self._param_dict.add(EngineeringParameter.CLOCK_SYNC_INTERVAL,
INTERVAL_TIME_REGEX,
lambda match: match.group(1),
str,
type=ParameterDictType.STRING,
visibility=ParameterDictVisibility.IMMUTABLE,
display_name="Clock Sync Interval",
description='Interval for synchronizing the clock',
units=ParameterUnits.TIME_INTERVAL,
default_value='00:00:00',
startup_param=True)
self._param_dict.add(EngineeringParameter.ACQUIRE_STATUS_INTERVAL,
INTERVAL_TIME_REGEX,
lambda match: match.group(1),
str,
type=ParameterDictType.STRING,
visibility=ParameterDictVisibility.IMMUTABLE,
display_name="Acquire Status Interval",
description='Interval for gathering status particles',
units=ParameterUnits.TIME_INTERVAL,
default_value='00:00:00',
startup_param=True)
def _dump_config(self, input):
"""
For debug purposes, dump the configuration block
"""
dump = []
for byte_index in xrange(0, len(input)):
if byte_index % 0x10 == 0:
                dump.append('\n')  # newline at the start of every 16-byte row
dump.append('{:03x} '.format(byte_index))
dump.append('{:02x} '.format(ord(input[byte_index])))
return "".join(dump)
def _check_configuration(self, input, sync, length):
"""
Perform a check on the configuration:
1. Correct length
2. Contains ACK bytes
3. Correct sync byte
4. Correct checksum
"""
if len(input) != length+2:
log.debug('_check_configuration: wrong length, expected length %d != %d' % (length+2, len(input)))
return False
# check for ACK bytes
if input[length:length+2] != InstrumentPrompts.Z_ACK:
log.debug('_check_configuration: ACK bytes in error %s != %s',
input[length:length+2].encode('hex'),
InstrumentPrompts.Z_ACK.encode('hex'))
return False
# check the sync bytes
if input[0:4] != sync:
log.debug('_check_configuration: sync bytes in error %s != %s',
input[0:4], sync)
return False
# check checksum
calculated_checksum = NortekProtocolParameterDict.calculate_checksum(input, length)
log.debug('_check_configuration: user c_c = %s', calculated_checksum)
sent_checksum = NortekProtocolParameterDict.convert_word_to_int(input[length-2:length])
if sent_checksum != calculated_checksum:
log.debug('_check_configuration: user checksum in error %s != %s',
calculated_checksum, sent_checksum)
return False
return True
def _update_params(self):
"""
Update the parameter dictionary. Issue the read config command. The response
needs to be saved to param dictionary.
@throws InstrumentTimeoutException if device cannot be timely woken.
@throws InstrumentProtocolException if ds/dc misunderstood.
"""
# get user_configuration params from the instrument
log.debug('Sending get_user_configuration command to the instrument.')
ret_config = self._do_cmd_resp(InstrumentCmds.READ_USER_CONFIGURATION, response_regex=USER_CONFIG_DATA_REGEX)
self._param_dict.update(ret_config)
def _create_set_output(self, parameters):
"""
        Load the buffer with the sync byte (A5), ID byte (01), and size word (number of words, little-endian).
        The 'user' configuration is 512 bytes = 256 words long = size 0x100.
"""
output = ['\xa5\x00\x00\x01']
for param in self.order_of_user_config:
log.trace('_create_set_output: adding %s to list', param)
if param == Parameter.COMMENTS:
output.append(parameters.format(param).ljust(180, "\x00"))
elif param == Parameter.DEPLOYMENT_NAME:
output.append(parameters.format(param).ljust(6, "\x00"))
elif param == Parameter.QUAL_CONSTANTS:
output.append(base64.b64decode(parameters.format(param)))
elif param == Parameter.VELOCITY_ADJ_TABLE:
output.append(base64.b64decode(parameters.format(param)))
else:
output.append(parameters.format(param))
log.trace('_create_set_output: ADDED %s output size = %s', param, len(output))
log.debug("Created set output: %r with length: %s", output, len(output))
checksum = CHECK_SUM_SEED
output = "".join(output)
for word_index in range(0, len(output), 2):
word_value = NortekProtocolParameterDict.convert_word_to_int(output[word_index:word_index+2])
checksum = (checksum + word_value) % 0x10000
log.debug('_create_set_output: user checksum = %r', checksum)
output += (NortekProtocolParameterDict.word_to_string(checksum))
return output
def _build_command_default(self, cmd):
return cmd
def _build_set_real_time_clock_command(self, cmd, time, **kwargs):
"""
Build the set clock command
"""
return cmd + time
def _parse_response_default(self, response, prompt):
"""
Parse the response from the instrument for a command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
"""
pass
def _parse_acquire_data_response(self, response, prompt):
"""
        Parse the response from the instrument for an acquire data command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@retval return The [value] as a string
@raise InstrumentProtocolException When a bad response is encountered
"""
key = self.velocity_sync_bytes
start = response.find(key)
if start != -1:
log.debug("_parse_acquire_data_response: response=%r", response[start:start+len(key)])
self._handler_autosample_stop_autosample()
return response[start:start+len(key)]
log.warn("_parse_acquire_data_response: Bad acquire data response from instrument (%s)", response)
raise InstrumentProtocolException("Invalid acquire data response. (%s)" % response)
def _parse_what_mode_response(self, response, prompt):
"""
Parse the response from the instrument for a 'what mode' command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@retval return The mode as an int
@raise InstrumentProtocolException When a bad response is encountered
"""
search_obj = re.search(MODE_DATA_REGEX, response)
if search_obj:
log.debug("_parse_what_mode_response: response=%r", search_obj.group(1))
return NortekProtocolParameterDict.convert_word_to_int(search_obj.group(1))
else:
log.warn("_parse_what_mode_response: Bad what mode response from instrument (%r)", response)
raise InstrumentProtocolException("Invalid what mode response. (%s)" % response.encode('hex'))
def _parse_second_break_response(self, response, prompt):
"""
        Parse the response from the instrument for a second break command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@retval return The response as is
@raise InstrumentProtocolException When a bad response is encountered
"""
for search_prompt in (InstrumentPrompts.CONFIRMATION, InstrumentPrompts.COMMAND_MODE):
start = response.find(search_prompt)
if start != -1:
log.debug("_parse_second_break_response: response=%r", response[start:start+len(search_prompt)])
return response[start:start+len(search_prompt)]
log.warn("_parse_second_break_response: Bad what second break response from instrument (%s)", response)
raise InstrumentProtocolException("Invalid second break response. (%s)" % response)
# DEBUG TEST PURPOSE ONLY
def _parse_read_battery_voltage_response(self, response, prompt):
"""
Parse the response from the instrument for a read battery voltage command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@retval return The battery voltage in mV int
@raise InstrumentProtocolException When a bad response is encountered
"""
match = BATTERY_DATA_REGEX.search(response)
if not match:
log.warn("Bad response from instrument (%s)" % response)
raise InstrumentProtocolException("Invalid response. (%s)" % response.encode('hex'))
return NortekProtocolParameterDict.convert_word_to_int(match.group(1))
def _parse_read_clock_response(self, response, prompt):
"""
Parse the response from the instrument for a read clock command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
"""
return response
# DEBUG TEST PURPOSE ONLY
def _parse_read_id_response(self, response, prompt):
"""
Parse the response from the instrument for a read ID command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@retval return The id as a string
@raise InstrumentProtocolException When a bad response is encountered
"""
match = ID_DATA_REGEX.search(response)
if not match:
log.warn("Bad response from instrument (%s)" % response)
raise InstrumentProtocolException("Invalid response. (%s)" % response.encode('hex'))
return match.group(1)
def _parse_read_hw_config(self, response, prompt):
""" Parse the response from the instrument for a read hw config command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
        @retval return The hardware configuration parsed into a dict. Names
include SerialNo (string), Config (int), Frequency(int),
PICversion (int), HWrevision (int), RecSize (int), Status (int), and
FWversion (binary)
@raise InstrumentProtocolException When a bad response is encountered
"""
if not self._check_configuration(self._promptbuf, HW_CONFIG_SYNC_BYTES, HW_CONFIG_LEN):
log.warn("_parse_read_hw_config: Bad read hw response from instrument (%s)", response.encode('hex'))
raise InstrumentProtocolException("Invalid read hw response. (%s)" % response.encode('hex'))
log.debug("_parse_read_hw_config: response=%s", response.encode('hex'))
return hw_config_to_dict(response)
def _parse_read_head_config(self, response, prompt):
"""
Parse the response from the instrument for a read head command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@retval return The head configuration parsed into a dict. Names include
Config (int), Frequency (int), Type (int), SerialNo (string)
System (binary), NBeams (int)
@raise InstrumentProtocolException When a bad response is encountered
"""
if not self._check_configuration(self._promptbuf, HEAD_CONFIG_SYNC_BYTES, HEAD_CONFIG_LEN):
log.warn("_parse_read_head_config: Bad read head response from instrument (%s)", response.encode('hex'))
raise InstrumentProtocolException("Invalid read head response. (%s)" % response.encode('hex'))
log.debug("_parse_read_head_config: response=%s", response.encode('hex'))
return head_config_to_dict(response)
def _parse_read_user_config(self, response, prompt):
"""
Parse the response from the instrument for a read user command.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
        @retval return The validated raw user configuration response string.
@raise InstrumentProtocolException When a bad response is encountered
"""
log.debug("_parse_read_user_config: response=%s", response.encode('hex'))
if not self._check_configuration(response, USER_CONFIG_SYNC_BYTES, USER_CONFIG_LEN):
log.warn("_parse_read_user_config: Bad read user response from instrument (%s)", response.encode('hex'))
raise InstrumentProtocolException("Invalid read user response. (%s)" % response.encode('hex'))
return response
|
mikeh77/mi-instrument
|
mi/instrument/nortek/driver.py
|
Python
|
bsd-2-clause
| 110,468
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2010-2016 CEA/DEN, EDF R&D, OPEN CASCADE
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
__author__="gboulant"
__date__ ="$31 mars 2010 17:09:53$"
from qtsalome import *
from mytestdialog_ui import Ui_MyTestDialog
from genericdialog import GenericDialog
class MyTestDialog(GenericDialog):
"""
This class is to illustrate the usage of the GenericDialog to implement
the dialog windows of the application with a common design template provided
by the generic class GenericDialog.
"""
def __init__(self, parent=None, name="MyTestDialog"):
GenericDialog.__init__(self, parent, name)
# Set up the user interface from Designer.
self.ui = Ui_MyTestDialog()
        # BE CAREFUL HERE: the ui form is NOT put in the global dialog (which already
        # contains some generic widgets) but in the center panel created in the
        # GenericDialog as an empty container for the form. The MyTestDialog form
        # is expected to create only the widgets to be placed in the center
        # panel.
self.ui.setupUi(self.getPanel())
#
# We implement here the interface of the MVC pattern
#
def setData(self, name):
"""
This function implements the mapping from the data model to the widgets
"""
self.ui.txtName.setText(name)
def checkData(self):
"""
        This function implements the checks to be done on the values contained
        in the widgets when trying to validate the dialog window (clicking OK
        triggers this function first).
"""
if ( self.ui.txtName.text().trimmed() == "" ):
self.checkDataMessage = "The name can't be void"
return False
return True
def getData(self):
"""
This function implements the mapping from the widgets to the data model
"""
name = str(self.ui.txtName.text().trimmed().toUtf8())
# _MEM_: note here that (i) the utf8 format is used and (ii) we must not
# forget to convert to a standard python string (instead of a QString).
return name
class MyTestDialogWithSignals(MyTestDialog):
"""
    This class illustrates the usage of the GenericDialog in the
    case where the dialog window is not modal. In such a case, the
    controller must be warned of close events using Qt signals.
"""
inputValidated = pyqtSignal()
def __init__(self, parent=None, name="MyTestDialogWithSignals"):
MyTestDialog.__init__(self, parent, name)
def accept(self):
"""
        This function is the slot connected to the OK button
        (click event of the OK button).
"""
        # The dialog is raised in a non-modal mode (for example, to
        # keep interactivity with the parent windows). We then have to
        # emit a signal to warn the parent observer that the dialog
        # has been validated so that it can process the event.
MyTestDialog.accept(self)
if self.wasOk():
self.inputValidated.emit()
#
# ==============================================================================
# Basic use case
# ==============================================================================
#
def TEST_MyTestDialog_modal():
import sys
from qtsalome import QApplication
app = QApplication(sys.argv)
app.lastWindowClosed.connect(app.quit)
dlg=MyTestDialog()
dlg.setData("A default name")
dlg.displayAndWait()
if dlg.wasOk():
name = dlg.getData()
print "The name has been modified to",name
class DialogListener:
def onProcessEvent(self):
print "onProcessEvent(): OK has been pressed"
import sys
sys.exit(0)
def TEST_MyTestDialog_non_modal():
import sys
app = QApplication(sys.argv)
app.lastWindowClosed.connect(app.quit)
dlg=MyTestDialogWithSignals()
    # This dialog window will emit an inputValidated() signal when the
    # OK button is pressed and the data are validated. We then
    # connect this signal to a local slot so that the event can be
    # processed.
dlgListener = DialogListener()
dlg.inputValidated.connect(dlgListener.onProcessEvent)
# This connect instruction means that the signal inputValidated()
    # emitted by the dlg Qt object will trigger a call to the slot
# dlgListener.onProcessEvent
dlg.setData("A default name")
dlg.show()
app.exec_()
if __name__ == "__main__":
#TEST_MyTestDialog_modal()
TEST_MyTestDialog_non_modal()
|
FedoraScientific/salome-gui
|
src/GUI_PY/mytestdialog.py
|
Python
|
lgpl-2.1
| 5,327
|
from django.core.management import call_command
from celery import shared_task
@shared_task(ignore_result=False,
track_started=True)
def syncldap():
"""
Call the appropriate management command to synchronize the LDAP users
with the local database.
"""
call_command('syncldap')
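# A sketch of how this task might be triggered (assuming a configured Celery app):
#   syncldap.delay()            # queue an asynchronous run
# or schedule it periodically via celery beat.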
|
alexsilva/django-ldap-sync
|
ldap_sync/tasks.py
|
Python
|
bsd-3-clause
| 313
|
# decodex - simple enigma decoder.
#
# Copyright (c) 2013 Paul R. Tagliamonte <tag@pault.ag>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from collections import defaultdict
def cleanup(what):
return what.strip().lower().replace("'", "")
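# Multiset containment check: every character of substr must be available in
# superstr, respecting multiplicity (characters are consumed as they match).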
def issubset(superstr, substr):
superstr = list(superstr)
for ch in substr:
if ch not in superstr:
return False
superstr.remove(ch)
return True
def strsub(superstr, substr):
superstr = list(superstr)
substr = list(substr)
for k in substr:
superstr.remove(k)
return "".join(superstr)
class Words(object):
def __init__(self, dictionary):
self.path = "/usr/share/dict/%s" % (dictionary)
self.mapping = defaultdict(set)
self.word_hash = {}
self._build_map()
def _build_map(self):
for line in (cleanup(x) for x in open(self.path, 'r')):
self.word_hash[line] = line
self.mapping["".join(sorted(line))].add(line)
def anagram(self, word, depth=2):
if depth == 0:
return
l_hash = "".join(sorted(word))
# OK. Let's start simple.
if l_hash in self.mapping:
for entry in self.mapping[l_hash]:
yield [entry]
# Meh, Let's do our best and find l_hash in r_hash.
for r_hash, entries in self.mapping.items():
if issubset(l_hash, r_hash):
leftover = strsub(l_hash, r_hash)
# OK. So, this is a word if we can match the rest.
for anagram in self.anagram(leftover, depth=(depth - 1)):
for entry in entries:
yield [entry] + anagram
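# A minimal usage sketch (assumes a dictionary file exists at /usr/share/dict/words):
#   words = Words("words")
#   for combo in words.anagram("listen"):
#       print combo   # e.g. ['silent'], ['enlist'], or multi-word combinations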
|
paultag/decodex
|
decodex/utils/words.py
|
Python
|
agpl-3.0
| 2,313
|
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
import datetime
import json
from decimal import Decimal
from dateutil import parser
from weboob.browser import LoginBrowser, need_login, StatesMixin
from weboob.browser.switch import SiteSwitch
from weboob.browser.url import URL
from weboob.capabilities.bank import Account, AddRecipientStep, Recipient, TransferBankError, Transaction, TransferStep
from weboob.capabilities.base import NotAvailable, find_object
from weboob.capabilities.profile import Profile
from weboob.browser.exceptions import BrowserHTTPNotFound, ClientError, ServerError
from weboob.exceptions import (
BrowserIncorrectPassword, BrowserUnavailable, BrowserHTTPError, BrowserPasswordExpired, ActionNeeded
)
from weboob.tools.capabilities.bank.transactions import (
sorted_transactions, FrenchTransaction, keep_only_card_transactions,
omit_deferred_transactions,
)
from weboob.tools.capabilities.bank.investments import create_french_liquidity
from weboob.tools.compat import urljoin, urlparse
from weboob.tools.value import Value
from weboob.tools.decorators import retry
from .pages import (
IndexPage, ErrorPage, MarketPage, LifeInsurance, LifeInsuranceHistory, LifeInsuranceInvestments, GarbagePage, MessagePage, LoginPage,
TransferPage, ProTransferPage, TransferConfirmPage, TransferSummaryPage, ProTransferConfirmPage,
ProTransferSummaryPage, ProAddRecipientOtpPage, ProAddRecipientPage,
SmsPage, SmsPageOption, SmsRequest, AuthentPage, RecipientPage, CanceledAuth, CaissedepargneKeyboard,
TransactionsDetailsPage, LoadingPage, ConsLoanPage, MeasurePage, NatixisLIHis, NatixisLIInv, NatixisRedirectPage,
SubscriptionPage, CreditCooperatifMarketPage, UnavailablePage, CardsPage, CardsComingPage, CardsOldWebsitePage,
)
from .linebourse_browser import LinebourseAPIBrowser
__all__ = ['CaisseEpargne']
class CaisseEpargne(LoginBrowser, StatesMixin):
BASEURL = "https://www.caisse-epargne.fr"
STATE_DURATION = 5
HISTORY_MAX_PAGE = 200
LINEBOURSE_BROWSER = LinebourseAPIBrowser
login = URL('/authentification/manage\?step=identification&identifiant=(?P<login>.*)',
'https://.*/login.aspx', LoginPage)
account_login = URL('/authentification/manage\?step=account&identifiant=(?P<login>.*)&account=(?P<accountType>.*)', LoginPage)
loading = URL('https://.*/CreditConso/ReroutageCreditConso.aspx', LoadingPage)
cons_loan = URL('https://www.credit-conso-cr.caisse-epargne.fr/websavcr-web/rest/contrat/getContrat\?datePourIe=(?P<datepourie>)', ConsLoanPage)
transaction_detail = URL('https://.*/Portail.aspx.*', TransactionsDetailsPage)
recipient = URL('https://.*/Portail.aspx.*', RecipientPage)
transfer = URL('https://.*/Portail.aspx.*', TransferPage)
transfer_summary = URL('https://.*/Portail.aspx.*', TransferSummaryPage)
transfer_confirm = URL('https://.*/Portail.aspx.*', TransferConfirmPage)
pro_transfer = URL('https://.*/Portail.aspx.*', ProTransferPage)
pro_transfer_confirm = URL('https://.*/Portail.aspx.*', ProTransferConfirmPage)
pro_transfer_summary = URL('https://.*/Portail.aspx.*', ProTransferSummaryPage)
pro_add_recipient_otp = URL('https://.*/Portail.aspx.*', ProAddRecipientOtpPage)
pro_add_recipient = URL('https://.*/Portail.aspx.*', ProAddRecipientPage)
measure_page = URL('https://.*/Portail.aspx.*', MeasurePage)
cards_old = URL('https://.*/Portail.aspx.*', CardsOldWebsitePage)
cards = URL('https://.*/Portail.aspx.*', CardsPage)
cards_coming = URL('https://.*/Portail.aspx.*', CardsComingPage)
authent = URL('https://.*/Portail.aspx.*', AuthentPage)
subscription = URL('https://.*/Portail.aspx\?tache=(?P<tache>).*', SubscriptionPage)
home = URL('https://.*/Portail.aspx.*', IndexPage)
home_tache = URL('https://.*/Portail.aspx\?tache=(?P<tache>).*', IndexPage)
error = URL('https://.*/login.aspx',
'https://.*/Pages/logout.aspx.*',
'https://.*/particuliers/Page_erreur_technique.aspx.*', ErrorPage)
market = URL('https://.*/Pages/Bourse.*',
'https://www.caisse-epargne.offrebourse.com/ReroutageSJR',
r'https://www.caisse-epargne.offrebourse.com/fr/6CE.*', MarketPage)
unavailable_page = URL('https://www.caisse-epargne.fr/.*/au-quotidien', UnavailablePage)
creditcooperatif_market = URL('https://www.offrebourse.com/.*', CreditCooperatifMarketPage) # just to catch the landing page of the Credit Cooperatif's Linebourse
natixis_redirect = URL(r'/NaAssuranceRedirect/NaAssuranceRedirect.aspx',
r'https://www.espace-assurances.caisse-epargne.fr/espaceinternet-ce/views/common/routage-itce.xhtml\?windowId=automatedEntryPoint',
NatixisRedirectPage)
life_insurance_history = URL(r'https://www.extranet2.caisse-epargne.fr/cin-front/contrats/evenements', LifeInsuranceHistory)
life_insurance_investments = URL(r'https://www.extranet2.caisse-epargne.fr/cin-front/contrats/details', LifeInsuranceInvestments)
life_insurance = URL(r'https://.*/Assurance/Pages/Assurance.aspx',
r'https://www.extranet2.caisse-epargne.fr.*', LifeInsurance)
natixis_life_ins_his = URL(r'https://www.espace-assurances.caisse-epargne.fr/espaceinternet-ce/rest/v2/contratVie/load-operation/(?P<id1>\w+)/(?P<id2>\w+)/(?P<id3>)', NatixisLIHis)
natixis_life_ins_inv = URL(r'https://www.espace-assurances.caisse-epargne.fr/espaceinternet-ce/rest/v2/contratVie/load/(?P<id1>\w+)/(?P<id2>\w+)/(?P<id3>)', NatixisLIInv)
message = URL(r'https://www.caisse-epargne.offrebourse.com/DetailMessage\?refresh=O', MessagePage)
garbage = URL(r'https://www.caisse-epargne.offrebourse.com/Portefeuille',
r'https://www.caisse-epargne.fr/particuliers/.*/emprunter.aspx',
r'https://.*/particuliers/emprunter.*',
r'https://.*/particuliers/epargner.*', GarbagePage)
sms = URL(r'https://www.icgauth.caisse-epargne.fr/dacswebssoissuer/AuthnRequestServlet', SmsPage)
sms_option = URL(r'https://www.icgauth.caisse-epargne.fr/dacstemplate-SOL/index.html\?transactionID=.*', SmsPageOption)
request_sms = URL(r'https://www.icgauth.caisse-epargne.fr/dacsrest/api/v1u0/transaction/(?P<param>)', SmsRequest)
__states__ = ('BASEURL', 'multi_type', 'typeAccount', 'is_cenet_website', 'recipient_form', 'is_send_sms')
# Accounts managed in life insurance space (not in linebourse)
insurance_accounts = ('AIKIDO',
'ASSURECUREUIL',
'ECUREUIL PROJET',
'GARANTIE RETRAITE EU',
'INITIATIVES PLUS',
'INITIATIVES TRANSMIS',
'LIVRET ASSURANCE VIE',
'OCEOR EVOLUTION',
'PATRIMONIO CRESCENTE',
'PEP TRANSMISSION',
'PERP',
'PERSPECTIVES ECUREUI',
'POINTS RETRAITE ECUR',
'RICOCHET',
'SOLUTION PERP',
'TENDANCES',
'YOGA', )
def __init__(self, nuser, *args, **kwargs):
self.BASEURL = kwargs.pop('domain', self.BASEURL)
if not self.BASEURL.startswith('https://'):
self.BASEURL = 'https://%s' % self.BASEURL
self.is_cenet_website = False
self.new_website = True
self.multi_type = False
self.accounts = None
self.loans = None
self.typeAccount = None
self.inexttype = 0 # keep track of index in the connection type's list
self.nuser = nuser
self.recipient_form = None
self.is_send_sms = None
self.weboob = kwargs['weboob']
self.market_url = kwargs.pop(
'market_url',
'https://www.caisse-epargne.offrebourse.com',
)
super(CaisseEpargne, self).__init__(*args, **kwargs)
dirname = self.responses_dirname
if dirname:
dirname += '/bourse'
self.linebourse = self.LINEBOURSE_BROWSER(
self.market_url,
logger=self.logger,
responses_dirname=dirname,
weboob=self.weboob,
proxy=self.PROXIES,
)
def deleteCTX(self):
        # For connections to offrebourse and natixis, we need to delete the duplicate CTX cookie
if len([k for k in self.session.cookies.keys() if k == 'CTX']) > 1:
del self.session.cookies['CTX']
def load_state(self, state):
if state.get('expire') and parser.parse(state['expire']) < datetime.datetime.now():
return self.logger.info('State expired, not reloading it from storage')
# Reload session only for add recipient step
transfer_states = ('recipient_form', 'is_send_sms')
for transfer_state in transfer_states:
if transfer_state in state and state[transfer_state] is not None:
super(CaisseEpargne, self).load_state(state)
self.logged = True
break
    # need to post to validate the OTP when adding a recipient.
def locate_browser(self, state):
if 'is_send_sms' in state and state['is_send_sms']:
super(CaisseEpargne, self).locate_browser(state)
def do_login(self):
"""
Attempt to log in.
Note: this method does nothing if we are already logged in.
"""
# Among the parameters used during the login step, there is
# a connection type (called typeAccount) that can take the
# following values:
# WE: espace particulier
# WP: espace pro
# WM: personnes protégées
# EU: Cenet
#
# A connection can have one connection type as well as many of
# them. There is an issue when there is many connection types:
# the connection type to use can't be guessed in advance, we
# have to test all of them until the login step is successful
# (sometimes all connection type can be used for the login, sometimes
# only one will work).
#
# For simplicity's sake, we try each connection type from first to
# last (they are returned in a list by the first request)
#
# Examples of connection types combination that have been seen so far:
# [WE]
# [WP]
# [WE, WP]
# [WE, WP, WM]
# [WP, WM]
# [EU]
# [EU, WE] (EU tends to come first when present)
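        # In practice this means self.inexttype walks the accounts_types list across
        # successive do_login() calls (see the recursion further down) until one
        # connection type authenticates or the list is exhausted.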
if not self.username or not self.password:
raise BrowserIncorrectPassword()
# Retrieve the list of types: can contain a single type or more
# - when there is a single type: all the information are available
# - when there are several types: an additional request is needed
try:
connection = self.login.go(login=self.username)
        # The website sometimes crashes when the module is not on caissedepargne (on linebourse, for example).
        # The module then thinks it is not connected anymore, so we go to the logged home page. If there is no
        # error, that means we are already logged in and now on the right website.
except ValueError:
self.home.go()
if self.home.is_here():
return
            # If that's not the case, it's another error that we have to propagate
raise
data = connection.get_response()
if data is None:
raise BrowserIncorrectPassword()
accounts_types = data.get('account', [])
if not self.nuser and 'WE' not in accounts_types:
raise BrowserIncorrectPassword("Utilisez Caisse d'Épargne Professionnels et renseignez votre nuser pour connecter vos comptes sur l'epace Professionels ou Entreprises.")
if len(accounts_types) > 1:
# Additional request when there is more than one connection type
# to "choose" from the list of connection types
self.multi_type = True
if self.inexttype < len(accounts_types):
if accounts_types[self.inexttype] == 'EU' and not self.nuser:
# when EU is present and not alone, it tends to come first
# if nuser is unset though, user probably doesn't want 'EU'
self.inexttype += 1
self.typeAccount = accounts_types[self.inexttype]
else:
assert False, 'should have logged in with at least one connection type'
self.inexttype += 1
data = self.account_login.go(login=self.username, accountType=self.typeAccount).get_response()
assert data is not None
if data.get('authMode', '') == 'redirect': # the connection type EU could also be used as a criteria
raise SiteSwitch('cenet')
typeAccount = data['account'][0]
if self.multi_type:
assert typeAccount == self.typeAccount
id_token_clavier = data['keyboard']['Id']
vk = CaissedepargneKeyboard(data['keyboard']['ImageClavier'], data['keyboard']['Num']['string'])
newCodeConf = vk.get_string_code(self.password)
        payload = {
'idTokenClavier': id_token_clavier,
'newCodeConf': newCodeConf,
'auth_mode': 'ajax',
'nuusager': self.nuser.encode('utf-8'),
'codconf': '', # must be present though empty
'typeAccount': typeAccount,
'step': 'authentification',
'ctx': 'typsrv={}'.format(typeAccount),
'clavierSecurise': '1',
'nuabbd': self.username
}
try:
            res = self.location(data['url'], params=payload)
except ValueError:
raise BrowserUnavailable()
if not res.page:
raise BrowserUnavailable()
response = res.page.get_response()
assert response is not None
if response['error'] == 'Veuillez changer votre mot de passe':
raise BrowserPasswordExpired(response['error'])
if not response['action']:
# the only possible way to log in w/o nuser is on WE. if we're here no need to go further.
if not self.nuser and self.typeAccount == 'WE':
raise BrowserIncorrectPassword(response['error'])
# we tested all, next iteration will throw the assertion
if self.inexttype == len(accounts_types) and 'Temporairement votre abonnement est bloqué' in response['error']:
raise ActionNeeded(response['error'])
if self.multi_type:
# try to log in with the next connection type's value
self.do_login()
return
raise BrowserIncorrectPassword(response['error'])
self.BASEURL = urljoin(data['url'], '/')
try:
self.home.go()
except BrowserHTTPNotFound:
raise BrowserIncorrectPassword()
def loans_conso(self):
days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
month = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')
now = datetime.datetime.today()
# for non-DST
# d = '%s %s %s %s %s:%s:%s GMT+0100 (heure normale d’Europe centrale)' % (days[now.weekday()], now.day, month[now.month - 1], now.year, now.hour, format(now.minute, "02"), now.second)
# TODO use babel library to simplify this code
d = '%s %s %s %s %s:%s:%s GMT+0200 (heure d’été d’Europe centrale)' % (days[now.weekday()], now.day, month[now.month - 1], now.year, now.hour, format(now.minute, "02"), now.second)
if self.home.is_here():
msg = self.page.loan_unavailable_msg()
if msg:
self.logger.warning('%s' % msg)
return None
self.cons_loan.go(datepourie=d)
return self.page.get_conso()
    # On the home page there is a list of "measure" links, each one leading to one person's account list.
    # Iterate over each 'measure' and navigate to it to get all accounts.
@need_login
def get_measure_accounts_list(self):
self.home.go()
# Make sure we are on list of measures page
if self.measure_page.is_here():
self.page.check_no_accounts()
measure_ids = self.page.get_measure_ids()
self.accounts = []
for measure_id in measure_ids:
self.page.go_measure_accounts_list(measure_id)
if self.page.check_measure_accounts():
for account in list(self.page.get_list()):
account._info['measure_id'] = measure_id
self.accounts.append(account)
self.page.go_measure_list()
for account in self.accounts:
if 'acc_type' in account._info and account._info['acc_type'] == Account.TYPE_LIFE_INSURANCE:
self.page.go_measure_list()
self.page.go_measure_accounts_list(account._info['measure_id'])
self.page.go_history(account._info)
if self.message.is_here():
self.page.submit()
self.page.go_history(account._info)
balance = self.page.get_measure_balance(account)
account.balance = Decimal(FrenchTransaction.clean_amount(balance))
account.currency = account.get_currency(balance)
return self.accounts
def update_linebourse_token(self):
assert self.linebourse is not None, "linebourse browser should already exist"
self.linebourse.session.cookies.update(self.session.cookies)
# It is important to fetch the domain dynamically because
# for caissedepargne the domain is 'www.caisse-epargne.offrebourse.com'
# whereas for creditcooperatif it is 'www.offrebourse.com'
domain = urlparse(self.url).netloc
self.linebourse.session.headers['X-XSRF-TOKEN'] = self.session.cookies.get('XSRF-TOKEN', domain=domain)
@need_login
@retry(ClientError, tries=3)
def get_accounts_list(self):
if self.accounts is None:
self.accounts = self.get_measure_accounts_list()
if self.accounts is None:
if self.home.is_here():
self.page.check_no_accounts()
self.page.go_list()
else:
self.home.go()
self.accounts = list(self.page.get_list())
for account in self.accounts:
self.deleteCTX()
if account.type in (Account.TYPE_MARKET, Account.TYPE_PEA):
self.home_tache.go(tache='CPTSYNT0')
self.page.go_history(account._info)
if self.message.is_here():
self.page.submit()
self.page.go_history(account._info)
# Some users may not have access to this.
if not self.market.is_here():
continue
self.page.submit()
if 'offrebourse.com' in self.url:
# Some users may not have access to this.
if self.page.is_error():
continue
self.update_linebourse_token()
page = self.linebourse.go_portfolio(account.id)
assert self.linebourse.portfolio.is_here()
# We must declare "page" because this URL also matches MarketPage
account.valuation_diff = page.get_valuation_diff()
# We need to go back to the synthesis, else we can not go home later
self.home_tache.go(tache='CPTSYNT0')
else:
assert False, "new domain that hasn't been seen so far ?"
"""
        Card cases are really tricky on the new website.
        There are 2 kinds of pages where we can find card information:
            - CardsPage: lists some of the PSU cards
            - CardsComingPage: on the coming transactions page (for a specific checking account),
                we can find all cards related to this checking account. The information needed to
                reach this page is on the home page.
        We have to go through both kinds of pages for these reasons:
            - If there is no coming transaction yet, the card will not be found on the home page and we will
            not be able to reach the CardsComingPage. But we can find it on CardsPage.
            - Some cards are only on the CardsComingPage and not on the CardsPage.
            - On CardsPage, there are cards (with "Business" in the label) without a checking account on the
            website (neither history nor coming), so we skip them.
            - Some cards on the CardsPage have a checking account parent, but if we follow the link to
            reach it via CardsComingPage, we find another card that is not on CardsPage.
"""
if self.new_website:
for account in self.accounts:
                # Add the card accounts that we find in the CardsComingPage of each checking account
if account._card_links:
self.home.go()
self.page.go_history(account._card_links)
for card in self.page.iter_cards():
card.parent = account
card._coming_info = self.page.get_card_coming_info(card.number, card.parent._card_links.copy())
self.accounts.append(card)
self.home.go()
self.page.go_list()
self.page.go_cards()
            # We are on the new website. We already added some cards, but we can find more of them on the CardsPage
if self.cards.is_here():
for card in self.page.iter_cards():
card.parent = find_object(self.accounts, number=card._parent_id)
assert card.parent, 'card account parent %s was not found' % card
# If we already added this card, we don't have to add it a second time
if find_object(self.accounts, number=card.number):
continue
info = card.parent._card_links
                    # If card.parent._card_links is not filled, it means this checking account
# has no coming transactions.
card._coming_info = None
if info:
self.page.go_list()
self.page.go_history(info)
card._coming_info = self.page.get_card_coming_info(card.number, info.copy())
if not card._coming_info:
self.logger.warning('Skip card %s (not found on checking account)', card.number)
continue
self.accounts.append(card)
            # We are on the old website. We add all cards that we can find on the CardsPage
elif self.cards_old.is_here():
for card in self.page.iter_cards():
card.parent = find_object(self.accounts, number=card._parent_id)
assert card.parent, 'card account parent %s was not found' % card.number
self.accounts.append(card)
# Some accounts have no available balance or label and cause issues
# in the backend so we must exclude them from the accounts list:
self.accounts = [account for account in self.accounts if account.label and account.balance != NotAvailable]
for account in self.accounts:
yield account
@need_login
def get_loans_list(self):
if self.loans is None:
self.loans = []
if self.home.is_here():
if self.page.check_no_accounts() or self.page.check_no_loans():
return []
for trial in range(5):
for _ in range(3):
self.home_tache.go(tache='CRESYNT0')
if self.home.is_here():
break
if self.home.is_here():
if not self.page.is_access_error():
# The server often returns a 520 error (Undefined):
try:
self.loans = list(self.page.get_real_estate_loans())
self.loans.extend(self.page.get_loan_list())
except ServerError:
self.logger.warning('Access to loans failed, we try again')
else:
# We managed to reach the Loans JSON
break
for _ in range(3):
try:
self.home_tache.go(tache='CPTSYNT0')
if self.home.is_here():
self.page.go_list()
except ClientError:
pass
else:
break
return iter(self.loans)
    # For every account, we fill up the history with transactions. For checking accounts, there will
    # also be deferred card transactions.
    # Following this logic, if "account_card" is given, we retrieve all transactions from the parent
    # checking account of account_card and later filter out the deferred transactions.
@need_login
def _get_history(self, info, account_card=None):
# Only fetch deferred debit card transactions if `account_card` is not None
if isinstance(info['link'], list):
info['link'] = info['link'][0]
if not info['link'].startswith('HISTORIQUE'):
return
if 'measure_id' in info:
self.page.go_measure_list()
self.page.go_measure_accounts_list(info['measure_id'])
elif self.home.is_here():
self.page.go_list()
else:
self.home_tache.go(tache='CPTSYNT0')
self.page.go_history(info)
# ensure we are on the correct history page
if 'netpro' in self.page.url and not self.page.is_history_of(info['id']):
self.page.go_history_netpro(info)
# In this case, we want the coming transaction for the new website
        # (the old website returns coming transactions directly in `get_coming()`)
if account_card and info and info['type'] == 'HISTORIQUE_CB':
self.page.go_coming(account_card._coming_info['link'])
info['link'] = [info['link']]
for i in range(self.HISTORY_MAX_PAGE):
assert self.home.is_here()
# list of transactions on account page
transactions_list = []
card_and_forms = []
for tr in self.page.get_history():
transactions_list.append(tr)
if tr.type == tr.TYPE_CARD_SUMMARY:
if account_card:
if self.card_matches(tr.card, account_card.number):
card_and_forms.append((tr.card, self.page.get_form_to_detail(tr)))
else:
self.logger.debug('will skip summary detail (%r) for different card %r', tr, account_card.number)
            # For deferred card history only:
            #
            # Now that we have found the transactions with TYPE_CARD_SUMMARY on the checking account AND the account_card number we want,
            # we browse the deferred card transactions that are summarized by that list of TYPE_CARD_SUMMARY transactions.
            # Checking account transaction:
            # - 01/01 - Summary 5134XXXXXX103 - 900.00€ - TYPE_CARD_SUMMARY  <-- We have to go into the form of this tr to get
            #   the card's detailed transactions.
for card, form in card_and_forms:
form.submit()
if self.home.is_here() and self.page.is_access_error():
self.logger.warning('Access to card details is unavailable for this user')
continue
assert self.transaction_detail.is_here()
for tr in self.page.get_detail():
tr.type = Transaction.TYPE_DEFERRED_CARD
if account_card:
tr.card = card
tr.bdate = tr.rdate
transactions_list.append(tr)
if self.new_website:
self.page.go_newsite_back_to_summary()
else:
self.page.go_form_to_summary()
# going back to summary goes back to first page
for j in range(i):
assert self.page.go_next()
# order by date the transactions without the summaries
transactions_list = sorted_transactions(transactions_list)
for tr in transactions_list:
yield tr
assert self.home.is_here()
if not self.page.go_next():
return
assert False, 'More than {} history pages'.format(self.HISTORY_MAX_PAGE)
@need_login
def _get_history_invests(self, account):
if self.home.is_here():
self.page.go_list()
else:
self.home.go()
self.page.go_history(account._info)
if account.type in (Account.TYPE_LIFE_INSURANCE, Account.TYPE_CAPITALISATION, Account.TYPE_PERP):
if self.page.is_account_inactive(account.id):
self.logger.warning('Account %s %s is inactive.' % (account.label, account.id))
return []
# There is (currently ?) no history for MILLEVIE PREMIUM accounts
if "MILLEVIE" in account.label:
try:
self.page.go_life_insurance(account)
except ServerError as ex:
if ex.response.status_code == 500 and 'MILLEVIE PREMIUM' in account.label:
self.logger.info("Can not reach history page for MILLEVIE PREMIUM account")
return []
raise
label = account.label.split()[-1]
try:
self.natixis_life_ins_his.go(id1=label[:3], id2=label[3:5], id3=account.id)
except BrowserHTTPError as e:
if e.response.status_code == 500:
error = json.loads(e.response.text)
raise BrowserUnavailable(error["error"])
raise
return sorted_transactions(self.page.get_history())
if account.label.startswith('NUANCES ') or account.label in self.insurance_accounts:
self.page.go_life_insurance(account)
if 'JSESSIONID' in self.session.cookies:
# To access the life insurance space, we need to delete the JSESSIONID cookie to avoid an expired session
del self.session.cookies['JSESSIONID']
try:
if not self.life_insurance.is_here() and not self.message.is_here():
# life insurance website is not always available
raise BrowserUnavailable()
self.page.submit()
self.life_insurance_history.go()
# Life insurance transactions are not sorted by date in the JSON
return sorted_transactions(self.page.iter_history())
except (IndexError, AttributeError) as e:
self.logger.error(e)
return []
return self.page.iter_history()
@need_login
def get_history(self, account):
self.home.go()
self.deleteCTX()
if account.type == account.TYPE_CARD:
def match_cb(tr):
return self.card_matches(tr.card, account.number)
hist = self._get_history(account.parent._info, account)
hist = keep_only_card_transactions(hist, match_cb)
return hist
if not hasattr(account, '_info'):
raise NotImplementedError
if account.type in (Account.TYPE_LIFE_INSURANCE, Account.TYPE_CAPITALISATION) and 'measure_id' not in account._info:
return self._get_history_invests(account)
if account.type in (Account.TYPE_MARKET, Account.TYPE_PEA):
self.page.go_history(account._info)
if "Bourse" in self.url:
self.page.submit()
if 'offrebourse.com' in self.url:
# Some users may not have access to this.
if self.page.is_error():
return []
self.linebourse.session.cookies.update(self.session.cookies)
self.update_linebourse_token()
return self.linebourse.iter_history(account.id)
hist = self._get_history(account._info, False)
return omit_deferred_transactions(hist)
@need_login
def get_coming(self, account):
if account.type != account.TYPE_CARD:
return []
trs = []
if not hasattr(account.parent, '_info'):
raise NotImplementedError()
# We are on the old website
if hasattr(account, '_coming_eventargument'):
if not self.cards_old.is_here():
self.home.go()
self.page.go_list()
self.page.go_cards()
self.page.go_card_coming(account._coming_eventargument)
return sorted_transactions(self.page.iter_coming())
# We are on the new website.
info = account.parent._card_links
        # if info is empty, that means there are no coming transactions yet
if info:
for tr in self._get_history(info.copy(), account):
tr.type = tr.TYPE_DEFERRED_CARD
trs.append(tr)
return sorted_transactions(trs)
@need_login
def get_investment(self, account):
self.deleteCTX()
if account.type not in (Account.TYPE_LIFE_INSURANCE, Account.TYPE_CAPITALISATION, Account.TYPE_MARKET, Account.TYPE_PEA) or 'measure_id' in account._info:
raise NotImplementedError()
if account.type == Account.TYPE_PEA and account.label == 'PEA NUMERAIRE':
yield create_french_liquidity(account.balance)
return
if self.home.is_here():
self.page.go_list()
else:
self.home.go()
self.page.go_history(account._info)
if account.type in (Account.TYPE_MARKET, Account.TYPE_PEA):
# Some users may not have access to this.
if not self.market.is_here():
return
self.page.submit()
if 'offrebourse.com' in self.url:
# Some users may not have access to this.
if self.page.is_error():
return
self.update_linebourse_token()
for investment in self.linebourse.iter_investments(account.id):
yield investment
# We need to go back to the synthesis, else we can not go home later
self.home_tache.go(tache='CPTSYNT0')
return
elif account.type in (Account.TYPE_LIFE_INSURANCE, Account.TYPE_CAPITALISATION):
if self.page.is_account_inactive(account.id):
self.logger.warning('Account %s %s is inactive.' % (account.label, account.id))
return
if "MILLEVIE" in account.label:
try:
self.page.go_life_insurance(account)
except ServerError as ex:
if ex.response.status_code == 500 and 'MILLEVIE PREMIUM' in account.label:
self.logger.info("Can not reach investment page for MILLEVIE PREMIUM account")
return
raise
label = account.label.split()[-1]
self.natixis_life_ins_inv.go(id1=label[:3], id2=label[3:5], id3=account.id)
for tr in self.page.get_investments():
yield tr
return
try:
self.page.go_life_insurance(account)
if not self.market.is_here() and not self.message.is_here():
# life insurance website is not always available
raise BrowserUnavailable()
self.page.submit()
self.life_insurance_investments.go()
except (IndexError, AttributeError) as e:
self.logger.error(e)
return
if self.garbage.is_here():
self.page.come_back()
return
for i in self.page.iter_investment():
yield i
if self.market.is_here():
self.page.come_back()
@need_login
def get_advisor(self):
raise NotImplementedError()
@need_login
def get_profile(self):
from weboob.tools.misc import to_unicode
profile = Profile()
if len([k for k in self.session.cookies.keys() if k == 'CTX']) > 1:
del self.session.cookies['CTX']
elif 'username=' in self.session.cookies.get('CTX', ''):
profile.name = to_unicode(re.search('username=([^&]+)', self.session.cookies['CTX']).group(1))
elif 'nomusager=' in self.session.cookies.get('headerdei'):
profile.name = to_unicode(re.search('nomusager=(?:[^&]+/ )?([^&]+)', self.session.cookies['headerdei']).group(1))
return profile
@need_login
def iter_recipients(self, origin_account):
if origin_account.type in [Account.TYPE_LOAN, Account.TYPE_CARD]:
return []
if 'pro' in self.url:
            # If transfer is not yet allowed, the next step will send an SMS to the customer to validate it
self.home.go()
self.page.go_pro_transfer_availability()
if not self.page.is_transfer_allowed():
return []
# Transfer unavailable
try:
self.pre_transfer(origin_account)
except TransferBankError:
return []
go_transfer_errors = (
# redirected to home page because:
# - need to relogin, see `self.page.need_auth()`
# - need more security, see `self.page.transfer_unavailable()`
# - transfer is not available for this connection, see `self.page.go_transfer_via_history()`
# TransferPage inherit from IndexPage so self.home.is_here() is true, check page type to avoid this problem
type(self.page) is IndexPage,
# check if origin_account have recipients
self.transfer.is_here() and not self.page.can_transfer(origin_account),
)
if any(go_transfer_errors):
return []
return self.page.iter_recipients(account_id=origin_account.id)
def pre_transfer(self, account):
if self.home.is_here():
if 'measure_id' in account._info:
self.page.go_measure_list()
self.page.go_measure_accounts_list(account._info['measure_id'])
else:
self.page.go_list()
else:
self.home.go()
self.page.go_transfer(account)
@need_login
def init_transfer(self, account, recipient, transfer):
self.is_send_sms = False
self.pre_transfer(account)
self.page.init_transfer(account, recipient, transfer)
if self.sms_option.is_here():
self.is_send_sms = True
raise TransferStep(
transfer,
Value(
'otp_sms',
label='Veuillez renseigner le mot de passe unique qui vous a été envoyé par SMS dans le champ réponse.'
)
)
if 'netpro' in self.url:
return self.page.create_transfer(account, recipient, transfer)
self.page.continue_transfer(account.label, recipient.label, transfer.label)
return self.page.update_transfer(transfer, account, recipient)
@need_login
def otp_sms_continue_transfer(self, transfer, **params):
self.is_send_sms = False
assert 'otp_sms' in params, 'OTP SMS is missing'
self.otp_sms_validation(params['otp_sms'])
if self.transfer.is_here():
self.page.continue_transfer(transfer.account_label, transfer.recipient_label, transfer.label)
return self.page.update_transfer(transfer)
@need_login
def execute_transfer(self, transfer):
self.page.confirm()
return self.page.populate_reference(transfer)
def get_recipient_obj(self, recipient):
r = Recipient()
r.iban = recipient.iban
r.id = recipient.iban
r.label = recipient.label
r.category = u'Externe'
r.enabled_at = datetime.datetime.now().replace(microsecond=0)
r.currency = u'EUR'
r.bank_name = NotAvailable
return r
def otp_sms_validation(self, otp_sms):
tr_id = re.search(r'transactionID=(.*)', self.page.url)
if tr_id:
transaction_id = tr_id.group(1)
else:
assert False, 'Transfer transaction id was not found in url'
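        # Validation flow: fetch the pending DACS transaction, post the SMS OTP against
        # its validation unit, then replay the returned SAML response to the given action
        # URL to finish the authentication.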
self.request_sms.go(param=transaction_id)
key = self.page.validate_key()
data = {
'validate': {
key: [{
'id': self.page.validation_id(key),
'otp_sms': otp_sms,
'type': 'SMS'
}]
}
}
headers = {'Content-Type': 'application/json'}
self.location(self.url + '/step', json=data, headers=headers)
saml = self.page.get_saml()
action = self.page.get_action()
self.location(action, data={'SAMLResponse': saml})
def post_sms_password(self, otp, otp_field_xpath):
data = {}
for k, v in self.recipient_form.items():
if k != 'url':
data[k] = v
data[otp_field_xpath] = otp
self.location(self.recipient_form['url'], data=data)
self.recipient_form = None
def facto_post_recip(self, recipient):
self.page.post_recipient(recipient)
self.page.confirm_recipient()
return self.get_recipient_obj(recipient)
def end_sms_recipient(self, recipient, **params):
self.post_sms_password(params['sms_password'], 'uiAuthCallback__1_')
self.page.post_form()
self.page.go_on()
self.facto_post_recip(recipient)
def end_pro_recipient(self, recipient, **params):
self.post_sms_password(params['pro_password'], 'MM$ANR_WS_AUTHENT$ANR_WS_AUTHENT_SAISIE$txtReponse')
return self.facto_post_recip(recipient)
@retry(CanceledAuth)
@need_login
def new_recipient(self, recipient, **params):
if 'sms_password' in params:
return self.end_sms_recipient(recipient, **params)
if 'otp_sms' in params:
self.otp_sms_validation(params['otp_sms'])
if self.authent.is_here():
self.page.go_on()
return self.facto_post_recip(recipient)
if 'pro_password' in params:
return self.end_pro_recipient(recipient, **params)
self.pre_transfer(next(acc for acc in self.get_accounts_list() if acc.type in (Account.TYPE_CHECKING, Account.TYPE_SAVINGS)))
# This sends an SMS to the user.
self.page.go_add_recipient()
if self.sms_option.is_here():
self.is_send_sms = True
raise AddRecipientStep(
self.get_recipient_obj(recipient),
Value(
'otp_sms',
label='Veuillez renseigner le mot de passe unique qui vous a été envoyé par SMS dans le champ réponse.'
)
)
# pro flow: add a recipient.
elif self.page.need_auth():
self.page.set_browser_form()
raise AddRecipientStep(self.get_recipient_obj(recipient), Value('pro_password', label=self.page.get_prompt_text()))
else:
self.page.check_canceled_auth()
self.page.set_browser_form()
raise AddRecipientStep(self.get_recipient_obj(recipient), Value('sms_password', label=self.page.get_prompt_text()))
@need_login
def iter_subscription(self):
self.home.go()
# CapDocument is not implemented for professional accounts yet
if any(x in self.url for x in ["netpp", "netpro"]):
raise NotImplementedError()
self.home_tache.go(tache='CPTSYNT1')
if self.unavailable_page.is_here():
# some users don't have a checking account
self.home_tache.go(tache='EPASYNT0')
self.page.go_subscription()
if not self.subscription.is_here():
# if the user is not allowed to have a subscription, we are redirected to IndexPage
assert self.home.is_here() and self.page.is_subscription_unauthorized()
return []
if self.page.has_subscriptions():
return self.page.iter_subscription()
return []
@need_login
def iter_documents(self, subscription):
self.home.go()
self.home_tache.go(tache='CPTSYNT1')
if self.unavailable_page.is_here():
# some users don't have a checking account
self.home_tache.go(tache='EPASYNT0')
self.page.go_subscription()
assert self.subscription.is_here()
sub_id = subscription.id
self.page.go_document_list(sub_id=sub_id)
for doc in self.page.iter_documents(sub_id=sub_id):
yield doc
@need_login
def download_document(self, document):
self.home.go()
self.home_tache.go(tache='CPTSYNT1')
if self.unavailable_page.is_here():
# some users don't have a checking account
self.home_tache.go(tache='EPASYNT0')
self.page.go_subscription()
assert self.subscription.is_here()
sub_id = document.id.split('_')[0]
self.page.go_document_list(sub_id=sub_id)
return self.page.download_document(document).content
def card_matches(self, a, b):
# For the same card, depending on where we scrape it, more or fewer
# digits are visible. `X` marks a visible digit, `*` a hidden one.
# tr.card: XXXX******XXXXXX, account.number: XXXXXX******XXXX
return (a[:4], a[-4:]) == (b[:4], b[-4:])
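# Hedged illustration (made-up numbers): the same 16-digit card can appear as
# '4978******567890' in one place and '497812******7890' in another; since the
# first four and last four digits agree, card_matches() treats them as the same card.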
|
vicnet/weboob
|
modules/caissedepargne/browser.py
|
Python
|
lgpl-3.0
| 47,604
|
#############################################################################
##
## Copyright (C) 2015 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms and
## conditions see http://www.qt.io/terms-conditions. For further information
## use the contact form at http://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 or version 3 as published by the Free
## Software Foundation and appearing in the file LICENSE.LGPLv21 and
## LICENSE.LGPLv3 included in the packaging of this file. Please review the
## following information to ensure the GNU Lesser General Public License
## requirements will be met: https://www.gnu.org/licenses/lgpl.html and
## http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, The Qt Company gives you certain additional
## rights. These rights are described in The Qt Company LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
import tempfile
def neededFilePresent(path):
found = os.path.exists(path)
if os.getenv("SYSTEST_DEBUG") == "1":
checkAccess(path)
elif not found:
test.fatal("Missing file or directory: " + path)
return found
def tempDir():
Result = os.path.abspath(os.getcwd()+"/../../testing")
if not os.path.exists(Result):
os.mkdir(Result)
return tempfile.mkdtemp(prefix="qtcreator_", dir=Result)
def deleteDirIfExists(path):
shutil.rmtree(path, True)
def verifyChecked(objectName):
object = waitForObject(objectName)
test.compare(object.checked, True)
return object
def ensureChecked(objectName, shouldBeChecked = True, timeout=20000):
if shouldBeChecked:
targetState = Qt.Checked
state = "checked"
else:
targetState = Qt.Unchecked
state = "unchecked"
widget = waitForObject(objectName, timeout)
try:
# needed for transition Qt::PartiallyChecked -> Qt::Checked -> Qt::Unchecked
clicked = 0
while not waitFor('widget.checkState() == targetState', 1000) and clicked < 2:
clickButton(widget)
clicked += 1
test.verify(waitFor("widget.checkState() == targetState", 1000))
except:
# widgets not derived from QCheckBox don't have checkState()
if not waitFor('widget.checked == shouldBeChecked', 1000):
clickButton(widget)
test.verify(waitFor("widget.checked == shouldBeChecked", 1000))
test.log("New state for QCheckBox: %s" % state,
str(objectName))
return widget
# verify that an object is in the expected enabled state. Returns the object.
# param objectSpec specifies the object to check. It can either be a string determining an object
# or the object itself. If it is an object, it must exist already.
# param expectedState is the expected enabled state of the object
def verifyEnabled(objectSpec, expectedState = True):
if isinstance(objectSpec, (str, unicode)):
waitFor("object.exists('" + str(objectSpec).replace("'", "\\'") + "')", 20000)
foundObject = findObject(objectSpec)
else:
foundObject = objectSpec
if objectSpec == None:
test.warning("No valid object in function verifyEnabled.")
else:
test.compare(foundObject.enabled, expectedState)
return foundObject
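# A minimal usage sketch for verifyEnabled (the object name below is hypothetical):
#   runButton = verifyEnabled("{text='Run' type='QPushButton' visible='1'}", True)
#   clickButton(runButton)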
# select an item from a combo box
# param objectSpec specifies the combo box. It can either be a string determining an object
# or the object itself. If it is an object, it must exist already.
# param itemName is the item to be selected in the combo box
# returns True if selection was changed or False if the wanted value was already selected
def selectFromCombo(objectSpec, itemName):
object = verifyEnabled(objectSpec)
if itemName == str(object.currentText):
return False
else:
mouseClick(object, 5, 5, 0, Qt.LeftButton)
snooze(1)
mouseClick(waitForObjectItem(object, itemName.replace(".", "\\.")), 5, 5, 0, Qt.LeftButton)
test.verify(waitFor("str(object.currentText)==itemName", 5000),
"Switched combo item to '%s'" % itemName)
return True
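# A minimal usage sketch for selectFromCombo (the item name below is hypothetical):
#   changed = selectFromCombo(":Kits_QtVersion_QComboBox", "Qt 5.4.1 (System)")
#   if changed:
#       test.log("Combo box value was switched")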
def selectFromLocator(filter, itemName = None):
if itemName == None:
itemName = filter
itemName = itemName.replace(".", "\\.").replace("_", "\\_")
locator = waitForObject(":*Qt Creator_Utils::FilterLineEdit")
mouseClick(locator, 5, 5, 0, Qt.LeftButton)
replaceEditorContent(locator, filter)
# clicking the wanted item
# if you replace this by pressing ENTER, be sure that something is selected
# otherwise you will run into unwanted behavior
wantedItem = waitForObjectItem("{type='QTreeView' unnamed='1' visible='1'}", itemName)
doubleClick(wantedItem, 5, 5, 0, Qt.LeftButton)
def wordUnderCursor(window):
return textUnderCursor(window, QTextCursor.StartOfWord, QTextCursor.EndOfWord)
def lineUnderCursor(window):
return textUnderCursor(window, QTextCursor.StartOfLine, QTextCursor.EndOfLine)
def textUnderCursor(window, fromPos, toPos):
cursor = window.textCursor()
oldposition = cursor.position()
cursor.movePosition(fromPos)
cursor.movePosition(toPos, QTextCursor.KeepAnchor)
returnValue = cursor.selectedText()
cursor.setPosition(oldposition)
return returnValue
def which(program):
def is_exe(fpath):
return os.path.exists(fpath) and os.access(fpath, os.X_OK)
def callableFile(path):
if is_exe(path):
return path
if platform.system() in ('Windows', 'Microsoft'):
for suffix in suffixes.split(os.pathsep):
if is_exe(path + suffix):
return path + suffix
return None
if platform.system() in ('Windows', 'Microsoft'):
suffixes = os.getenv("PATHEXT")
if not suffixes:
test.fatal("Can't read environment variable PATHEXT. Please check your installation.")
suffixes = ""
fpath, fname = os.path.split(program)
if fpath:
return callableFile(program)
else:
if platform.system() in ('Windows', 'Microsoft'):
cf = callableFile(os.getcwd() + os.sep + program)
if cf:
return cf
for path in os.environ["PATH"].split(os.pathsep):
exe_file = os.path.join(path, program)
cf = callableFile(exe_file)
if cf:
return cf
return None
# this function removes the user files of the given pro file(s)
# it can be called with a single string object or a list/tuple of strings holding path(s) to
# the pro file(s); it returns False if it could not remove all user files or has been
# called with an unsupported object
def cleanUpUserFiles(pathsToProFiles=None):
if pathsToProFiles==None:
return False
if isinstance(pathsToProFiles, (str, unicode)):
filelist = glob.glob(pathsToProFiles+".user*")
elif isinstance(pathsToProFiles, (list, tuple)):
filelist = []
for p in pathsToProFiles:
filelist.extend(glob.glob(p+".user*"))
else:
test.fatal("Got an unsupported object.")
return False
doneWithoutErrors = True
for file in filelist:
try:
file = os.path.abspath(file)
os.remove(file)
except:
doneWithoutErrors = False
return doneWithoutErrors
def invokeMenuItem(menu, item, *subItems):
if platform.system() == "Darwin":
try:
waitForObject(":Qt Creator.QtCreator.MenuBar_QMenuBar", 2000)
except:
nativeMouseClick(waitForObject(":Qt Creator_Core::Internal::MainWindow", 1000), 20, 20, 0, Qt.LeftButton)
# HACK to avoid squish crash using Qt5.2 on Squish 5.0.1 - remove asap
if platform.system() == "Darwin":
if menu == "Tools" and item == "Options...":
nativeType("<Command+,>")
return
if menu == "File" and item == "Exit":
nativeType("<Command+q>")
return
menuObject = waitForObjectItem(":Qt Creator.QtCreator.MenuBar_QMenuBar", menu)
waitFor("menuObject.visible", 1000)
activateItem(menuObject)
itemObject = waitForObjectItem(objectMap.realName(menuObject), item)
waitFor("itemObject.enabled", 2000)
activateItem(itemObject)
for subItem in subItems:
sub = itemObject.menu()
waitFor("sub.visible", 1000)
itemObject = waitForObjectItem(sub, subItem)
activateItem(itemObject)
def logApplicationOutput():
# make sure application output is shown
ensureChecked(":Qt Creator_AppOutput_Core::Internal::OutputPaneToggleButton")
try:
output = waitForObject("{type='Core::OutputWindow' visible='1' windowTitle='Application Output Window'}")
test.log("Application Output:\n%s" % output.plainText)
return str(output.plainText)
except:
test.fail("Could not find any Application Output - did the project run?")
return None
# get the output from a given cmdline call
def getOutputFromCmdline(cmdline):
versCall = subprocess.Popen(cmdline, stdout=subprocess.PIPE, shell=True)
result = versCall.communicate()[0]
versCall.stdout.close()
return result
def selectFromFileDialog(fileName, waitForFile=False):
if platform.system() == "Darwin":
snooze(1)
nativeType("<Command+Shift+g>")
snooze(1)
nativeType(fileName)
snooze(1)
nativeType("<Return>")
snooze(3)
nativeType("<Return>")
snooze(1)
else:
fName = os.path.basename(os.path.abspath(fileName))
pName = os.path.dirname(os.path.abspath(fileName)) + os.sep
try:
waitForObject("{name='QFileDialog' type='QFileDialog' visible='1'}", 5000)
pathLine = waitForObject("{name='fileNameEdit' type='QLineEdit' visible='1'}")
snooze(1)
replaceEditorContent(pathLine, pName)
clickButton(waitForObject("{text='Open' type='QPushButton'}"))
waitFor("str(pathLine.text)==''")
snooze(1)
replaceEditorContent(pathLine, fName)
clickButton(waitForObject("{text='Open' type='QPushButton'}"))
except:
nativeType("<Ctrl+a>")
nativeType("<Delete>")
nativeType(pName + fName)
snooze(1)
nativeType("<Return>")
snooze(3)
if waitForFile:
fileCombo = waitForObject(":Qt Creator_FilenameQComboBox")
if not waitFor("str(fileCombo.currentText) in fileName", 5000):
test.fail("%s could not be opened in time." % fileName)
# add Qt documentation from the given paths
# param which a list/tuple of the paths to the qch files to be added
def addHelpDocumentation(which):
global sdkPath
invokeMenuItem("Tools", "Options...")
waitForObjectItem(":Options_QListView", "Help")
clickItem(":Options_QListView", "Help", 14, 15, 0, Qt.LeftButton)
waitForObject("{container=':Options.qt_tabwidget_tabbar_QTabBar' type='TabItem' text='Documentation'}")
clickOnTab(":Options.qt_tabwidget_tabbar_QTabBar", "Documentation")
# get rid of all docs already registered
listWidget = waitForObject("{type='QListWidget' name='docsListWidget' visible='1'}")
if listWidget.count > 0:
rect = listWidget.visualItemRect(listWidget.item(0))
mouseClick(listWidget, rect.x+5, rect.y+5, 0, Qt.LeftButton)
type(listWidget, "<Ctrl+a>")
mouseClick(waitForObject("{type='QPushButton' name='removeButton' visible='1'}"), 5, 5, 0, Qt.LeftButton)
for qch in which:
clickButton(waitForObject("{type='QPushButton' name='addButton' visible='1' text='Add...'}"))
selectFromFileDialog(qch)
clickButton(waitForObject(":Options.OK_QPushButton"))
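# A minimal usage sketch (the qtDocPath variable below is hypothetical):
#   addHelpDocumentation([os.path.join(qtDocPath, "qtcreator.qch")])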
def addCurrentCreatorDocumentation():
currentCreatorPath = currentApplicationContext().cwd
if platform.system() == "Darwin":
docPath = os.path.abspath(os.path.join(currentCreatorPath, "Qt Creator.app", "Contents",
"Resources", "doc", "qtcreator.qch"))
else:
docPath = os.path.abspath(os.path.join(currentCreatorPath, "..", "share", "doc",
"qtcreator", "qtcreator.qch"))
if not os.path.exists(docPath):
test.fatal("Missing current Qt Creator documentation (expected in %s)" % docPath)
return
invokeMenuItem("Tools", "Options...")
waitForObjectItem(":Options_QListView", "Help")
clickItem(":Options_QListView", "Help", 14, 15, 0, Qt.LeftButton)
waitForObject("{container=':Options.qt_tabwidget_tabbar_QTabBar' type='TabItem' text='Documentation'}")
clickOnTab(":Options.qt_tabwidget_tabbar_QTabBar", "Documentation")
clickButton(waitForObject("{type='QPushButton' name='addButton' visible='1' text='Add...'}"))
selectFromFileDialog(docPath)
try:
waitForObject("{type='QMessageBox' unnamed='1' visible='1' "
"text?='Unable to register documentation.*'}", 3000)
test.passes("Qt Creator's documentation found already registered.")
clickButton(waitForObject("{type='QPushButton' text='OK' unnamed='1' visible='1' "
"container={name='groupBox' type='QGroupBox' visible='1'}}"))
except:
test.fail("Added Qt Creator's documentation explicitly.")
clickButton(waitForObject(":Options.OK_QPushButton"))
def verifyOutput(string, substring, outputFrom, outputIn):
index = string.find(substring)
if (index == -1):
test.fail("Output from " + outputFrom + " could not be found in " + outputIn)
else:
test.passes("Output from " + outputFrom + " found at position " + str(index) + " of " + outputIn)
# function that verifies the existence and the read permissions
# of the given file path
# if the executing user doesn't have read permission, it checks
# the parent folders for their execute permission
def checkAccess(pathToFile):
if os.path.exists(pathToFile):
test.log("Path '%s' exists" % pathToFile)
if os.access(pathToFile, os.R_OK):
test.log("Got read access on '%s'" % pathToFile)
else:
test.fail("No read permission on '%s'" % pathToFile)
else:
test.fatal("Path '%s' does not exist or cannot be accessed" % pathToFile)
__checkParentAccess__(pathToFile)
# helper function for checking the execute rights of all
# parents of filePath
def __checkParentAccess__(filePath):
for i in range(1, filePath.count(os.sep)):
tmp = filePath.rsplit(os.sep, i)[0]
if os.access(tmp, os.X_OK):
test.log("Got execute permission on '%s'" % tmp)
else:
test.fail("No execute permission on '%s'" % tmp)
# this function checks all configured Qt versions inside the
# Options dialog and returns a dict holding the kits as keys
# and the (target, version) information of their configured Qt as values
def getConfiguredKits():
def __retrieveQtVersionName__(target, version):
treeWidget = waitForObject(":QtSupport__Internal__QtVersionManager.qtdirList_QTreeWidget")
return treeWidget.currentItem().text(0)
# end of internal function for iterateQtVersions
def __setQtVersionForKit__(kit, kitName, kitsQtVersionName):
treeView = waitForObject(":BuildAndRun_QTreeView")
clickItem(treeView, kit, 5, 5, 0, Qt.LeftButton)
qtVersionStr = str(waitForObject(":Kits_QtVersion_QComboBox").currentText)
kitsQtVersionName[kitName] = qtVersionStr
# end of internal function for iterate kits
kitsWithQtVersionName = {}
result = {}
# collect kits and their Qt versions
targetsQtVersions, qtVersionNames = iterateQtVersions(True, False, __retrieveQtVersionName__)
# update collected Qt versions with their configured device and version
iterateKits(True, True, __setQtVersionForKit__, kitsWithQtVersionName)
# merge defined target names with their configured Qt versions and devices
for kit, qtVersion in kitsWithQtVersionName.iteritems():
if kit in ('Fremantle', 'Harmattan', 'Qt Simulator'):
test.verify(qtVersion == 'None',
"The outdated kit '%s' should not have a Qt version" % kit)
elif qtVersion in qtVersionNames:
result[kit] = targetsQtVersions[qtVersionNames.index(qtVersion)].items()[0]
else:
test.fail("Qt version '%s' for kit '%s' can't be found in qtVersionNames."
% (qtVersion, kit))
clickButton(waitForObject(":Options.Cancel_QPushButton"))
test.log("Configured kits: %s" % str(result))
return result
def visibleCheckBoxExists(text):
try:
findObject("{type='QCheckBox' text='%s' visible='1'}" % text)
return True
except:
return False
# this function verifies if the text matches the given
# regex inside expectedTexts
# param text must be a single str/unicode
# param expectedTexts can be str/unicode/list/tuple
def regexVerify(text, expectedTexts):
if isinstance(expectedTexts, (str,unicode)):
expectedTexts = [expectedTexts]
for curr in expectedTexts:
pattern = re.compile(curr)
if pattern.match(text):
return True
return False
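# A minimal usage sketch for regexVerify:
#   regexVerify("Qt 5.4.1", ("^Qt \\d\\.\\d.*",))    # returns True
#   regexVerify("GCC 4.9", ["^Qt .*", "^Clang .*"])  # returns False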
# function that opens the Options Dialog and parses the configured Qt versions
# param keepOptionsOpen set to True if the Options dialog should stay open when
# leaving this function
# param alreadyOnOptionsDialog set to True if you have already opened the Options Dialog
# (if False this function will open it via the MenuBar -> Tools -> Options...)
# param additionalFunction pass a function or the name of a defined function to execute
# for each correctly configured item on the list of Qt versions
# (Qt versions having no assigned toolchain, failing qmake,... will be skipped)
# this function must take at least 2 parameters - the first is the target name
# and the second the version of the currently selected Qt version item
# param argsForAdditionalFunc you can specify as many parameters as you want to pass
# to additionalFunction from the outside
# the function returns a list of dicts holding target-version mappings if used without
# additionalFunction
# WATCH OUT! if you're using the additionalFunction parameter - this function will
# return the list mentioned above as well as the returned value(s) from
# additionalFunction. You MUST call this function like
# result, additionalResult = iterateQtVersions(...)
# where additionalResult is the result of all executions of additionalFunction which
# means it is a list of results.
def iterateQtVersions(keepOptionsOpen=False, alreadyOnOptionsDialog=False,
additionalFunction=None, *argsForAdditionalFunc):
result = []
additionalResult = []
if not alreadyOnOptionsDialog:
invokeMenuItem("Tools", "Options...")
waitForObjectItem(":Options_QListView", "Build & Run")
clickItem(":Options_QListView", "Build & Run", 14, 15, 0, Qt.LeftButton)
clickOnTab(":Options.qt_tabwidget_tabbar_QTabBar", "Qt Versions")
pattern = re.compile("Qt version (?P<version>.*?) for (?P<target>.*)")
treeWidget = waitForObject(":QtSupport__Internal__QtVersionManager.qtdirList_QTreeWidget")
root = treeWidget.invisibleRootItem()
for rootChild in dumpChildren(root):
rootChildText = str(rootChild.text(0)).replace(".", "\\.").replace("_", "\\_")
for subChild in dumpChildren(rootChild):
subChildText = str(subChild.text(0)).replace(".", "\\.").replace("_", "\\_")
clickItem(treeWidget, ".".join([rootChildText,subChildText]), 5, 5, 0, Qt.LeftButton)
currentText = str(waitForObject(":QtSupport__Internal__QtVersionManager.QLabel").text)
matches = pattern.match(currentText)
if matches:
target = matches.group("target").strip()
version = matches.group("version").strip()
result.append({target:version})
if additionalFunction:
try:
if isinstance(additionalFunction, (str, unicode)):
currResult = globals()[additionalFunction](target, version, *argsForAdditionalFunc)
else:
currResult = additionalFunction(target, version, *argsForAdditionalFunc)
except:
import sys
t,v,tb = sys.exc_info()
currResult = None
test.fatal("Function to additionally execute on Options Dialog could not be found or "
"an exception occurred while executing it.", "%s(%s)" % (str(t), str(v)))
additionalResult.append(currResult)
if not keepOptionsOpen:
clickButton(waitForObject(":Options.Cancel_QPushButton"))
if additionalFunction:
return result, additionalResult
else:
return result
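# A minimal usage sketch (the callback below is hypothetical); without an additional
# function only the target-version list is returned, with one you must unpack both:
#   versions = iterateQtVersions()
#   def logVersion(target, version):
#       test.log("Found Qt %s for %s" % (version, target))
#   versions, logResults = iterateQtVersions(False, False, logVersion)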
# function that opens the Options Dialog (if necessary) and parses the configured Kits
# param keepOptionsOpen set to True if the Options dialog should stay open when
# leaving this function
# param alreadyOnOptionsDialog set to True if you have already opened the Options Dialog
# (if False this function will open it via the MenuBar -> Tools -> Options...)
# param additionalFunction pass a function or the name of a defined function to execute
# for each configured item on the list of Kits
# this function must take at least 2 parameters - the first is the item (QModelIndex)
# of the current Kit (if you need to click on it) and the second the Kit name itself
# param argsForAdditionalFunc you can specify as many parameters as you want to pass
# to additionalFunction from the outside
# the function returns a list of Kit names if used without an additional function
# WATCH OUT! if you're using the additionalFunction parameter - this function will
# return the list mentioned above as well as the returned value(s) from
# additionalFunction. You MUST call this function like
# result, additionalResult = iterateKits(...)
# where additionalResult is the result of all executions of additionalFunction which
# means it is a list of results.
def iterateKits(keepOptionsOpen=False, alreadyOnOptionsDialog=False,
additionalFunction=None, *argsForAdditionalFunc):
result = []
additionalResult = []
if not alreadyOnOptionsDialog:
invokeMenuItem("Tools", "Options...")
waitForObjectItem(":Options_QListView", "Build & Run")
clickItem(":Options_QListView", "Build & Run", 14, 15, 0, Qt.LeftButton)
clickOnTab(":Options.qt_tabwidget_tabbar_QTabBar", "Kits")
treeView = waitForObject(":BuildAndRun_QTreeView")
model = treeView.model()
test.compare(model.rowCount(), 2, "Verifying expected target section count")
autoDetected = model.index(0, 0)
test.compare(autoDetected.data().toString(), "Auto-detected",
"Verifying label for target section")
manual = model.index(1, 0)
test.compare(manual.data().toString(), "Manual", "Verifying label for target section")
for section in [autoDetected, manual]:
for currentItem in dumpItems(model, section):
kitName = currentItem
if (kitName.endswith(" (default)")):
kitName = kitName.rsplit(" (default)", 1)[0]
result.append(kitName)
item = ".".join([str(section.data().toString()),
currentItem.replace(".", "\\.")])
if additionalFunction:
try:
if isinstance(additionalFunction, (str, unicode)):
currResult = globals()[additionalFunction](item, kitName, *argsForAdditionalFunc)
else:
currResult = additionalFunction(item, kitName, *argsForAdditionalFunc)
except:
import sys
t,v,tb = sys.exc_info()
currResult = None
test.fatal("Function to additionally execute on Options Dialog could not be "
"found or an exception occurred while executing it.", "%s(%s)" %
(str(t), str(v)))
additionalResult.append(currResult)
if not keepOptionsOpen:
clickButton(waitForObject(":Options.Cancel_QPushButton"))
if additionalFunction:
return result, additionalResult
else:
return result
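# A minimal usage sketch (the callback below is hypothetical), mirroring iterateQtVersions:
#   kitNames = iterateKits()
#   def logKit(item, kitName):
#       test.log("Configured kit: %s" % kitName)
#   kitNames, logResults = iterateKits(True, True, logKit)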
# set "Always Start Full Help" in "Tools" -> "Options..." -> "Help" -> "General"
def setAlwaysStartFullHelp():
invokeMenuItem("Tools", "Options...")
waitForObjectItem(":Options_QListView", "Help")
clickItem(":Options_QListView", "Help", 5, 5, 0, Qt.LeftButton)
clickOnTab(":Options.qt_tabwidget_tabbar_QTabBar", "General")
selectFromCombo(":Startup.contextHelpComboBox_QComboBox", "Always Show in Help Mode")
clickButton(waitForObject(":Options.OK_QPushButton"))
def removePackagingDirectory(projectPath):
qtcPackaging = os.path.join(projectPath, "qtc_packaging")
if os.path.exists(qtcPackaging):
test.log("Removing old packaging directory '%s'" % qtcPackaging)
deleteDirIfExists(qtcPackaging)
else:
test.log("Couldn't remove packaging directory '%s' - did not exist." % qtcPackaging)
# returns the indices from a QAbstractItemModel
def dumpIndices(model, parent=None, column=0):
if parent:
return [model.index(row, column, parent) for row in range(model.rowCount(parent))]
else:
return [model.index(row, column) for row in range(model.rowCount())]
DisplayRole = 0
# returns the data from a QAbstractItemModel as strings
def dumpItems(model, parent=None, role=DisplayRole, column=0):
return [str(index.data(role)) for index in dumpIndices(model, parent, column)]
# returns the children of a QTreeWidgetItem
def dumpChildren(item):
return [item.child(index) for index in range(item.childCount())]
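# A minimal usage sketch (the tree view object name is one already used above):
#   treeView = waitForObject(":BuildAndRun_QTreeView")
#   topLevelItems = dumpItems(treeView.model())    # e.g. ["Auto-detected", "Manual"]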
def writeTestResults(folder):
if not os.path.exists(folder):
print "Skipping writing test results (folder '%s' does not exist)." % folder
return
resultFile = open("%s.srf" % os.path.join(folder, os.path.basename(squishinfo.testCase)), "w")
resultFile.write("suite:%s\n" % os.path.basename(os.path.dirname(squishinfo.testCase)))
categories = ["passes", "fails", "fatals", "errors", "tests", "warnings", "xfails", "xpasses"]
for cat in categories:
resultFile.write("%s:%d\n" % (cat, test.resultCount(cat)))
resultFile.close()
# wait and verify if object exists/not exists
def checkIfObjectExists(name, shouldExist = True, timeout = 3000, verboseOnFail = False):
result = waitFor("object.exists(name) == shouldExist", timeout)
if verboseOnFail and not result:
test.log("checkIfObjectExists() failed for '%s'" % name)
return result
# wait for progress bar(s) to appear and disappear
def progressBarWait(timeout=60000, warn=True):
if not checkIfObjectExists(":Qt Creator_Core::Internal::ProgressBar", True, 6000):
if warn:
test.warning("progressBarWait() timed out when waiting for ProgressBar.",
"This may lead to unforeseen behavior. Consider increasing the timeout.")
checkIfObjectExists(":Qt Creator_Core::Internal::ProgressBar", False, timeout)
def readFile(filename):
f = open(filename, "r")
content = f.read()
f.close()
return content
def simpleFileName(navigatorFileName):
# try to find the last part of the given name, assume it's inside a (folder) structure
search = re.search(".*[^\\\\]\.(.*)$", navigatorFileName)
if search:
return search.group(1).replace("\\", "")
# it's just the filename
return navigatorFileName.replace("\\", "")
def clickOnTab(tabBarStr, tabText, timeout=5000):
if not waitFor("object.exists(tabBarStr)", timeout):
raise LookupError("Could not find QTabBar: %s" % objectMap.realName(tabBarStr))
tabBar = findObject(tabBarStr)
if platform.system() == 'Darwin' and not tabBar.visible:
test.log("Using workaround for Mac.")
setWindowState(tabBar, WindowState.Normal)
clickTab(tabBar, tabText)
waitFor("str(tabBar.tabText(tabBar.currentIndex)) == '%s'" % tabText, timeout)
# constructs a string holding the properties for a QModelIndex
# param property a string holding additional properties including their values
# ATTENTION! use single quotes for values (e.g. "text='Text'", "text='Text' occurrence='2'")
# param container the container (str) to be used for this QModelIndex
def getQModelIndexStr(property, container):
if (container.startswith(":")):
container = "'%s'" % container
return ("{column='0' container=%s %s type='QModelIndex'}" % (container, property))
def verifyItemOrder(items, text):
text = str(text)
lastIndex = 0
for item in items:
index = text.find(item)
test.verify(index > lastIndex, "'" + item + "' found at index " + str(index))
lastIndex = index
def openVcsLog():
try:
foundObj = waitForObject("{type='QPlainTextEdit' unnamed='1' visible='1' "
"window=':Qt Creator_Core::Internal::MainWindow'}", 2000)
if className(foundObj) != 'QPlainTextEdit':
raise Exception("Found derived class, but not a pure QPlainTextEdit.")
except:
invokeMenuItem("Window", "Output Panes", "Version Control")
def openGeneralMessages():
if not object.exists(":Qt Creator_Core::OutputWindow"):
invokeMenuItem("Window", "Output Panes", "General Messages")
# function that retrieves a specific child object by its class
# this is sometimes the best way to avoid using waitForObject() on objects that
# occur more than once - but could easily be found by using a compound object
# (e.g. search for Utils::PathChooser instead of Utils::FancyLineEdit and get the child)
def getChildByClass(parent, classToSearchFor, occurrence=1):
children = [child for child in object.children(parent) if className(child) == classToSearchFor]
if len(children) < occurrence:
return None
else:
return children[occurrence - 1]
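# A minimal usage sketch (the parent object string is hypothetical), following the
# PathChooser/FancyLineEdit example from the comment above:
#   pathChooser = waitForObject("{type='Utils::PathChooser' visible='1'}")
#   lineEdit = getChildByClass(pathChooser, "Utils::FancyLineEdit")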
def getHelpViewer():
try:
return waitForObject(":Qt Creator_Help::Internal::HelpViewer", 3000)
except:
return waitForObject("{type='Help::Internal::TextBrowserHelpWidget' unnamed='1' "
"visible='1' window=':Qt Creator_Core::Internal::MainWindow'}", 1000)
def getHelpTitle():
hv = getHelpViewer()
try:
return str(hv.title)
except:
return str(hv.documentTitle)
def canTestEmbeddedQtQuick():
return (squishinfo.major * 0x10000 + squishinfo.minor * 0x100
+ squishinfo.patch) > 0x050100
|
xianian/qt-creator
|
tests/system/shared/utils.py
|
Python
|
lgpl-2.1
| 31,486
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-07-04 22:04
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0007_alter_validators_add_error_messages'),
]
operations = [
migrations.CreateModel(
name='CustomUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True, verbose_name='email address')),
('is_admin', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='email address')),
('email_consent', models.BooleanField(default=False)),
('first_name', models.CharField(max_length=255)),
('last_name', models.CharField(max_length=255)),
('preferred_name', models.CharField(blank=True, max_length=255, null=True)),
('date_of_birth', models.DateField(blank=True, null=True)),
('guardian_name', models.CharField(blank=True, max_length=255, null=True)),
('phone', models.CharField(blank=True, max_length=20, null=True)),
('street', models.CharField(blank=True, max_length=255, null=True)),
('city', models.CharField(blank=True, max_length=255, null=True)),
('province', models.CharField(blank=True, max_length=255, null=True)),
('country', models.CharField(blank=True, max_length=255, null=True)),
('post_code', models.CharField(max_length=20, null=True)),
('waiver', models.DateTimeField(blank=True, null=True)),
('is_active', models.BooleanField(default=True)),
('user', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
BridgeCityBicycleCoop/workstand
|
registration/migrations/0001_initial.py
|
Python
|
mit
| 3,388
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.dtypes."""
import numpy as np
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import _dtypes
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
def _is_numeric_dtype_enum(datatype_enum):
non_numeric_dtypes = [types_pb2.DT_VARIANT,
types_pb2.DT_VARIANT_REF,
types_pb2.DT_INVALID,
types_pb2.DT_RESOURCE,
types_pb2.DT_RESOURCE_REF]
return datatype_enum not in non_numeric_dtypes
class TypesTest(test_util.TensorFlowTestCase):
def testAllTypesConstructible(self):
for datatype_enum in types_pb2.DataType.values():
if datatype_enum == types_pb2.DT_INVALID:
continue
self.assertEqual(datatype_enum,
dtypes.DType(datatype_enum).as_datatype_enum)
def testAllTypesConvertibleToDType(self):
for datatype_enum in types_pb2.DataType.values():
if datatype_enum == types_pb2.DT_INVALID:
continue
dt = dtypes.as_dtype(datatype_enum)
self.assertEqual(datatype_enum, dt.as_datatype_enum)
def testAllTypesConvertibleToNumpyDtype(self):
for datatype_enum in types_pb2.DataType.values():
if not _is_numeric_dtype_enum(datatype_enum):
continue
dtype = dtypes.as_dtype(datatype_enum)
numpy_dtype = dtype.as_numpy_dtype
_ = np.empty((1, 1, 1, 1), dtype=numpy_dtype)
if dtype.base_dtype != dtypes.bfloat16:
# NOTE(touts): Intentionally no way to feed a DT_BFLOAT16.
self.assertEqual(
dtypes.as_dtype(datatype_enum).base_dtype,
dtypes.as_dtype(numpy_dtype))
def testAllPybind11DTypeConvertibleToDType(self):
for datatype_enum in types_pb2.DataType.values():
if datatype_enum == types_pb2.DT_INVALID:
continue
dtype = _dtypes.DType(datatype_enum)
self.assertEqual(dtypes.as_dtype(datatype_enum), dtype)
def testInvalid(self):
with self.assertRaises(TypeError):
dtypes.DType(types_pb2.DT_INVALID)
with self.assertRaises(TypeError):
dtypes.as_dtype(types_pb2.DT_INVALID)
def testNumpyConversion(self):
self.assertIs(dtypes.float32, dtypes.as_dtype(np.float32))
self.assertIs(dtypes.float64, dtypes.as_dtype(np.float64))
self.assertIs(dtypes.int32, dtypes.as_dtype(np.int32))
self.assertIs(dtypes.int64, dtypes.as_dtype(np.int64))
self.assertIs(dtypes.uint8, dtypes.as_dtype(np.uint8))
self.assertIs(dtypes.uint16, dtypes.as_dtype(np.uint16))
self.assertIs(dtypes.int16, dtypes.as_dtype(np.int16))
self.assertIs(dtypes.int8, dtypes.as_dtype(np.int8))
self.assertIs(dtypes.complex64, dtypes.as_dtype(np.complex64))
self.assertIs(dtypes.complex128, dtypes.as_dtype(np.complex128))
self.assertIs(dtypes.string, dtypes.as_dtype(np.object_))
self.assertIs(dtypes.string,
dtypes.as_dtype(np.array(["foo", "bar"]).dtype))
self.assertIs(dtypes.bool, dtypes.as_dtype(np.bool_))
with self.assertRaises(TypeError):
dtypes.as_dtype(np.dtype([("f1", np.uint), ("f2", np.int32)]))
class AnObject(object):
dtype = "f4"
self.assertIs(dtypes.float32, dtypes.as_dtype(AnObject))
class AnotherObject(object):
dtype = np.dtype(np.complex64)
self.assertIs(dtypes.complex64, dtypes.as_dtype(AnotherObject))
def testRealDtype(self):
for dtype in [
dtypes.float32, dtypes.float64, dtypes.bool, dtypes.uint8, dtypes.int8,
dtypes.int16, dtypes.int32, dtypes.int64
]:
self.assertIs(dtype.real_dtype, dtype)
self.assertIs(dtypes.complex64.real_dtype, dtypes.float32)
self.assertIs(dtypes.complex128.real_dtype, dtypes.float64)
def testStringConversion(self):
self.assertIs(dtypes.float32, dtypes.as_dtype("float32"))
self.assertIs(dtypes.float64, dtypes.as_dtype("float64"))
self.assertIs(dtypes.int32, dtypes.as_dtype("int32"))
self.assertIs(dtypes.uint8, dtypes.as_dtype("uint8"))
self.assertIs(dtypes.uint16, dtypes.as_dtype("uint16"))
self.assertIs(dtypes.int16, dtypes.as_dtype("int16"))
self.assertIs(dtypes.int8, dtypes.as_dtype("int8"))
self.assertIs(dtypes.string, dtypes.as_dtype("string"))
self.assertIs(dtypes.complex64, dtypes.as_dtype("complex64"))
self.assertIs(dtypes.complex128, dtypes.as_dtype("complex128"))
self.assertIs(dtypes.int64, dtypes.as_dtype("int64"))
self.assertIs(dtypes.bool, dtypes.as_dtype("bool"))
self.assertIs(dtypes.qint8, dtypes.as_dtype("qint8"))
self.assertIs(dtypes.quint8, dtypes.as_dtype("quint8"))
self.assertIs(dtypes.qint32, dtypes.as_dtype("qint32"))
self.assertIs(dtypes.bfloat16, dtypes.as_dtype("bfloat16"))
self.assertIs(dtypes.float32_ref, dtypes.as_dtype("float32_ref"))
self.assertIs(dtypes.float64_ref, dtypes.as_dtype("float64_ref"))
self.assertIs(dtypes.int32_ref, dtypes.as_dtype("int32_ref"))
self.assertIs(dtypes.uint8_ref, dtypes.as_dtype("uint8_ref"))
self.assertIs(dtypes.int16_ref, dtypes.as_dtype("int16_ref"))
self.assertIs(dtypes.int8_ref, dtypes.as_dtype("int8_ref"))
self.assertIs(dtypes.string_ref, dtypes.as_dtype("string_ref"))
self.assertIs(dtypes.complex64_ref, dtypes.as_dtype("complex64_ref"))
self.assertIs(dtypes.complex128_ref, dtypes.as_dtype("complex128_ref"))
self.assertIs(dtypes.int64_ref, dtypes.as_dtype("int64_ref"))
self.assertIs(dtypes.bool_ref, dtypes.as_dtype("bool_ref"))
self.assertIs(dtypes.qint8_ref, dtypes.as_dtype("qint8_ref"))
self.assertIs(dtypes.quint8_ref, dtypes.as_dtype("quint8_ref"))
self.assertIs(dtypes.qint32_ref, dtypes.as_dtype("qint32_ref"))
self.assertIs(dtypes.bfloat16_ref, dtypes.as_dtype("bfloat16_ref"))
with self.assertRaises(TypeError):
dtypes.as_dtype("not_a_type")
def testDTypesHaveUniqueNames(self):
dtypez = []
names = set()
for datatype_enum in types_pb2.DataType.values():
if datatype_enum == types_pb2.DT_INVALID:
continue
dtype = dtypes.as_dtype(datatype_enum)
dtypez.append(dtype)
names.add(dtype.name)
self.assertEqual(len(dtypez), len(names))
def testIsInteger(self):
self.assertEqual(dtypes.as_dtype("int8").is_integer, True)
self.assertEqual(dtypes.as_dtype("int16").is_integer, True)
self.assertEqual(dtypes.as_dtype("int32").is_integer, True)
self.assertEqual(dtypes.as_dtype("int64").is_integer, True)
self.assertEqual(dtypes.as_dtype("uint8").is_integer, True)
self.assertEqual(dtypes.as_dtype("uint16").is_integer, True)
self.assertEqual(dtypes.as_dtype("complex64").is_integer, False)
self.assertEqual(dtypes.as_dtype("complex128").is_integer, False)
self.assertEqual(dtypes.as_dtype("float").is_integer, False)
self.assertEqual(dtypes.as_dtype("double").is_integer, False)
self.assertEqual(dtypes.as_dtype("string").is_integer, False)
self.assertEqual(dtypes.as_dtype("bool").is_integer, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_integer, False)
self.assertEqual(dtypes.as_dtype("qint8").is_integer, False)
self.assertEqual(dtypes.as_dtype("qint16").is_integer, False)
self.assertEqual(dtypes.as_dtype("qint32").is_integer, False)
self.assertEqual(dtypes.as_dtype("quint8").is_integer, False)
self.assertEqual(dtypes.as_dtype("quint16").is_integer, False)
def testIsFloating(self):
self.assertEqual(dtypes.as_dtype("int8").is_floating, False)
self.assertEqual(dtypes.as_dtype("int16").is_floating, False)
self.assertEqual(dtypes.as_dtype("int32").is_floating, False)
self.assertEqual(dtypes.as_dtype("int64").is_floating, False)
self.assertEqual(dtypes.as_dtype("uint8").is_floating, False)
self.assertEqual(dtypes.as_dtype("uint16").is_floating, False)
self.assertEqual(dtypes.as_dtype("complex64").is_floating, False)
self.assertEqual(dtypes.as_dtype("complex128").is_floating, False)
self.assertEqual(dtypes.as_dtype("float32").is_floating, True)
self.assertEqual(dtypes.as_dtype("float64").is_floating, True)
self.assertEqual(dtypes.as_dtype("string").is_floating, False)
self.assertEqual(dtypes.as_dtype("bool").is_floating, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_floating, True)
self.assertEqual(dtypes.as_dtype("qint8").is_floating, False)
self.assertEqual(dtypes.as_dtype("qint16").is_floating, False)
self.assertEqual(dtypes.as_dtype("qint32").is_floating, False)
self.assertEqual(dtypes.as_dtype("quint8").is_floating, False)
self.assertEqual(dtypes.as_dtype("quint16").is_floating, False)
def testIsComplex(self):
self.assertEqual(dtypes.as_dtype("int8").is_complex, False)
self.assertEqual(dtypes.as_dtype("int16").is_complex, False)
self.assertEqual(dtypes.as_dtype("int32").is_complex, False)
self.assertEqual(dtypes.as_dtype("int64").is_complex, False)
self.assertEqual(dtypes.as_dtype("uint8").is_complex, False)
self.assertEqual(dtypes.as_dtype("uint16").is_complex, False)
self.assertEqual(dtypes.as_dtype("complex64").is_complex, True)
self.assertEqual(dtypes.as_dtype("complex128").is_complex, True)
self.assertEqual(dtypes.as_dtype("float32").is_complex, False)
self.assertEqual(dtypes.as_dtype("float64").is_complex, False)
self.assertEqual(dtypes.as_dtype("string").is_complex, False)
self.assertEqual(dtypes.as_dtype("bool").is_complex, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_complex, False)
self.assertEqual(dtypes.as_dtype("qint8").is_complex, False)
self.assertEqual(dtypes.as_dtype("qint16").is_complex, False)
self.assertEqual(dtypes.as_dtype("qint32").is_complex, False)
self.assertEqual(dtypes.as_dtype("quint8").is_complex, False)
self.assertEqual(dtypes.as_dtype("quint16").is_complex, False)
def testIsUnsigned(self):
self.assertEqual(dtypes.as_dtype("int8").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("int16").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("int32").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("int64").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("uint8").is_unsigned, True)
self.assertEqual(dtypes.as_dtype("uint16").is_unsigned, True)
self.assertEqual(dtypes.as_dtype("float32").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("float64").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("bool").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("string").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("complex64").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("complex128").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("qint8").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("qint16").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("qint32").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("quint8").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("quint16").is_unsigned, False)
def testMinMax(self):
# make sure min/max evaluates for all data types that have min/max
for datatype_enum in types_pb2.DataType.values():
if not _is_numeric_dtype_enum(datatype_enum):
continue
dtype = dtypes.as_dtype(datatype_enum)
numpy_dtype = dtype.as_numpy_dtype
# ignore types for which there are no minimum/maximum (or we cannot
# compute it, such as for the q* types)
if (dtype.is_quantized or dtype.base_dtype == dtypes.bool or
dtype.base_dtype == dtypes.string or
dtype.base_dtype == dtypes.complex64 or
dtype.base_dtype == dtypes.complex128):
continue
print("%s: %s - %s" % (dtype, dtype.min, dtype.max))
# check some values that are known
if numpy_dtype == np.bool_:
self.assertEqual(dtype.min, 0)
self.assertEqual(dtype.max, 1)
if numpy_dtype == np.int8:
self.assertEqual(dtype.min, -128)
self.assertEqual(dtype.max, 127)
if numpy_dtype == np.int16:
self.assertEqual(dtype.min, -32768)
self.assertEqual(dtype.max, 32767)
if numpy_dtype == np.int32:
self.assertEqual(dtype.min, -2147483648)
self.assertEqual(dtype.max, 2147483647)
if numpy_dtype == np.int64:
self.assertEqual(dtype.min, -9223372036854775808)
self.assertEqual(dtype.max, 9223372036854775807)
if numpy_dtype == np.uint8:
self.assertEqual(dtype.min, 0)
self.assertEqual(dtype.max, 255)
if numpy_dtype == np.uint16:
if dtype == dtypes.uint16:
self.assertEqual(dtype.min, 0)
self.assertEqual(dtype.max, 65535)
elif dtype == dtypes.bfloat16:
self.assertEqual(dtype.min, 0)
self.assertEqual(dtype.max, 4294967295)
if numpy_dtype == np.uint32:
self.assertEqual(dtype.min, 0)
self.assertEqual(dtype.max, 4294967295)
if numpy_dtype == np.uint64:
self.assertEqual(dtype.min, 0)
self.assertEqual(dtype.max, 18446744073709551615)
if numpy_dtype in (np.float16, np.float32, np.float64):
self.assertEqual(dtype.min, np.finfo(numpy_dtype).min)
self.assertEqual(dtype.max, np.finfo(numpy_dtype).max)
if numpy_dtype == dtypes.bfloat16.as_numpy_dtype:
self.assertEqual(dtype.min, float.fromhex("-0x1.FEp127"))
self.assertEqual(dtype.max, float.fromhex("0x1.FEp127"))
def testRepr(self):
self.skipTest("b/142725777")
for enum, name in dtypes._TYPE_TO_STRING.items():
if enum > 100:
continue
dtype = dtypes.DType(enum)
self.assertEqual(repr(dtype), "tf." + name)
import tensorflow as tf
dtype2 = eval(repr(dtype))
self.assertEqual(type(dtype2), dtypes.DType)
self.assertEqual(dtype, dtype2)
def testEqWithNonTFTypes(self):
self.assertNotEqual(dtypes.int32, int)
self.assertNotEqual(dtypes.float64, 2.1)
def testPythonLongConversion(self):
self.assertIs(dtypes.int64, dtypes.as_dtype(np.array(2**32).dtype))
def testPythonTypesConversion(self):
self.assertIs(dtypes.float32, dtypes.as_dtype(float))
self.assertIs(dtypes.bool, dtypes.as_dtype(bool))
def testReduce(self):
for enum in dtypes._TYPE_TO_STRING:
dtype = dtypes.DType(enum)
ctor, args = dtype.__reduce__()
self.assertEqual(ctor, dtypes.as_dtype)
self.assertEqual(args, (dtype.name,))
reconstructed = ctor(*args)
self.assertEqual(reconstructed, dtype)
def testAsDtypeInvalidArgument(self):
with self.assertRaises(TypeError):
dtypes.as_dtype((dtypes.int32, dtypes.float32))
def testAsDtypeReturnsInternedVersion(self):
dt = dtypes.DType(types_pb2.DT_VARIANT)
self.assertIs(dtypes.as_dtype(dt), dtypes.variant)
if __name__ == "__main__":
googletest.main()
|
tensorflow/tensorflow
|
tensorflow/python/framework/dtypes_test.py
|
Python
|
apache-2.0
| 15,671
|
# pylint: disable=W0232, R0903
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from accounts.models import User
from rest_framework.authtoken.models import Token
from common.forms import get_bootstrap_helper
class RegistrationForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
'duplicate_username': "A user with that username already exists.",
'password_mismatch': "The two password fields didn't match.",
}
username = forms.RegexField(
label="Username", max_length=30,
regex=r'^[\w.@+-]+$',
help_text=("30 characters max."),
error_messages={
'invalid': ("This value may contain only letters, numbers and "
"@/./+/-/_ characters.")
}
)
email = forms.EmailField(
label="Email address"
)
password1 = forms.CharField(
label="Password",
widget=forms.PasswordInput, max_length=64
)
password2 = forms.CharField(
label="Password confirmation",
widget=forms.PasswordInput,
max_length=64,
help_text="Enter the same password as above, for verification."
)
class Meta(object):
model = User
fields = ("username", "email")
def __init__(self, *args, **kwargs):
super(RegistrationForm, self).__init__(*args, **kwargs)
self.helper = get_bootstrap_helper(
['username', 'email', 'password1', 'password2'],
'register', "Register"
)
def clean_username(self):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
username = self.cleaned_data["username"]
try:
User.objects.get(username=username)
except User.DoesNotExist:
return username
raise forms.ValidationError(self.error_messages['duplicate_username'])
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'])
return password2
def save(self, commit=True):
user = super(RegistrationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
Token.objects.create(user=user)
return user
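# A minimal usage sketch (view code is hypothetical): validate the POSTed data and
# save the new user, which also creates the DRF auth token.
#   form = RegistrationForm(request.POST)
#   if form.is_valid():
#       user = form.save()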
class LoginForm(AuthenticationForm):
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = get_bootstrap_helper(
['username', 'password'], 'signin', "Sign in"
)
class ProfileForm(forms.ModelForm):
class Meta(object):
model = User
fields = ('website', 'avatar', 'email')
def __init__(self, *args, **kwargs):
super(ProfileForm, self).__init__(*args, **kwargs)
self.helper = get_bootstrap_helper(list(self.Meta.fields),
'save', "Save")
def save(self, *args, **kwargs):
if 'email' in self.changed_data:
self.instance.email_confirmed = False
return super(ProfileForm, self).save(*args, **kwargs)
|
Turupawn/website
|
accounts/forms.py
|
Python
|
agpl-3.0
| 3,416
|
"""
WSGI config for Admirals project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Admirals.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
PhilMacKay/django-admirals
|
Admirals/Admirals/wsgi.py
|
Python
|
mit
| 1,138
|
#
# utils.py
#
# Auxiliary functions for the `docmaker' tool (library file).
#
# Copyright 2002-2018 by
# David Turner.
#
# This file is part of the FreeType project, and may only be used,
# modified, and distributed under the terms of the FreeType project
# license, LICENSE.TXT. By continuing to use, modify, or distribute
# this file you indicate that you have read the license and
# understand and accept it fully.
import string, sys, os, glob, itertools
# current output directory
#
output_dir = None
# A function that generates a sorting key. We want lexicographical order
# (primary key) except that capital letters are sorted before lowercase
# ones (secondary key).
#
# The primary key is implemented by lowercasing the input. The secondary
# key is simply the original data appended, character by character. For
# example, the sort key for `FT_x' is `fFtT__xx', while the sort key for
# `ft_X' is `fftt__xX'. Since ASCII codes of uppercase letters are
# numerically smaller than the codes of lowercase letters, `fFtT__xx' gets
# sorted before `fftt__xX'.
#
def index_key( s ):
return string.join( itertools.chain( *zip( s.lower(), s ) ) )
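# For illustration, `FT_x' therefore sorts before `ft_X':
#
#   sorted( [ "ft_X", "FT_x" ], key = index_key )   # yields [ "FT_x", "ft_X" ]
#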
# Sort `input_list', placing the elements of `order_list' in front.
#
def sort_order_list( input_list, order_list ):
new_list = order_list[:]
for id in input_list:
if not id in order_list:
new_list.append( id )
return new_list
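# For example (hypothetical identifiers):
#
#   sort_order_list( [ "beta", "alpha", "gamma" ], [ "gamma" ] )
#   # -> [ "gamma", "beta", "alpha" ]
#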
# Divert standard output to a given project documentation file. Use
# `output_dir' to determine the filename location if necessary and save the
# old stdout handle in a tuple that is returned by this function.
#
def open_output( filename ):
global output_dir
if output_dir and output_dir != "":
filename = output_dir + os.sep + filename
old_stdout = sys.stdout
new_file = open( filename, "w" )
sys.stdout = new_file
return ( new_file, old_stdout )
# Close the output that was returned by `open_output'.
#
def close_output( output ):
output[0].close()
sys.stdout = output[1]
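# A minimal usage sketch (the file name is hypothetical):
#
#   output = open_output( "index.html" )
#   print "<html>...</html>"        # goes to the diverted stdout
#   close_output( output )
#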
# Check output directory.
#
def check_output():
global output_dir
if output_dir:
if output_dir != "":
if not os.path.isdir( output_dir ):
sys.stderr.write( "argument"
+ " '" + output_dir + "' "
+ "is not a valid directory\n" )
sys.exit( 2 )
else:
output_dir = None
def file_exists( pathname ):
"""Check that a given file exists."""
result = 1
try:
file = open( pathname, "r" )
file.close()
except:
result = None
sys.stderr.write( pathname + " couldn't be accessed\n" )
return result
def make_file_list( args = None ):
"""Build a list of input files from command-line arguments."""
file_list = []
# sys.stderr.write( repr( sys.argv[1 :] ) + '\n' )
if not args:
args = sys.argv[1:]
for pathname in args:
if string.find( pathname, '*' ) >= 0:
newpath = glob.glob( pathname )
newpath.sort() # sort files -- this is important because
# of the order of files
else:
newpath = [pathname]
file_list.extend( newpath )
if len( file_list ) == 0:
file_list = None
else:
# now filter the file list to remove non-existing ones
file_list = filter( file_exists, file_list )
return file_list
# eof
|
joncampbell123/dosbox-rewrite
|
vs2015/freetype/src/tools/docmaker/utils.py
|
Python
|
gpl-2.0
| 3,513
|
from django.test import TestCase
from wellcom_app.models import Well, Note, DeviceData, WaterTest, Usage
from django.utils import timezone
from django.core.urlresolvers import reverse
from time import sleep
# Using this for Django testing guidance:
# https://realpython.com/blog/python/testing-in-django-part-1-best-practices-and-examples/
# MODELS TESTS
class WellTest(TestCase):
def create_well(self, name="Test Well", latitude=35.993078,
longitude=-78.904689, country="United States",
date_installed="2016-07-11", last_update=timezone.now(),
estimated_users=500, cost_usd=8000.50,
contractor="Test Contractor", flow_rate_lpm=120):
return Well.objects.create(name=name, latitude=latitude,
longitude=longitude, country=country,
date_installed=date_installed,
last_update=last_update,
estimated_users=estimated_users,
cost_usd=cost_usd, contractor=contractor,
flow_rate_lpm=flow_rate_lpm
)
def test_well_creation(self):
well = self.create_well()
self.assertTrue(isinstance(well, Well))
self.assertEqual(well.__str__(), well.name)
def test_well_save(self):
well = self.create_well()
create_time = well.last_update
sleep(1)
well.name = 'Test Saved Well'
well.save()
self.assertTrue(create_time is not well.last_update)
self.assertTrue(isinstance(well, Well))
self.assertEqual(well.__str__(), 'Test Saved Well')
class NoteTest(TestCase):
def create_note(self, title="Test note title", text="Test note text"):
well = WellTest().create_well()
well.save()
return Note.objects.create(well=well, title=title, text=text)
def test_note_creation(self):
note = self.create_note()
self.assertTrue(isinstance(note, Note))
self.assertEqual(note.__str__(), note.title)
class DeviceDataTest(TestCase):
def create_device_data(self, timestamp=timezone.now(), temperature_c=32):
well = WellTest().create_well()
well.save()
return DeviceData.objects.create(well=well, timestamp=timestamp,
temperature_c=temperature_c)
def test_device_data_creation(self):
device_data = self.create_device_data()
self.assertTrue(isinstance(device_data, DeviceData))
self.assertEqual(device_data.__str__(),
"{} | {} | {}".format(device_data.well,
device_data.timestamp,
device_data.temperature_c))
class WaterTestTest(TestCase):
def create_water_test_data(self):
pass
class UsageTest(TestCase):
pass
# VIEWS TESTS
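# A minimal, hypothetical view-test sketch; the URL name 'well-list' below is an
# assumption for illustration and is not defined anywhere in the code shown here.
class WellListViewTest(TestCase):
    def test_well_list_returns_200(self):
        WellTest().create_well()
        response = self.client.get(reverse('well-list'))  # 'well-list' is an assumed URL name
        self.assertEqual(response.status_code, 200)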
|
WellConstructed/WellCom
|
wellcom_app/tests/tests.py
|
Python
|
mit
| 3,019
|
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.utils.feedgenerator import Atom1Feed
from django.template import defaultfilters
from datetime import datetime, time
from mongoengine.django.auth import REDIRECT_FIELD_NAME
import string
from mumblr.entrytypes import markup, EntryType
def _lookup_template(name):
return 'mumblr/admin/%s.html' % name
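# e.g. _lookup_template('dashboard') returns 'mumblr/admin/dashboard.html'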
@login_required
def dashboard(request):
"""Display the main admin page.
"""
entry_types = [e.type for e in EntryType._types.values()]
entries = EntryType.objects.order_by('-publish_date')[:10]
context = {
'entry_types': entry_types,
'entries': entries,
'datenow': datetime.now(),
}
return render_to_response(_lookup_template('dashboard'), context,
context_instance=RequestContext(request))
@login_required
def edit_entry(request, entry_id):
"""Edit an existing entry.
"""
entry = EntryType.objects.with_id(entry_id)
if not entry:
return HttpResponseRedirect(reverse('admin'))
# Select correct form for entry type
form_class = entry.AdminForm
if request.method == 'POST':
form = form_class(request.POST)
if form.is_valid():
# Get necessary post data from the form
for field, value in form.cleaned_data.items():
if field in entry._fields.keys():
entry[field] = value
entry.save()
return HttpResponseRedirect(entry.get_absolute_url())
else:
fields = entry._fields.keys()
field_dict = dict([(name, entry[name]) for name in fields])
# tags are stored as a list in the db, convert them back to a string
field_dict['tags'] = ', '.join(field_dict['tags'])
# publish_time and expiry_time are not initialised as they
# don't have a field in the DB
field_dict['publish_time'] = time(
hour=entry.publish_date.hour,
minute=entry.publish_date.minute,
second=entry.publish_date.second,
)
if field_dict['expiry_date']:
field_dict['expiry_time'] = time(
hour=entry.expiry_date.hour,
minute=entry.expiry_date.minute,
second=entry.expiry_date.second,
)
form = form_class(field_dict)
link_url = reverse('add-entry', args=['Link'])
video_url = reverse('add-entry', args=['Video'])
context = {
'title': 'Edit an entry',
        'type': entry.type,
'form': form,
'link_url': request.build_absolute_uri(link_url),
'video_url': request.build_absolute_uri(video_url),
}
return render_to_response(_lookup_template('add_entry'), context,
context_instance=RequestContext(request))
@login_required
def add_entry(request, type):
"""Display the 'Add an entry' form when GET is used, and add an entry to
the database when POST is used.
"""
# 'type' must be a valid entry type (e.g. html, image, etc..)
if type.lower() not in EntryType._types:
raise Http404
# Use correct entry type Document class
entry_type = EntryType._types[type.lower()]
# Select correct form for entry type
form_class = entry_type.AdminForm
if request.method == 'POST':
form = form_class(request.POST)
if form.is_valid():
entry = entry_type(**form.cleaned_data)
# Save the entry to the DB
entry.save()
return HttpResponseRedirect(entry.get_absolute_url())
else:
initial = {
'publish_date': datetime.now(),
'publish_time': datetime.now().time(),
'comments_enabled': True,
}
        # Pass in initial values from query string - added by bookmarklet
for field, value in request.GET.items():
if field in form_class.base_fields:
initial[field] = value
if 'title' in initial:
initial['slug'] = defaultfilters.slugify(initial['title'])
form = form_class(initial=initial)
link_url = reverse('add-entry', args=['Link'])
video_url = reverse('add-entry', args=['Video'])
context = {
'title': 'Add %s Entry' % type,
'type': type,
'form': form,
'link_url': request.build_absolute_uri(link_url),
'video_url': request.build_absolute_uri(video_url),
}
return render_to_response(_lookup_template('add_entry'), context,
context_instance=RequestContext(request))
@login_required
def delete_entry(request):
"""Delete an entry from the database.
"""
entry_id = request.POST.get('entry_id', None)
if request.method == 'POST' and entry_id:
EntryType.objects.with_id(entry_id).delete()
return HttpResponseRedirect(reverse('recent-entries'))
@login_required
def delete_comment(request):
"""Delete a comment from the database.
"""
comment_id = request.POST.get('comment_id', None)
if request.method == 'POST' and comment_id:
# Delete matching comment from entry
entry = EntryType.objects(comments__id=comment_id).first()
if entry:
entry.comments = [c for c in entry.comments if c.id != comment_id]
entry.save()
return HttpResponseRedirect(entry.get_absolute_url()+'#comments')
return HttpResponseRedirect(reverse('recent-entries'))
|
thiagopa/django-mumblr
|
mumblr/views/admin.py
|
Python
|
mit
| 5,871
|
'''
Awesome people who have directly contributed to the project:
Jon Palmer - Bug finder & advice on project direction
Mahmut Uludag - Bug finder
Help: print pybam.wat
Github: http://github.com/JohnLonginotto/pybam
This code was written by John Longinotto, a PhD student of the Pospisilik Lab at the Max Planck Institute of Immunobiology & Epigenetics, Freiburg.
My PhD is funded by the Deutsches Epigenom Programm (DEEP), and the Max Planck IMPRS Program.
I study Adipose Biology and Circadian Rhythm in mice, although it seems these days I spend most of my time at the computer :-)
'''
import os
import sys
import zlib
import time
import tempfile
import subprocess
from array import array
from struct import unpack
CtoPy = { 'A':'<c', 'c':'<b', 'C':'<B', 's':'<h', 'S':'<H', 'i':'<i', 'I':'<I', 'f':'<f' }
py4py = { 'A': 1 , 'c': 1 , 'C': 1 , 's': 2 , 'S': 2 , 'i': 4 , 'I': 4 , 'f': 4 }
dna_codes = '=ACMGRSVTWYHKDBN'
cigar_codes = 'MIDNSHP=X'
parse_codes = {
'sam': ' The current alignment in SAM format.',
'bam': ' All the bytes that make up the current alignment ("read"),\n still in binary just as it was in the BAM file. Useful\n when creating a new BAM file of filtered alignments.',
'sam_qname': ' [1st column in SAM] The QNAME (fragment ID) of the alignment.',
'bam_qname': ' The original bytes before decoding to sam_qname.',
'sam_flag': ' [2nd column in SAM] The FLAG number of the alignment.',
'bam_flag': ' The original bytes before decoding to sam_flag.',
'sam_refID': ' The chromosome ID (not the same as the name!).\n Chromosome names are stored in the BAM header (file_chromosomes),\n so to convert refIDs to chromsome names one needs to do:\n "my_bam.file_chromosomes[read.sam_refID]" (or use sam_rname)\n But for comparisons, using the refID is much faster that using\n the actual chromosome name (for example, when reading through a\n sorted BAM file and looking for where last_refID != this_refID)\n Note that when negative the alignment is not aligned, and thus one\n must not perform my_bam.file_chromosomes[read.sam_refID]\n without checking that the value is positive first.',
'sam_rname': ' [3rd column in SAM] The actual chromosome/contig name for the\n alignment. Will return "*" if refID is negative.',
'bam_refID': ' The original bytes before decoding to sam_refID.',
'sam_pos1': ' [4th column in SAM] The 1-based position of the alignment. Note\n that in SAM format values less than 1 are converted to "0" for\n "no data" and sam_pos1 will also do this.',
'sam_pos0': ' The 0-based position of the alignment. Note that in SAM all\n positions are 1-based, but in BAM they are stored as 0-based.\n Unlike sam_pos1, negative values are kept as negative values,\n essentially giving one the decoded value as it was stored.',
'bam_pos': ' The original bytes before decoding to sam_pos*.',
'sam_mapq': ' [5th column in SAM] The Mapping Quality of the current alignment.',
'bam_mapq': ' The original bytes before decoding to sam_mapq.',
'sam_cigar_string': ' [6th column in SAM] The CIGAR string, as per the SAM format.\n Allowed values are "MIDNSHP=X".',
'sam_cigar_list': ' A list of tuples with 2 values per tuple:\n the number of bases, and the CIGAR operation applied to those\n bases. Faster to calculate than sam_cigar_string.',
'bam_cigar': ' The original bytes before decoding to sam_cigar_*.',
'sam_next_refID': ' The sam_refID of the alignment\'s mate (if any). Note that as per\n sam_refID, this value can be negative and is not the actual\n chromosome name (see sam_pnext1).',
'sam_rnext': ' [7th column in SAM] The chromosome name of the alignment\'s mate.\n Value is "*" if unmapped. Note that in a SAM file this value\n is "=" if it is the same as the sam_rname, however pybam will\n only do this if the user prints the whole SAM entry with "sam".',
'bam_next_refID': ' The original bytes before decoding to sam_next_refID.',
'sam_pnext1': ' [8th column in SAM] The 1-based position of the alignment\'s mate.\n Note that in SAM format values less than 1 are converted to "0"\n for "no data", and sam_pnext1 will also do this.',
'sam_pnext0': ' The 0-based position of the alignment\'s mate. Note that in SAM all\n positions are 1-based, but in BAM they are stored as 0-based.\n Unlike sam_pnext1, negative values are kept as negative values\n here, essentially giving you the value as it was stored in BAM.',
'bam_pnext': ' The original bytes before decoding to sam_pnext0.',
'sam_tlen': ' [9th column in SAM] The TLEN value.',
'bam_tlen': ' The original bytes before decoding to sam_tlen.',
'sam_seq': ' [10th column in SAM] The SEQ value (DNA sequence of the alignment).\n Allowed values are "ACGTMRSVWYHKDBN and =".',
'bam_seq': ' The original bytes before decoding to sam_seq.',
'sam_qual': ' [11th column in SAM] The QUAL value (quality scores per DNA base\n in SEQ) of the alignment.',
'bam_qual': ' The original bytes before decoding to sam_qual.',
'sam_tags_list': ' A list of tuples with 3 values per tuple: a two-letter TAG ID, the\n type code used to describe the data in the TAG value (see SAM spec.\n for details), and the value of the TAG. Note that the BAM format\n has type codes like "c" for a number in the range -127 to +127,\n and "C" for a number in the range of 0 to 255.\n In a SAM file however, all numerical codes appear to just be stored\n using "i", which is a number in the range -2147483647 to +2147483647.\n sam_tags_list will therefore return the code used in the BAM file,\n and not "i" for all numbers.',
'sam_tags_string': ' [12th column a SAM] Returns the TAGs in the same format as would be found \n in a SAM file (with all numbers having a signed 32bit code of "i").',
'bam_tags': ' The original bytes before decoding to sam_tags_*.',
'sam_bin': ' The bin value of the alignment (used for indexing reads).\n Please refer to section 5.3 of the SAM spec for how this\n value is calculated.',
'bam_bin': ' The original bytes before decoding to sam_bin.',
'sam_block_size': ' The number of bytes the current alignment takes up in the BAM\n file minus the four bytes used to store the block_size value\n itself. Essentially sam_block_size +4 == bytes needed to store\n the current alignment.',
'bam_block_size': ' The original bytes before decoding to sam_block_size.',
'sam_l_read_name': ' The length of the QNAME plus 1 because the QNAME is terminated\n with a NUL byte.',
'bam_l_read_name': ' The original bytes before decoding to sam_l_read_name.',
'sam_l_seq': ' The number of bases in the seq. Useful if you just want to know\n how many bases are in the SEQ but do not need to know what those\n bases are (which requires more decoding effort).',
'bam_l_seq': ' The original bytes before decoding to sam_l_seq.',
'sam_n_cigar_op': ' The number of CIGAR operations in the CIGAR field. Useful if one\n wants to know how many CIGAR operations there are, but does not\n need to know what they are.',
'bam_n_cigar_op': ' The original bytes before decoding to sam_n_cigar_op.',
'file_alignments_read': ' A running counter of the number of alignments ("reads"),\n processed thus far. Note the BAM format does not store\n how many reads are in a file, so the usefulness of this\n metric is somewhat limited unless one already knows how\n many reads are in the file.',
'file_binary_header': ' From the first byte in the file, until the first byte of\n the first read. The original binary header.',
'file_bytes_read': ' A running counter of the bytes read from the file. Note\n that as data is read in arbitary chunks, this is literally\n the amount of data read from the file/pipe by pybam.',
'file_chromosome_lengths': ' The binary header of the BAM file includes chromosome names\n and chromosome lengths. This is a dictionary of chromosome-name\n keys and chromosome-length values.',
'file_chromosomes': ' A list of chromosomes from the binary header.',
'file_decompressor': ' BAM files are compressed with bgzip. The value here reflects\n the decompressor used. "internal" if pybam\'s internal\n decompressor is being used, "gzip" or "pigz" if the system\n has these binaries installed and pybam can find them.\n Any other value reflects a custom decompression command.',
'file_directory': ' The directory the input BAM file can be found in. This will be\n correct if the input file is specified via a string or python\n file object, however if the input is a pipe such as sys.stdin, \n then the current working directory will be used.',
'file_header': ' The ASCII portion of the BAM header. This is the typical header\n users of samtools will be familiar with.',
'file_name': ' The file name (base name) of input file if input is a string or\n python file object. If input is via stdin this will be "<stdin>"'
}
wat = '''
Main class: pybam.read
Github: http://github.com/JohnLonginotto/pybam
[ Dynamic Parser Example ]
for alignment in pybam.read('/my/data.bam'):
print alignment.sam_seq
[ Static Parser Example ]
for seq,mapq in pybam.read('/my/data.bam',['sam_seq','sam_mapq']):
print seq
print mapq
[ Mixed Parser Example ]
my_bam = pybam.read('/my/data.bam',['sam_seq','sam_mapq'])
print my_bam._static_parser_code
for seq,mapq in my_bam:
if seq.startswith('ACGT') and mapq > 10:
print my_bam.sam
[ Custom Decompressor (from file path) Example ]
my_bam = pybam.read('/my/data.bam.lzma',decompressor='lzma --decompress --stdout /my/data.bam.lzma')
[ Custom Decompressor (from file object) Example ]
my_bam = pybam.read(sys.stdin,decompressor='lzma --decompress --stdout') # data given to lzma via stdin
[ Force Internal bgzip Decompressor ]
my_bam = pybam.read('/my/data.bam',decompressor='internal')
[ Parse Words (hah) ]'''
wat += '\n'+''.join([('\n===============================================================================================\n\n ' if code is 'file_alignments_read' or code is 'sam' else ' ')+(code+' ').ljust(25,'-')+description+'\n' for code,description in sorted(parse_codes.items())]) + '\n'
class read:
'''
[ Dynamic Parser Example ]
for alignment in pybam.read('/my/data.bam'):
print alignment.sam_seq
[ Static Parser Example ]
for seq,mapq in pybam.read('/my/data.bam',['sam_seq','sam_mapq']):
print seq
print mapq
[ Mixed Parser Example ]
my_bam = pybam.read('/my/data.bam',['sam_seq','sam_mapq'])
print my_bam._static_parser_code
for seq,mapq in my_bam:
if seq.startswith('ACGT') and mapq > 10:
print my_bam.sam
[ Custom Decompressor (from file path) Example ]
my_bam = pybam.read('/my/data.bam.lzma',decompressor='lzma --decompress --stdout /my/data.bam.lzma')
[ Custom Decompressor (from file object) Example ]
my_bam = pybam.read(sys.stdin,decompressor='lzma --decompress --stdout') # data given to lzma via stdin
[ Force Internal bgzip Decompressor ]
my_bam = pybam.read('/my/data.bam',decompressor='internal')
"print pybam.wat" in the python terminal to see the possible parsable values,
or visit http://github.com/JohnLonginotto/pybam for the latest info.
'''
def __init__(self,f,fields=False,decompressor=False):
self.file_bytes_read = 0
self.file_chromosomes = []
self.file_alignments_read = 0
self.file_chromosome_lengths = {}
if fields is not False:
            if type(fields) is not list or len(fields) == 0:
raise PybamError('\n\nFields for the static parser must be provided as a non-empty list. You gave a ' + str(type(fields)) + '\n')
else:
for field in fields:
if field.startswith('sam') or field.startswith('bam'):
if field not in parse_codes.keys():
                            raise PybamError('\n\nStatic parser field "' + str(field) + '" from fields ' + str(fields) + ' is not known to this version of pybam!\nPrint "pybam.wat" to see available field names with explanations.\n')
else:
                        raise PybamError('\n\nStatic parser field "' + str(field) + '" from fields ' + str(fields) + ' does not start with "sam" or "bam" and thus is not an available field for the static parsing.\nPrint "pybam.wat" in interactive python to see available field names with explanations.\n')
if decompressor:
if type(decompressor) is str:
                if decompressor != 'internal' and '{}' not in decompressor: raise PybamError('\n\nWhen a custom decompressor is used and the input file is a string, the decompressor string must contain at least one occurrence of "{}" to be substituted with a filepath by pybam.\n')
else: raise PybamError('\n\nUser-supplied decompressor must be a string that when run on the command line decompresses a named file (or stdin), to stdout:\ne.g. "lzma --decompress --stdout {}" if pybam is provided a path as input file, where {} is substituted for that path.\nor just "lzma --decompress --stdout" if pybam is provided a file object instead of a file path, as data from that file object will be piped via stdin to the decompression program.\n')
## First we make a generator that will return chunks of uncompressed data, regardless of how we choose to decompress:
def generator():
DEVNULL = open(os.devnull, 'wb')
# First we need to figure out what sort of file we have - whether it's gzip compressed, uncompressed, or something else entirely!
if type(f) is str:
try: self._file = open(f,'rb')
except: raise PybamError('\n\nCould not open "' + str(self._file.name) + '" for reading!\n')
try: magic = os.read(self._file.fileno(),4)
except: raise PybamError('\n\nCould not read from "' + str(self._file.name) + '"!\n')
elif type(f) is file:
self._file = f
try: magic = os.read(self._file.fileno(),4)
except: raise PybamError('\n\nCould not read from "' + str(self._file.name) + '"!\n')
else: raise PybamError('\n\nInput file was not a string or a file object. It was: "' + str(f) + '"\n')
self.file_name = os.path.basename(os.path.realpath(self._file.name))
self.file_directory = os.path.dirname(os.path.realpath(self._file.name))
if magic == 'BAM\1':
# The user has passed us already unzipped BAM data! Job done :)
data = 'BAM\1' + self._file.read(35536)
self.file_bytes_read += len(data)
self.file_decompressor = 'None'
while data:
yield data
data = self._file.read(35536)
self.file_bytes_read += len(data)
self._file.close()
DEVNULL.close()
raise StopIteration
elif magic == "\x1f\x8b\x08\x04": # The user has passed us compressed gzip/bgzip data, which is typical for a BAM file
# use custom decompressor if provided:
                if decompressor is not False and decompressor != 'internal':
if type(f) is str: self._subprocess = subprocess.Popen( decompressor.replace('{}',f), shell=True, stdout=subprocess.PIPE, stderr=DEVNULL)
else: self._subprocess = subprocess.Popen('{ printf "'+magic+'"; cat; } | ' + decompressor, stdin=self._file, shell=True, stdout=subprocess.PIPE, stderr=DEVNULL)
self.file_decompressor = decompressor
data = self._subprocess.stdout.read(35536)
self.file_bytes_read += len(data)
while data:
yield data
data = self._subprocess.stdout.read(35536)
self.file_bytes_read += len(data)
self._file.close()
DEVNULL.close()
raise StopIteration
# else look for pigz or gzip:
else:
try:
self._subprocess = subprocess.Popen(["pigz"],stdin=DEVNULL,stdout=DEVNULL,stderr=DEVNULL)
if self._subprocess.returncode is None: self._subprocess.kill()
use = 'pigz'
except OSError:
try:
self._subprocess = subprocess.Popen(["gzip"],stdin=DEVNULL,stdout=DEVNULL,stderr=DEVNULL)
if self._subprocess.returncode is None: self._subprocess.kill()
use = 'gzip'
except OSError: use = 'internal'
                    if use != 'internal' and decompressor != 'internal':
if type(f) is str: self._subprocess = subprocess.Popen([ use , '--decompress','--stdout', f ], stdout=subprocess.PIPE, stderr=DEVNULL)
else: self._subprocess = subprocess.Popen('{ printf "'+magic+'"; cat; } | ' + use + ' --decompress --stdout', stdin=f, shell=True, stdout=subprocess.PIPE, stderr=DEVNULL)
time.sleep(1)
                        if self._subprocess.poll() is None:
data = self._subprocess.stdout.read(35536)
self.file_decompressor = use
self.file_bytes_read += len(data)
while data:
yield data
data = self._subprocess.stdout.read(35536)
self.file_bytes_read += len(data)
self._file.close()
DEVNULL.close()
raise StopIteration
# Python's gzip module can't read from a stream that doesn't support seek(), and the zlib module cannot read the bgzip format without a lot of help:
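                    # For reference: each bgzip (BGZF) block is a complete gzip member -- a
                    # 12-byte gzip header with FEXTRA set, a 6-byte "BC" extra subfield whose
                    # BSIZE value is the total block length minus one, the raw deflate data,
                    # and finally a 4-byte CRC32 and a 4-byte uncompressed size. The "try"
                    # below reads BSIZE straight from bytes 16-18 of the block; the "except"
                    # branch walks the header fields out manually.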
self.file_decompressor = 'internal'
raw_data = magic + self._file.read(65536)
self.file_bytes_read = len(raw_data)
internal_cache = []
blocks_left_to_grab = 50
bs = 0
checkpoint = 0
decompress = zlib.decompress
while raw_data:
if len(raw_data) - bs < 35536:
raw_data = raw_data[bs:] + self._file.read(65536)
self.file_bytes_read += len(raw_data) - bs
bs = 0
magic = raw_data[bs:bs+4]
if not magic: break # a child's heart
if magic != "\x1f\x8b\x08\x04": raise PybamError('\n\nThe input file is not in a format I understand. First four bytes: ' + repr(magic) + '\n')
try:
more_bs = bs + unpack("<H", raw_data[bs+16:bs+18])[0] +1
internal_cache.append(decompress(raw_data[bs+18:more_bs-8],-15))
bs = more_bs
except: ## zlib doesnt have a nice exception for when things go wrong. just "error"
header_data = magic + raw_data[bs+4:bs+12]
header_size = 12
extra_len = unpack("<H", header_data[-2:])[0]
while header_size-12 < extra_len:
header_data += raw_data[bs+12:bs+16]
subfield_id = header_data[-4:-2]
subfield_len = unpack("<H", header_data[-2:])[0]
subfield_data = raw_data[bs+16:bs+16+subfield_len]
header_data += subfield_data
header_size += subfield_len + 4
if subfield_id == 'BC': block_size = unpack("<H", subfield_data)[0]
                            deflate_data = raw_data[bs+16+subfield_len:bs+16+subfield_len+block_size-extra_len-19]
                            crc_data = raw_data[bs+16+subfield_len+block_size-extra_len-19:bs+16+subfield_len+block_size-extra_len-19+8] # I have left the numbers verbose, because the above try is the optimised code.
                            bs = bs+16+subfield_len+block_size-extra_len-19+8
                            zipped_data = header_data + deflate_data + crc_data
internal_cache.append(decompress(zipped_data,47)) # 31 works the same as 47.
                            # Although the following is in the bgzip code from biopython, it's not needed if you let zlib decompress the whole zipped_data, header and crc, because it checks anyway (in C land).
                            # I've left the manual crc checks in for documentation purposes:
'''
expected_crc = crc_data[:4]
expected_size = unpack("<I", crc_data[4:])[0]
if len(unzipped_data) != expected_size: print 'ERROR: Failed to unpack due to a Type 1 CRC error. Could the BAM be corrupted?'; exit()
crc = zlib.crc32(unzipped_data)
if crc < 0: crc = pack("<i", crc)
else: crc = pack("<I", crc)
if expected_crc != crc: print 'ERROR: Failed to unpack due to a Type 2 CRC error. Could the BAM be corrupted?'; exit()
'''
blocks_left_to_grab -= 1
if blocks_left_to_grab == 0:
yield ''.join(internal_cache)
internal_cache = []
blocks_left_to_grab = 50
self._file.close()
DEVNULL.close()
if internal_cache != '': yield ''.join(internal_cache)
raise StopIteration
            elif decompressor is not False and decompressor != 'internal':
# It wouldn't be safe to just print to the shell four random bytes from the beginning of a file, so instead it's
# written to a temp file and cat'd. The idea here being that we trust the decompressor string as it was written by
# someone with access to python, so it has system access anyway. The file/data, however, should not be trusted.
magic_file = os.path.join(tempfile.mkdtemp(),'magic')
with open(magic_file,'wb') as mf: mf.write(magic)
if type(f) is str: self._subprocess = subprocess.Popen( decompressor.replace('{}',f), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else: self._subprocess = subprocess.Popen('{ cat "'+magic_file+'"; cat; } | ' + decompressor, stdin=self._file, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.file_decompressor = decompressor
data = self._subprocess.stdout.read(35536)
self.file_bytes_read += len(data)
while data:
yield data
data = self._subprocess.stdout.read(35536)
self.file_bytes_read += len(data)
self._file.close()
DEVNULL.close()
raise StopIteration
else:
raise PybamError('\n\nThe input file is not in a format I understand. First four bytes: ' + repr(magic) + '\n')
## At this point, we know that whatever decompression method was used, a call to self._generator will return some uncompressed data.
self._generator = generator()
## So lets parse the BAM header:
header_cache = ''
while len(header_cache) < 8: header_cache += next(self._generator)
p_from = 0; p_to = 4
if header_cache[p_from:p_to] != 'BAM\1':
raise PybamError('\n\nInput file ' + self.file_name + ' does not appear to be a BAM file.\n')
## Parse the BAM header:
p_from = p_to; p_to += 4
length_of_header = unpack('<i',header_cache[p_from:p_to])[0]
p_from = p_to; p_to += length_of_header
while len(header_cache) < p_to: header_cache += next(self._generator)
self.file_header = header_cache[p_from:p_to]
p_from = p_to; p_to += 4
while len(header_cache) < p_to: header_cache += next(self._generator)
number_of_reference_sequences = unpack('<i',header_cache[p_from:p_to])[0]
for _ in range(number_of_reference_sequences):
p_from = p_to; p_to += 4
while len(header_cache) < p_to: header_cache += next(self._generator)
l_name = unpack('<l',header_cache[p_from:p_to])[0]
p_from = p_to; p_to += l_name
while len(header_cache) < p_to: header_cache += next(self._generator)
self.file_chromosomes.append(header_cache[p_from:p_to -1])
p_from = p_to; p_to += 4
while len(header_cache) < p_to: header_cache += next(self._generator)
self.file_chromosome_lengths[self.file_chromosomes[-1]] = unpack('<l',header_cache[p_from:p_to])[0]
self.file_bytes_read = p_to
self.file_binary_header = buffer(header_cache[:p_to])
header_cache = header_cache[p_to:]
# A quick check to make sure the header of this BAM file makes sense:
chromosomes_from_header = []
for line in self.file_header.split('\n'):
if line.startswith('@SQ\tSN:'):
chromosomes_from_header.append(line.split('\t')[1][3:])
if chromosomes_from_header != self.file_chromosomes:
            raise PybamWarn('For some reason the BAM format stores the chromosome names in two locations,\n the ASCII text header we all know and love, viewable with samtools view -H, and another special binary header\n which is used to translate the chromosome refID (a number) into a chromosome RNAME when you do bam -> sam.\n\nThese two headers should always be the same, but apparently they are not:\nThe ASCII header looks like: ' + self.file_header + '\nWhile the binary header has the following chromosomes: ' + str(self.file_chromosomes) + '\n')
## Variable parsing:
def new_entry(header_cache):
cache = header_cache # we keep a small cache of X bytes of decompressed BAM data, to smoothen out disk access.
p = 0 # where the next alignment/entry starts in the cache
while True:
try:
while len(cache) < p + 4: cache = cache[p:] + next(self._generator); p = 0 # Grab enough bytes to parse blocksize
self.sam_block_size = unpack('<i',cache[p:p+4])[0]
self.file_alignments_read += 1
while len(cache) < p + 4 + self.sam_block_size:
cache = cache[p:] + next(self._generator); p = 0 # Grab enough bytes to parse entry
except StopIteration: break
self.bam = cache[p:p + 4 + self.sam_block_size]
p = p + 4 + self.sam_block_size
yield self
self._new_entry = new_entry(header_cache)
def compile_parser(self,fields):
temp_code = ''
end_of_qname = False
end_of_cigar = False
end_of_seq = False
end_of_qual = False
dependencies = set(fields)
if 'bam' in fields:
fields[fields.index('bam')] = 'self.bam'
if 'sam_block_size' in fields:
fields[fields.index('sam_block_size')] = 'self.sam_block_size'
if 'sam' in dependencies:
dependencies.update(['sam_qname','sam_flag','sam_rname','sam_pos1','sam_mapq','sam_cigar_string','bam_refID','bam_next_refID','sam_rnext','sam_pnext1','sam_tlen','sam_seq','sam_qual','sam_tags_string'])
if 'sam_tags_string' in dependencies:
dependencies.update(['sam_tags_list'])
if 'sam_pos1' in dependencies:
temp_code += "\n sam_pos1 = (0 if sam_pos0 < 0 else sam_pos0 + 1)"
dependencies.update(['sam_pos0'])
if 'sam_pnext1' in dependencies:
temp_code += "\n sam_pnext1 = (0 if sam_pnext0 < 0 else sam_pnext0 + 1)"
dependencies.update(['sam_pnext0'])
if 'sam_qname' in dependencies or 'bam_qname' in dependencies:
temp_code += "\n _end_of_qname = 36 + sam_l_read_name"
dependencies.update(['sam_l_read_name'])
end_of_qname = True
if 'sam_cigar_string' in dependencies or 'sam_cigar_list' in dependencies or 'bam_cigar' in dependencies:
if end_of_qname:
pass
else:
temp_code += "\n _end_of_qname = 36 + sam_l_read_name"
temp_code += "\n _end_of_cigar = _end_of_qname + (4*sam_n_cigar_op)"
dependencies.update(['sam_l_read_name','sam_n_cigar_op'])
end_of_cigar = True
if 'sam_seq' in dependencies or 'bam_seq' in dependencies:
if end_of_cigar:
pass
elif end_of_qname:
temp_code += "\n _end_of_cigar = _end_of_qname + (4*sam_n_cigar_op)"
else:
temp_code += "\n _end_of_cigar = 36 + sam_l_read_name + (4*sam_n_cigar_op)"
temp_code += "\n _end_of_seq = _end_of_cigar + (-((-sam_l_seq)//2))"
dependencies.update(['sam_l_seq','sam_n_cigar_op','sam_l_read_name'])
end_of_seq = True
if 'sam_qual' in dependencies or 'bam_qual' in dependencies:
if end_of_seq:
pass
elif end_of_cigar:
temp_code += "\n _end_of_seq = _end_of_cigar + (-((-sam_l_seq)//2))"
elif end_of_qname:
temp_code += "\n _end_of_seq = _end_of_qname + (4*sam_n_cigar_op) + (-((-sam_l_seq)//2))"
else:
temp_code += "\n _end_of_seq = 36 + sam_l_read_name + (4*sam_n_cigar_op) + (-((-sam_l_seq)//2))"
temp_code += "\n _end_of_qual = _end_of_seq + sam_l_seq"
dependencies.update(['sam_l_seq','sam_n_cigar_op','sam_l_read_name'])
end_of_qual = True
if 'sam_tags_list' in dependencies or 'bam_tags' in dependencies:
if end_of_qual:
pass
elif end_of_seq:
temp_code += "\n _end_of_qual = _end_of_seq + sam_l_seq"
elif end_of_cigar:
temp_code += "\n _end_of_qual = _end_of_cigar + (-((-sam_l_seq)//2)) + sam_l_seq"
elif end_of_qname:
temp_code += "\n _end_of_qual = _end_of_qname + (4*sam_n_cigar_op) + (-((-sam_l_seq)//2)) + sam_l_seq"
else:
temp_code += "\n _end_of_qual = 36 + sam_l_read_name + (4*sam_n_cigar_op) + (-((-sam_l_seq)//2)) + sam_l_seq"
dependencies.update(['sam_l_seq','sam_n_cigar_op','sam_l_read_name'])
if 'sam_rname' in dependencies:
temp_code += "\n sam_rname = '*' if sam_refID < 0 else self.file_chromosomes[sam_refID]"
dependencies.update(['sam_refID'])
if 'sam_rnext' in dependencies:
temp_code += "\n sam_rnext = '*' if sam_next_refID < 0 else self.file_chromosomes[sam_next_refID]"
dependencies.update(['sam_next_refID'])
## First we figure out what data from the static portion of the BAM entry we'll need:
tmp = {}
tmp['code'] = 'def parser(self):\n from array import array\n from struct import unpack\n for _ in self._new_entry:'
tmp['last_start'] = None
tmp['name_list'] = []
tmp['dtype_list'] = []
def pack_up(name,dtype,length,end,tmp):
if name in dependencies:
if tmp['last_start'] is None:
tmp['last_start'] = end - length
tmp['name_list'].append(name)
tmp['dtype_list'].append(dtype)
elif tmp['last_start'] is not None:
tmp['code'] += '\n ' + ', '.join(tmp['name_list']) + ' = unpack("<' + ''.join(tmp['dtype_list']) + '",self.bam[' + str(tmp['last_start']) + ':' + str(end-length) + '])'
if len(tmp['dtype_list']) == 1:
tmp['code'] += '[0]'
tmp['last_start'] = None
tmp['name_list'] = []
tmp['dtype_list'] = []
pack_up('sam_refID', 'i',4, 8,tmp)
pack_up('sam_pos0', 'i',4,12,tmp)
pack_up('sam_l_read_name', 'B',1,13,tmp)
pack_up('sam_mapq', 'B',1,14,tmp)
pack_up('sam_bin', 'H',2,16,tmp)
pack_up('sam_n_cigar_op', 'H',2,18,tmp)
pack_up('sam_flag', 'H',2,20,tmp)
pack_up('sam_l_seq', 'i',4,24,tmp)
pack_up('sam_next_refID', 'i',4,28,tmp)
pack_up('sam_pnext0', 'i',4,32,tmp)
pack_up('sam_tlen', 'i',4,36,tmp)
pack_up( None, None,0,36,tmp) # To add anything not yet added.
code = tmp['code']
del tmp
code += temp_code
            # Fixed-length BAM data (where we just grab the bytes, we don't unpack) can, however, be grabbed individually.
if 'bam_block_size' in dependencies: code += "\n bam_block_size = self.bam[0 : 4 ]"
if 'bam_refID' in dependencies: code += "\n bam_refID = self.bam[4 : 8 ]"
if 'bam_pos' in dependencies: code += "\n bam_pos = self.bam[8 : 12 ]"
if 'bam_l_read_name' in dependencies: code += "\n bam_l_read_name = self.bam[12 : 13 ]"
if 'bam_mapq' in dependencies: code += "\n bam_mapq = self.bam[13 : 14 ]"
if 'bam_bin' in dependencies: code += "\n bam_bin = self.bam[14 : 16 ]"
if 'bam_n_cigar_op' in dependencies: code += "\n bam_n_cigar_op = self.bam[16 : 18 ]"
if 'bam_flag' in dependencies: code += "\n bam_flag = self.bam[18 : 20 ]"
if 'bam_l_seq' in dependencies: code += "\n bam_l_seq = self.bam[20 : 24 ]"
if 'bam_next_refID' in dependencies: code += "\n bam_next_refID = self.bam[24 : 28 ]"
if 'bam_pnext' in dependencies: code += "\n bam_pnext = self.bam[28 : 32 ]"
if 'bam_tlen' in dependencies: code += "\n bam_tlen = self.bam[32 : 36 ]"
if 'bam_qname' in dependencies: code += "\n bam_qname = self.bam[36 : _end_of_qname ]"
if 'bam_cigar' in dependencies: code += "\n bam_cigar = self.bam[_end_of_qname : _end_of_cigar ]"
if 'bam_seq' in dependencies: code += "\n bam_seq = self.bam[_end_of_cigar : _end_of_seq ]"
if 'bam_qual' in dependencies: code += "\n bam_qual = self.bam[_end_of_seq : _end_of_qual ]"
if 'bam_tags' in dependencies: code += "\n bam_tags = self.bam[_end_of_qual : ]"
if 'sam_qname' in dependencies:
if 'bam_qname' in dependencies: code += "\n sam_qname = bam_qname[:-1]"
else: code += "\n sam_qname = self.bam[36 : _end_of_qname -1 ]"
if 'sam_cigar_list' in dependencies:
if 'bam_cigar' in dependencies: code += "\n sam_cigar_list = [( cig >> 4 , cigar_codes[cig & 0b1111]) for cig in array('I', bam_cigar) ]"
else: code += "\n sam_cigar_list = [( cig >> 4 , cigar_codes[cig & 0b1111]) for cig in array('I', self.bam[_end_of_qname : _end_of_cigar]) ]"
if 'sam_cigar_string'in dependencies:
if 'bam_cigar' in dependencies: code += "\n sam_cigar_string = ''.join([ str(cig >> 4) + cigar_codes[cig & 0b1111] for cig in array('I', bam_cigar)])"
else: code += "\n sam_cigar_string = ''.join([ str(cig >> 4) + cigar_codes[cig & 0b1111] for cig in array('I', self.bam[_end_of_qname : _end_of_cigar]) ])"
if 'sam_seq' in dependencies:
if 'bam_seq' in dependencies: code += "\n sam_seq = ''.join( [ dna_codes[dna >> 4] + dna_codes[dna & 0b1111] for dna in array('B', bam_seq)])[:sam_l_seq]"
else: code += "\n sam_seq = ''.join( [ dna_codes[dna >> 4] + dna_codes[dna & 0b1111] for dna in array('B', self.bam[_end_of_cigar : _end_of_seq])])[:sam_l_seq]"
if 'sam_qual' in dependencies:
if 'bam_qual' in dependencies: code += "\n sam_qual = ''.join( [ chr(ord(quality) + 33) for quality in bam_qual ])"
else: code += "\n sam_qual = ''.join( [ chr(ord(quality) + 33) for quality in self.bam[_end_of_seq : _end_of_qual ]])"
if 'sam_tags_list' in dependencies:
code += '''
sam_tags_list = []
offset = _end_of_qual
while offset != len(self.bam):
tag_name = self.bam[offset:offset+2]
tag_type = self.bam[offset+2]
if tag_type == 'Z':
offset_end = self.bam.index('\\0',offset+3)+1
tag_data = self.bam[offset+3:offset_end-1]
elif tag_type in CtoPy:
offset_end = offset+3+py4py[tag_type]
tag_data = unpack(CtoPy[tag_type],self.bam[offset+3:offset_end])[0]
elif tag_type == 'B':
offset_end = offset+8+(unpack('<i',self.bam[offset+4:offset+8])[0]*py4py[self.bam[offset+3]])
tag_data = array(self.bam[offset+3] , self.bam[offset+8:offset_end] )
else:
print 'PYBAM ERROR: I dont know how to parse BAM tags in this format: ',repr(tag_type)
print ' This is simply because I never saw this kind of tag during development.'
print ' If you could mail the following chunk of text to john at john.uk.com, i will fix this up for everyone :)'
                print repr(tag_type),repr(self.bam[offset+3:])
exit()
sam_tags_list.append((tag_name,tag_type,tag_data))
offset = offset_end'''
if 'sam_tags_string' in dependencies:
code += "\n sam_tags_string = '\t'.join(A + ':' + ('i' if B in 'cCsSI' else B) + ':' + ((C.typecode + ',' + ','.join(map(str,C))) if type(C)==array else str(C)) for A,B,C in self.sam_tags_list)"
if 'sam' in dependencies:
code += "\n sam = sam_qname + '\t' + str(sam_flag) + '\t' + sam_rname + '\t' + str(sam_pos1) + '\t' + str(sam_mapq) + '\t' + ('*' if sam_cigar_string == '' else sam_cigar_string) + '\t' + ('=' if bam_refID == bam_next_refID else sam_rnext) + '\t' + str(sam_pnext1) + '\t' + str(sam_tlen) + '\t' + sam_seq + '\t' + sam_qual + '\t' + sam_tags_string"
code += '\n yield ' + ','.join([x for x in fields]) + '\n'
self._static_parser_code = code # "code" is the static parser's code as a string (a function called "parser")
exec_dict = { # This dictionary stores things the exec'd code needs to know about, and will store the compiled function after exec()
'unpack':unpack,
'array':array,
'dna_codes':dna_codes,
'CtoPy':CtoPy,
'py4py':py4py,
'cigar_codes':cigar_codes
}
exec code in exec_dict # exec() compiles "code" to real code, creating the "parser" function and adding it to exec_dict['parser']
return exec_dict['parser']
if fields:
static_parser = compile_parser(self,fields)(self)
def next_read(): return next(static_parser)
else:
def next_read(): return next(self._new_entry)
self.next = next_read
def __iter__(self): return self
def __str__(self): return self.sam
## Methods to pull out raw bam data from entry (so still in its binary encoding). This can be helpful in some scenarios.
@property
def bam_block_size(self): return self.bam[ : 4 ]
@property
def bam_refID(self): return self.bam[ 4 : 8 ]
@property
def bam_pos(self): return self.bam[ 8 : 12 ]
@property
def bam_l_read_name(self): return self.bam[ 12 : 13 ]
@property
def bam_mapq(self): return self.bam[ 13 : 14 ]
@property
def bam_bin(self): return self.bam[ 14 : 16 ]
@property
def bam_n_cigar_op(self): return self.bam[ 16 : 18 ]
@property
def bam_flag(self): return self.bam[ 18 : 20 ]
@property
def bam_l_seq(self): return self.bam[ 20 : 24 ]
@property
def bam_next_refID(self): return self.bam[ 24 : 28 ]
@property
def bam_pnext(self): return self.bam[ 28 : 32 ]
@property
def bam_tlen(self): return self.bam[ 32 : 36 ]
@property
def bam_qname(self): return self.bam[ 36 : self._end_of_qname ]
@property
def bam_cigar(self): return self.bam[ self._end_of_qname : self._end_of_cigar ]
@property
def bam_seq(self): return self.bam[ self._end_of_cigar : self._end_of_seq ]
@property
def bam_qual(self): return self.bam[ self._end_of_seq : self._end_of_qual ]
@property
def bam_tags(self): return self.bam[ self._end_of_qual : ]
@property
def sam_refID(self): return unpack( '<i', self.bam[ 4 : 8 ] )[0]
@property
def sam_pos0(self): return unpack( '<i', self.bam[ 8 : 12 ] )[0]
@property
def sam_l_read_name(self): return unpack( '<B', self.bam[ 12 : 13 ] )[0]
@property
def sam_mapq(self): return unpack( '<B', self.bam[ 13 : 14 ] )[0]
@property
def sam_bin(self): return unpack( '<H', self.bam[ 14 : 16 ] )[0]
@property
def sam_n_cigar_op(self): return unpack( '<H', self.bam[ 16 : 18 ] )[0]
@property
def sam_flag(self): return unpack( '<H', self.bam[ 18 : 20 ] )[0]
@property
def sam_l_seq(self): return unpack( '<i', self.bam[ 20 : 24 ] )[0]
@property
def sam_next_refID(self): return unpack( '<i', self.bam[ 24 : 28 ] )[0]
@property
def sam_pnext0(self): return unpack( '<i', self.bam[ 28 : 32 ] )[0]
@property
def sam_tlen(self): return unpack( '<i', self.bam[ 32 : 36 ] )[0]
@property
def sam_qname(self): return self.bam[ 36 : self._end_of_qname -1 ] # -1 to remove trailing NUL byte
@property
def sam_cigar_list(self): return [ (cig >> 4 , cigar_codes[cig & 0b1111] ) for cig in array('I', self.bam[self._end_of_qname : self._end_of_cigar ])]
@property
def sam_cigar_string(self): return ''.join( [ str(cig >> 4) + cigar_codes[cig & 0b1111] for cig in array('I', self.bam[self._end_of_qname : self._end_of_cigar ])])
@property
def sam_seq(self): return ''.join( [ dna_codes[dna >> 4] + dna_codes[dna & 0b1111] for dna in array('B', self.bam[self._end_of_cigar : self._end_of_seq ])])[:self.sam_l_seq] # As DNA is 4 bits packed 2-per-byte, there might be a trailing '0000', so we can either
@property
def sam_qual(self): return ''.join( [ chr(ord(quality) + 33) for quality in self.bam[self._end_of_seq : self._end_of_qual ]])
@property
def sam_tags_list(self):
result = []
offset = self._end_of_qual
while offset != len(self.bam):
tag_name = self.bam[offset:offset+2]
tag_type = self.bam[offset+2]
if tag_type == 'Z':
offset_end = self.bam.index('\x00',offset+3)+1
tag_data = self.bam[offset+3:offset_end-1]
elif tag_type in CtoPy:
offset_end = offset+3+py4py[tag_type]
tag_data = unpack(CtoPy[tag_type],self.bam[offset+3:offset_end])[0]
elif tag_type == 'B':
offset_end = offset+8+(unpack('<i',self.bam[offset+4:offset+8])[0]*py4py[self.bam[offset+3]])
tag_data = array(self.bam[offset+3] , self.bam[offset+8:offset_end] )
else:
print 'PYBAM ERROR: I dont know how to parse BAM tags in this format: ',repr(tag_type)
print ' This is simply because I never saw this kind of tag during development.'
print ' If you could mail the following chunk of text to john at john.uk.com, ill fix this up :)'
                print repr(tag_type),repr(self.bam[offset+3:])
exit()
result.append((tag_name,tag_type,tag_data))
offset = offset_end
return result
@property
def sam_tags_string(self):
return '\t'.join(A + ':' + ('i' if B in 'cCsSI' else B) + ':' + ((C.typecode + ',' + ','.join(map(str,C))) if type(C)==array else str(C)) for A,B,C in self.sam_tags_list)
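    # A typical sam_tags_string value looks like "NM:i:0\tMD:Z:100" (illustrative tag
    # values, not taken from any real file); numeric BAM type codes (cCsSI) are reported
    # as "i" to match how samtools prints SAM.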
## BONUS methods - methods that mimic how samtools works.
@property
def sam_pos1(self): return 0 if self.sam_pos0 < 0 else self.sam_pos0 + 1
@property
def sam_pnext1(self): return 0 if self.sam_pnext0 < 0 else self.sam_pnext0 + 1
@property
def sam_rname(self): return '*' if self.sam_refID < 0 else self.file_chromosomes[self.sam_refID ]
@property
def sam_rnext(self): return '*' if self.sam_next_refID < 0 else self.file_chromosomes[self.sam_next_refID]
@property
def sam(self): return (
self.sam_qname + '\t' +
str(self.sam_flag) + '\t' +
self.sam_rname + '\t' +
str(self.sam_pos1) + '\t' +
str(self.sam_mapq) + '\t' +
('*' if self.sam_cigar_string == '' else self.sam_cigar_string) + '\t' +
('=' if self.bam_refID == self.bam_next_refID else self.sam_rnext) + '\t' +
str(self.sam_pnext1) + '\t' +
str(self.sam_tlen) + '\t' +
self.sam_seq + '\t' +
self.sam_qual + '\t' +
self.sam_tags_string
)
## Internal methods - methods used to calculate where variable-length blocks start/end
@property
def _end_of_qname(self): return self.sam_l_read_name + 36 # fixed-length stuff at the beginning takes up 36 bytes.
@property
def _end_of_cigar(self): return self._end_of_qname + (4*self.sam_n_cigar_op) # 4 bytes per n_cigar_op
@property
def _end_of_seq(self): return self._end_of_cigar + (-((-self.sam_l_seq)//2)) # {blurgh}
@property
def _end_of_qual(self): return self._end_of_seq + self.sam_l_seq # qual has the same length as seq
    def __del__(self):
        if hasattr(self, '_subprocess') and self._subprocess.returncode is None: self._subprocess.kill()
        if hasattr(self, '_file'): self._file.close()
class PybamWarn(Exception): pass
class PybamError(Exception): pass
|
JohnLonginotto/pybam
|
pybam.py
|
Python
|
apache-2.0
| 53,233
|
"""Support for MQTT JSON lights."""
from contextlib import suppress
import json
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_MODE,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_RGB_COLOR,
ATTR_RGBW_COLOR,
ATTR_RGBWW_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
ATTR_XY_COLOR,
COLOR_MODE_COLOR_TEMP,
COLOR_MODE_HS,
COLOR_MODE_RGB,
COLOR_MODE_RGBW,
COLOR_MODE_RGBWW,
COLOR_MODE_XY,
FLASH_LONG,
FLASH_SHORT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
VALID_COLOR_MODES,
LightEntity,
legacy_supported_features,
valid_supported_color_modes,
)
from homeassistant.const import (
CONF_BRIGHTNESS,
CONF_COLOR_TEMP,
CONF_EFFECT,
CONF_HS,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_RGB,
CONF_WHITE_VALUE,
CONF_XY,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType
import homeassistant.util.color as color_util
from .. import CONF_COMMAND_TOPIC, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC, subscription
from ... import mqtt
from ..debug_info import log_messages
from ..mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity
from .schema import MQTT_LIGHT_SCHEMA_SCHEMA
from .schema_basic import CONF_BRIGHTNESS_SCALE, MQTT_LIGHT_ATTRIBUTES_BLOCKED
_LOGGER = logging.getLogger(__name__)
DOMAIN = "mqtt_json"
DEFAULT_BRIGHTNESS = False
DEFAULT_COLOR_MODE = False
DEFAULT_COLOR_TEMP = False
DEFAULT_EFFECT = False
DEFAULT_FLASH_TIME_LONG = 10
DEFAULT_FLASH_TIME_SHORT = 2
DEFAULT_NAME = "MQTT JSON Light"
DEFAULT_OPTIMISTIC = False
DEFAULT_RGB = False
DEFAULT_WHITE_VALUE = False
DEFAULT_XY = False
DEFAULT_HS = False
DEFAULT_BRIGHTNESS_SCALE = 255
CONF_COLOR_MODE = "color_mode"
CONF_SUPPORTED_COLOR_MODES = "supported_color_modes"
CONF_EFFECT_LIST = "effect_list"
CONF_FLASH_TIME_LONG = "flash_time_long"
CONF_FLASH_TIME_SHORT = "flash_time_short"
CONF_MAX_MIREDS = "max_mireds"
CONF_MIN_MIREDS = "min_mireds"
def valid_color_configuration(config):
"""Test color_mode is not combined with deprecated config."""
deprecated = {CONF_COLOR_TEMP, CONF_HS, CONF_RGB, CONF_WHITE_VALUE, CONF_XY}
if config[CONF_COLOR_MODE] and any(config.get(key) for key in deprecated):
raise vol.Invalid(f"color_mode must not be combined with any of {deprecated}")
return config
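# For illustration (hypothetical values): a config with color_mode: true combined with,
# say, rgb: true fails this validator with vol.Invalid, while color_mode: true together
# with supported_color_modes and none of the deprecated options passes.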
PLATFORM_SCHEMA_JSON = vol.All(
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
vol.Optional(
CONF_BRIGHTNESS_SCALE, default=DEFAULT_BRIGHTNESS_SCALE
): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Inclusive(
CONF_COLOR_MODE, "color_mode", default=DEFAULT_COLOR_MODE
): cv.boolean,
vol.Optional(CONF_COLOR_TEMP, default=DEFAULT_COLOR_TEMP): cv.boolean,
vol.Optional(CONF_EFFECT, default=DEFAULT_EFFECT): cv.boolean,
vol.Optional(CONF_EFFECT_LIST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(
CONF_FLASH_TIME_LONG, default=DEFAULT_FLASH_TIME_LONG
): cv.positive_int,
vol.Optional(
CONF_FLASH_TIME_SHORT, default=DEFAULT_FLASH_TIME_SHORT
): cv.positive_int,
vol.Optional(CONF_HS, default=DEFAULT_HS): cv.boolean,
vol.Optional(CONF_MAX_MIREDS): cv.positive_int,
vol.Optional(CONF_MIN_MIREDS): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_QOS, default=mqtt.DEFAULT_QOS): vol.All(
vol.Coerce(int), vol.In([0, 1, 2])
),
vol.Optional(CONF_RETAIN, default=mqtt.DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_RGB, default=DEFAULT_RGB): cv.boolean,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Inclusive(CONF_SUPPORTED_COLOR_MODES, "color_mode"): vol.All(
cv.ensure_list,
[vol.In(VALID_COLOR_MODES)],
vol.Unique(),
valid_supported_color_modes,
),
vol.Optional(CONF_WHITE_VALUE, default=DEFAULT_WHITE_VALUE): cv.boolean,
vol.Optional(CONF_XY, default=DEFAULT_XY): cv.boolean,
},
)
.extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema),
valid_color_configuration,
)
async def async_setup_entity_json(
hass, config: ConfigType, async_add_entities, config_entry, discovery_data
):
"""Set up a MQTT JSON Light."""
async_add_entities([MqttLightJson(hass, config, config_entry, discovery_data)])
class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
"""Representation of a MQTT JSON light."""
_attributes_extra_blocked = MQTT_LIGHT_ATTRIBUTES_BLOCKED
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize MQTT JSON light."""
self._state = False
self._supported_features = 0
self._topic = None
self._optimistic = False
self._brightness = None
self._color_mode = None
self._color_temp = None
self._effect = None
self._flash_times = None
self._hs = None
self._rgb = None
self._rgbw = None
self._rgbww = None
self._white_value = None
self._xy = None
MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
@staticmethod
def config_schema():
"""Return the config schema."""
return PLATFORM_SCHEMA_JSON
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._topic = {
key: config.get(key) for key in (CONF_STATE_TOPIC, CONF_COMMAND_TOPIC)
}
optimistic = config[CONF_OPTIMISTIC]
self._optimistic = optimistic or self._topic[CONF_STATE_TOPIC] is None
self._flash_times = {
key: config.get(key)
for key in (CONF_FLASH_TIME_SHORT, CONF_FLASH_TIME_LONG)
}
self._supported_features = SUPPORT_TRANSITION | SUPPORT_FLASH
self._supported_features |= config[CONF_EFFECT] and SUPPORT_EFFECT
if not self._config[CONF_COLOR_MODE]:
self._supported_features |= config[CONF_BRIGHTNESS] and SUPPORT_BRIGHTNESS
self._supported_features |= config[CONF_COLOR_TEMP] and SUPPORT_COLOR_TEMP
self._supported_features |= config[CONF_HS] and SUPPORT_COLOR
self._supported_features |= config[CONF_RGB] and (
SUPPORT_COLOR | SUPPORT_BRIGHTNESS
)
self._supported_features |= config[CONF_WHITE_VALUE] and SUPPORT_WHITE_VALUE
self._supported_features |= config[CONF_XY] and SUPPORT_COLOR
def _update_color(self, values):
if not self._config[CONF_COLOR_MODE]:
# Deprecated color handling
try:
red = int(values["color"]["r"])
green = int(values["color"]["g"])
blue = int(values["color"]["b"])
self._hs = color_util.color_RGB_to_hs(red, green, blue)
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid RGB color value received")
return
try:
x_color = float(values["color"]["x"])
y_color = float(values["color"]["y"])
self._hs = color_util.color_xy_to_hs(x_color, y_color)
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid XY color value received")
return
try:
hue = float(values["color"]["h"])
saturation = float(values["color"]["s"])
self._hs = (hue, saturation)
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid HS color value received")
return
else:
color_mode = values["color_mode"]
if not self._supports_color_mode(color_mode):
_LOGGER.warning("Invalid color mode received")
return
try:
if color_mode == COLOR_MODE_COLOR_TEMP:
self._color_temp = int(values["color_temp"])
self._color_mode = COLOR_MODE_COLOR_TEMP
elif color_mode == COLOR_MODE_HS:
hue = float(values["color"]["h"])
saturation = float(values["color"]["s"])
self._color_mode = COLOR_MODE_HS
self._hs = (hue, saturation)
elif color_mode == COLOR_MODE_RGB:
r = int(values["color"]["r"]) # pylint: disable=invalid-name
g = int(values["color"]["g"]) # pylint: disable=invalid-name
b = int(values["color"]["b"]) # pylint: disable=invalid-name
self._color_mode = COLOR_MODE_RGB
self._rgb = (r, g, b)
elif color_mode == COLOR_MODE_RGBW:
r = int(values["color"]["r"]) # pylint: disable=invalid-name
g = int(values["color"]["g"]) # pylint: disable=invalid-name
b = int(values["color"]["b"]) # pylint: disable=invalid-name
w = int(values["color"]["w"]) # pylint: disable=invalid-name
self._color_mode = COLOR_MODE_RGBW
self._rgbw = (r, g, b, w)
elif color_mode == COLOR_MODE_RGBWW:
r = int(values["color"]["r"]) # pylint: disable=invalid-name
g = int(values["color"]["g"]) # pylint: disable=invalid-name
b = int(values["color"]["b"]) # pylint: disable=invalid-name
c = int(values["color"]["c"]) # pylint: disable=invalid-name
w = int(values["color"]["w"]) # pylint: disable=invalid-name
self._color_mode = COLOR_MODE_RGBWW
self._rgbww = (r, g, b, c, w)
elif color_mode == COLOR_MODE_XY:
x = float(values["color"]["x"]) # pylint: disable=invalid-name
y = float(values["color"]["y"]) # pylint: disable=invalid-name
self._color_mode = COLOR_MODE_XY
self._xy = (x, y)
except (KeyError, ValueError):
_LOGGER.warning("Invalid or incomplete color value received")
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
last_state = await self.async_get_last_state()
@callback
@log_messages(self.hass, self.entity_id)
def state_received(msg):
"""Handle new MQTT messages."""
values = json.loads(msg.payload)
if values["state"] == "ON":
self._state = True
elif values["state"] == "OFF":
self._state = False
if self._supported_features and SUPPORT_COLOR and "color" in values:
if values["color"] is None:
self._hs = None
else:
self._update_color(values)
if self._config[CONF_COLOR_MODE] and "color_mode" in values:
self._update_color(values)
if self._supported_features and SUPPORT_BRIGHTNESS:
try:
self._brightness = int(
values["brightness"]
/ float(self._config[CONF_BRIGHTNESS_SCALE])
* 255
)
except KeyError:
pass
except (TypeError, ValueError):
_LOGGER.warning("Invalid brightness value received")
if (
self._supported_features
and SUPPORT_COLOR_TEMP
and not self._config[CONF_COLOR_MODE]
):
try:
if values["color_temp"] is None:
self._color_temp = None
else:
self._color_temp = int(values["color_temp"])
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid color temp value received")
if self._supported_features and SUPPORT_EFFECT:
with suppress(KeyError):
self._effect = values["effect"]
if self._supported_features and SUPPORT_WHITE_VALUE:
try:
self._white_value = int(values["white_value"])
except KeyError:
pass
except ValueError:
_LOGGER.warning("Invalid white value received")
self.async_write_ha_state()
if self._topic[CONF_STATE_TOPIC] is not None:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
"state_topic": {
"topic": self._topic[CONF_STATE_TOPIC],
"msg_callback": state_received,
"qos": self._config[CONF_QOS],
}
},
)
if self._optimistic and last_state:
self._state = last_state.state == STATE_ON
last_attributes = last_state.attributes
self._brightness = last_attributes.get(ATTR_BRIGHTNESS, self._brightness)
self._color_mode = last_attributes.get(ATTR_COLOR_MODE, self._color_mode)
self._color_temp = last_attributes.get(ATTR_COLOR_TEMP, self._color_temp)
self._effect = last_attributes.get(ATTR_EFFECT, self._effect)
self._hs = last_attributes.get(ATTR_HS_COLOR, self._hs)
self._rgb = last_attributes.get(ATTR_RGB_COLOR, self._rgb)
self._rgbw = last_attributes.get(ATTR_RGBW_COLOR, self._rgbw)
self._rgbww = last_attributes.get(ATTR_RGBWW_COLOR, self._rgbww)
self._white_value = last_attributes.get(ATTR_WHITE_VALUE, self._white_value)
self._xy = last_attributes.get(ATTR_XY_COLOR, self._xy)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def color_temp(self):
"""Return the color temperature in mired."""
return self._color_temp
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._config.get(CONF_MIN_MIREDS, super().min_mireds)
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._config.get(CONF_MAX_MIREDS, super().max_mireds)
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._config.get(CONF_EFFECT_LIST)
@property
def hs_color(self):
"""Return the hs color value."""
return self._hs
@property
def rgb_color(self):
"""Return the hs color value."""
return self._rgb
@property
def rgbw_color(self):
"""Return the hs color value."""
return self._rgbw
@property
def rgbww_color(self):
"""Return the hs color value."""
return self._rgbww
@property
def xy_color(self):
"""Return the hs color value."""
return self._xy
@property
def white_value(self):
"""Return the white property."""
return self._white_value
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@property
def color_mode(self):
"""Return current color mode."""
return self._color_mode
@property
def supported_color_modes(self):
"""Flag supported color modes."""
return self._config.get(CONF_SUPPORTED_COLOR_MODES)
@property
def supported_features(self):
"""Flag supported features."""
return legacy_supported_features(
self._supported_features, self._config.get(CONF_SUPPORTED_COLOR_MODES)
)
def _set_flash_and_transition(self, message, **kwargs):
if ATTR_TRANSITION in kwargs:
message["transition"] = kwargs[ATTR_TRANSITION]
if ATTR_FLASH in kwargs:
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
message["flash"] = self._flash_times[CONF_FLASH_TIME_LONG]
elif flash == FLASH_SHORT:
message["flash"] = self._flash_times[CONF_FLASH_TIME_SHORT]
def _scale_rgbxx(self, rgbxx, kwargs):
# If there's a brightness topic set, we don't want to scale the
# RGBxx values given using the brightness.
if self._config[CONF_BRIGHTNESS]:
brightness = 255
else:
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
return tuple(round(i / 255 * brightness) for i in rgbxx)
def _supports_color_mode(self, color_mode):
return self.supported_color_modes and color_mode in self.supported_color_modes
async def async_turn_on(self, **kwargs): # noqa: C901
"""Turn the device on.
This method is a coroutine.
"""
should_update = False
message = {"state": "ON"}
if ATTR_HS_COLOR in kwargs and (
self._config[CONF_HS] or self._config[CONF_RGB] or self._config[CONF_XY]
):
hs_color = kwargs[ATTR_HS_COLOR]
message["color"] = {}
if self._config[CONF_RGB]:
# If there's a brightness topic set, we don't want to scale the
# RGB values given using the brightness.
if self._config[CONF_BRIGHTNESS]:
brightness = 255
else:
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
rgb = color_util.color_hsv_to_RGB(
hs_color[0], hs_color[1], brightness / 255 * 100
)
message["color"]["r"] = rgb[0]
message["color"]["g"] = rgb[1]
message["color"]["b"] = rgb[2]
if self._config[CONF_XY]:
xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR])
message["color"]["x"] = xy_color[0]
message["color"]["y"] = xy_color[1]
if self._config[CONF_HS]:
message["color"]["h"] = hs_color[0]
message["color"]["s"] = hs_color[1]
if self._optimistic:
self._hs = kwargs[ATTR_HS_COLOR]
should_update = True
if ATTR_HS_COLOR in kwargs and self._supports_color_mode(COLOR_MODE_HS):
hs_color = kwargs[ATTR_HS_COLOR]
message["color"] = {"h": hs_color[0], "s": hs_color[1]}
if self._optimistic:
self._color_mode = COLOR_MODE_HS
self._hs = hs_color
should_update = True
if ATTR_RGB_COLOR in kwargs and self._supports_color_mode(COLOR_MODE_RGB):
rgb = self._scale_rgbxx(kwargs[ATTR_RGB_COLOR], kwargs)
message["color"] = {"r": rgb[0], "g": rgb[1], "b": rgb[2]}
if self._optimistic:
self._color_mode = COLOR_MODE_RGB
self._rgb = rgb
should_update = True
if ATTR_RGBW_COLOR in kwargs and self._supports_color_mode(COLOR_MODE_RGBW):
rgb = self._scale_rgbxx(kwargs[ATTR_RGBW_COLOR], kwargs)
message["color"] = {"r": rgb[0], "g": rgb[1], "b": rgb[2], "w": rgb[3]}
if self._optimistic:
self._color_mode = COLOR_MODE_RGBW
self._rgbw = rgb
should_update = True
if ATTR_RGBWW_COLOR in kwargs and self._supports_color_mode(COLOR_MODE_RGBWW):
rgb = self._scale_rgbxx(kwargs[ATTR_RGBWW_COLOR], kwargs)
message["color"] = {
"r": rgb[0],
"g": rgb[1],
"b": rgb[2],
"c": rgb[3],
"w": rgb[4],
}
if self._optimistic:
self._color_mode = COLOR_MODE_RGBWW
self._rgbww = rgb
should_update = True
if ATTR_XY_COLOR in kwargs and self._supports_color_mode(COLOR_MODE_XY):
xy = kwargs[ATTR_XY_COLOR] # pylint: disable=invalid-name
message["color"] = {"x": xy[0], "y": xy[1]}
if self._optimistic:
self._color_mode = COLOR_MODE_XY
self._xy = xy
should_update = True
self._set_flash_and_transition(message, **kwargs)
if ATTR_BRIGHTNESS in kwargs and self._config[CONF_BRIGHTNESS]:
brightness_normalized = kwargs[ATTR_BRIGHTNESS] / DEFAULT_BRIGHTNESS_SCALE
brightness_scale = self._config[CONF_BRIGHTNESS_SCALE]
device_brightness = min(
round(brightness_normalized * brightness_scale), brightness_scale
)
# Make sure the brightness is not rounded down to 0
device_brightness = max(device_brightness, 1)
message["brightness"] = device_brightness
if self._optimistic:
self._brightness = kwargs[ATTR_BRIGHTNESS]
should_update = True
if ATTR_COLOR_TEMP in kwargs:
message["color_temp"] = int(kwargs[ATTR_COLOR_TEMP])
if self._optimistic:
self._color_temp = kwargs[ATTR_COLOR_TEMP]
should_update = True
if ATTR_EFFECT in kwargs:
message["effect"] = kwargs[ATTR_EFFECT]
if self._optimistic:
self._effect = kwargs[ATTR_EFFECT]
should_update = True
if ATTR_WHITE_VALUE in kwargs:
message["white_value"] = int(kwargs[ATTR_WHITE_VALUE])
if self._optimistic:
self._white_value = kwargs[ATTR_WHITE_VALUE]
should_update = True
mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
json.dumps(message),
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = True
should_update = True
if should_update:
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the device off.
This method is a coroutine.
"""
message = {"state": "OFF"}
self._set_flash_and_transition(message, **kwargs)
mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
json.dumps(message),
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = False
self.async_write_ha_state()
|
sander76/home-assistant
|
homeassistant/components/mqtt/light/schema_json.py
|
Python
|
apache-2.0
| 23,959
|
from django.shortcuts import render,get_object_or_404
from django.http import HttpResponse
from django.http import Http404
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
#from django.template import RequestContext, loader
from .models import Question,Choice
# Create your views here.
def detail(request, question_id):
    # try:
    #     question = Question.objects.get(pk=question_id)
    # except Question.DoesNotExist:
    #     raise Http404("Question does not exist")
    question = get_object_or_404(Question, pk=question_id)
return render(request, 'polls/detail.html', {'question': question})
#return HttpResponse("You're looking at question %s." % question_id)
def results(request, question_id):
# response = "You're looking at the results of question %s."
# return HttpResponse(response % question_id)
question = get_object_or_404(Question, pk=question_id)
return render(request, 'polls/results.html', {'question': question})
def vote(request, question_id):
#return HttpResponse("You're voting on question %s." % question_id)
p = get_object_or_404(Question, pk=question_id)
try:
selected_choice = p.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the question voting form.
return render(request, 'polls/detail.html', {
'question': p,
'error_message': "You didn't select a choice.",
})
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,)))
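# A minimal sketch (not part of the original views) restating the
# Post/Redirect/Get pattern used in vote() above; the template name and URL
# name are the ones already used in this module.
def _example_redirect_after_post(request, question_id):  # illustrative only
    question = get_object_or_404(Question, pk=question_id)
    if request.method == 'POST':
        # ... handle request.POST here ...
        # Redirecting (rather than rendering) prevents the form data from
        # being submitted twice if the user reloads or presses Back.
        return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))
    return render(request, 'polls/detail.html', {'question': question})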
def index(request):
latest_question_list = Question.objects.order_by('-pub_date')[:5]
# template = loader.get_template('polls/index.html')
# context = RequestContext(request, {
# 'latest_question_list': latest_question_list,
# })
# return HttpResponse(template.render(context))
#from django.shortcuts import render
context = {'latest_question_list': latest_question_list}
return render(request, 'polls/index.html', context)
#return render("index.html",latest_question_list = latest_question_list)
# output = ', '.join([p.question_text for p in latest_question_list])
# return HttpResponse(output)
|
DingYuanfang/Django-simple-liwu
|
polls/views0.py
|
Python
|
mit
| 2,477
|
#!/usr/bin/env python
## \file filter_adjoint.py
# \brief Applies various filters to the adjoint surface sensitivities of an airfoil
# \author T. Lukaczyk, F. Palacios
# \version 5.0.0 "Raven"
#
# SU2 Lead Developers: Dr. Francisco Palacios (Francisco.D.Palacios@boeing.com).
# Dr. Thomas D. Economon (economon@stanford.edu).
#
# SU2 Developers: Prof. Juan J. Alonso's group at Stanford University.
# Prof. Piero Colonna's group at Delft University of Technology.
# Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# Prof. Alberto Guardone's group at Polytechnic University of Milan.
# Prof. Rafael Palacios' group at Imperial College London.
# Prof. Edwin van der Weide's group at the University of Twente.
# Prof. Vincent Terrapon's group at the University of Liege.
#
# Copyright (C) 2012-2017 SU2, the open-source CFD code.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
import os, math
from numpy import pi
from optparse import OptionParser
import numpy as np
import libSU2, libSU2_mesh
# plotting with matplotlib
try:
import pylab as plt
pylab_imported = True
except ImportError:
pylab_imported = False
# -------------------------------------------------------------------
# MAIN
# -------------------------------------------------------------------
def main():
# Command Line Options
parser=OptionParser()
parser.add_option( "-f", "--file", dest="filename",
help="read config from FILE", metavar="FILE" )
parser.add_option( "-t", "--type", dest="filter_type", default='LAPLACE',
help="apply filter TYPE", metavar="TYPE" )
parser.add_option( "-m", "--marker", dest="marker_name", default='airfoil',
help="use marker named TAG", metavar="TAG" )
parser.add_option( "-c", "--chord", dest="chord_length", default=1.0,
help="reference CHORD length", metavar="CHORD" )
(options, args)=parser.parse_args()
options.chord_length = float( options.chord_length )
# run filter
process_surface_adjoint( options.filename ,
options.filter_type ,
options.marker_name ,
options.chord_length )
#: def main()
# -------------------------------------------------------------------
# PROCESS SURFACE ADJOINT
# -------------------------------------------------------------------
def process_surface_adjoint( config_filename ,
filter_type='LAPLACE' ,
marker_name='airfoil' ,
chord_length=1.0 ):
print('')
print('-------------------------------------------------------------------------')
print('| SU2 Suite (Process Surface Adjoint) |')
print('-------------------------------------------------------------------------')
print('')
# some other defaults
c_clip = 0.01 # percent chord to truncate
fft_copy = 5 # number of times to copy the fft signal
smth_len = 0.05 # percent chord smoothing window length
lapl_len = 1e-4 # laplace smoothing parameter
# read config file
config_data = libSU2.Get_ConfigParams(config_filename)
surface_filename = config_data['SURFACE_ADJ_FILENAME'] + '.csv'
    print(surface_filename)
mesh_filename = config_data['MESH_FILENAME']
gradient = config_data['OBJECTIVE_FUNCTION']
print('Config filename = %s' % config_filename)
print('Surface filename = %s' % surface_filename)
print('Filter Type = %s' % filter_type)
# read adjoint data
adj_data = np.genfromtxt( surface_filename ,
dtype = float ,
delimiter = ',' ,
skip_header = 1 )
# read mesh data
mesh_data = libSU2_mesh.Read_Mesh(mesh_filename)
    # process adjoint data
P = map(int, adj_data[:,0] )
X = adj_data[:,6].copy()
Y = adj_data[:,7].copy()
Sens = adj_data[:,1].copy()
PsiRho = adj_data[:,2].copy()
    I = range(0,len(P)) # important - for unsorting during write
# store in dict by point index
adj_data_dict = dict( zip( P , zip(X,Y,Sens,PsiRho,I) ) )
# sort airfoil points
iP_sorted,_ = libSU2_mesh.sort_Airfoil(mesh_data,marker_name)
assert(len(iP_sorted) == len(P))
# rebuild airfoil loop
i = 0
for this_P in iP_sorted:
# the adjoint data entry
this_adj_data = adj_data_dict[this_P]
# re-sort
P[i] = this_P
X[i] = this_adj_data[0]
Y[i] = this_adj_data[1]
Sens[i] = this_adj_data[2]
PsiRho[i] = this_adj_data[3]
I[i] = this_adj_data[4]
# next
i = i+1
#: for each point
# calculate arc length
S = np.sqrt( np.diff(X)**2 + np.diff(Y)**2 ) / chord_length
S = np.cumsum( np.hstack([ 0 , S ]) )
    # tail truncating, by arc length
I_clip_lo = S < S[0] + c_clip
I_clip_hi = S > S[-1] - c_clip
S_clip = S.copy()
Sens_clip = Sens.copy()
Sens_clip[I_clip_hi] = Sens_clip[I_clip_hi][0]
Sens_clip[I_clip_lo] = Sens_clip[I_clip_lo][-1]
# some edge length statistics
dS_clip = np.diff(S_clip)
min_dS = np.min ( dS_clip )
mean_dS = np.mean( dS_clip )
max_dS = np.max ( dS_clip )
#print 'min_dS = %.4e ; mean_dS = %.4e ; max_dS = %.4e' % ( min_dS , mean_dS , max_dS )
# --------------------------------------------
# APPLY FILTER
if filter_type == 'FOURIER':
Freq_notch = [ 1/max_dS, np.inf ] # the notch frequencies
Sens_filter,Frequency,Power = fft_filter( S_clip,Sens_clip, Freq_notch, fft_copy )
#Sens_filter = smooth(S_clip,Sens_filter, 0.03,'blackman') # post smoothing
elif filter_type == 'WINDOW':
Sens_filter = window( S_clip, Sens_clip, smth_len, 'blackman' )
elif filter_type == 'LAPLACE':
Sens_filter = laplace( S_clip, Sens_clip, lapl_len )
elif filter_type == 'SHARPEN':
Sens_smooth = smooth( S_clip, Sens_clip , smth_len/5, 'blackman' ) # pre smoothing
Sens_smoother = smooth( S_clip, Sens_smooth, smth_len , 'blackman' )
Sens_filter = Sens_smooth + (Sens_smooth - Sens_smoother) # sharpener
else:
        raise Exception('unknown filter type')
# --------------------------------------------
# PLOTTING
if pylab_imported:
# start plot
fig = plt.figure(gradient)
plt.clf()
#if not fig.axes: # for comparing two filter calls
#plt.subplot(1,1,1)
#ax = fig.axes[0]
#if len(ax.lines) == 4:
#ax.lines.pop(0)
#ax.lines.pop(0)
# SENSITIVITY
plt.plot(S ,Sens ,color='b') # original
plt.plot(S_clip,Sens_filter,color='r') # filtered
plt.xlim(-0.1,2.1)
plt.ylim(-5,5)
plt.xlabel('Arc Length')
plt.ylabel('Surface Sensitivity')
#if len(ax.lines) == 4:
#seq = [2, 2, 7, 2]
#ax.lines[0].set_dashes(seq)
#ax.lines[1].set_dashes(seq)
plot_filename = os.path.splitext(surface_filename)[0] + '.png'
plt.savefig('Sens_'+plot_filename,dpi=300)
# zoom in
plt.ylim(-0.4,0.4)
plt.savefig('Sens_zoom_'+plot_filename,dpi=300)
# SPECTRAL
if filter_type == 'FOURIER':
plt.figure('SPECTRAL')
plt.clf()
plt.plot(Frequency,Power)
#plt.xlim(0,Freq_notch[0]+10)
plt.xlim(0,200)
plt.ylim(0,0.15)
plt.xlabel('Frequency (1/C)')
            plt.ylabel('Surface Sensitivity Spectral Power')
plt.savefig('Spectral_'+plot_filename,dpi=300)
#: if spectral plot
#: if plot
# --------------------------------------------
# SAVE SURFACE FILE
# reorder back to input surface points
Sens_out = np.zeros(len(S))
Sens_out[I] = Sens_filter # left over from sort
adj_data[:,1] = Sens_out
# get surface header
surface_orig = open(surface_filename,'r')
header = surface_orig.readline()
surface_orig.close()
# get list of prefix names
prefix_names = libSU2.get_AdjointPrefix(None)
prefix_names = prefix_names.values()
# add filter prefix, before adjoint prefix
surface_filename_split = surface_filename.rstrip('.csv').split('_')
if surface_filename_split[-1] in prefix_names:
surface_filename_split = surface_filename_split[0:-1] + ['filtered'] + [surface_filename_split[-1]]
else:
surface_filename_split = surface_filename_split + ['filtered']
surface_filename_new = '_'.join(surface_filename_split) + '.csv'
# write filtered surface file (only updates Sensitivity)
surface_new = open(surface_filename_new,'w')
surface_new.write(header)
for row in adj_data:
for i,value in enumerate(row):
if i > 0:
surface_new.write(', ')
if i == 0:
surface_new.write('%i' % value )
else:
surface_new.write('%.16e' % value )
surface_new.write('\n')
surface_new.close()
print('')
print('----------------- Exit Success (Process Surface Adjoint) ----------------')
print('')
return
#: def process_surface_adjoint()
# -------------------------------------------------------------------
# LAPLACIAN SMOOTHING
# -------------------------------------------------------------------
def laplace(t,x,e):
''' Laplacian filter
input:
t - time sample vector
x - signal vector x(t)
e - smoother coefficient (e>0)
output:
y: smoothed signal at t
'''
n_x = len(x)
# padding
t_1 = t[ 0] + t[-2]-t[-1]
t_2 = t[-1] + t[ 1]-t[ 0]
t_p = np.hstack([ t_1 , t , t_2 ])
x_p = np.hstack([ x[0] , x , x[-1] ])
# finite differencing
dt_f = t_p[2: ] - t_p[1:-1]
dt_b = t_p[1:-1] - t_p[0:-2]
dt_c = t_p[2: ] - t_p[0:-2]
# diagonal coefficients
Coeff = e * 2.0 / (dt_b*dt_f*dt_c)
diag_c = Coeff*dt_c
diag_f = -Coeff*dt_b
diag_b = -Coeff*dt_f
# system matrix
A = ( np.diag(diag_c , 0) +
np.diag(diag_f[0:-1], 1) +
np.diag(diag_b[1: ],-1) +
np.diag(np.ones(n_x), 0) )
# periodic conditions
#A[1,-1] = dt_b[0]
#A[-1,1] = dt_f[-1]
# rhs
b = np.array([x]).T
# boundary conditions
# signal start
i_d = 0
A[i_d,:] = 0.0
A[i_d,i_d] = 1.0 # dirichlet
#A[i_d,i_d+1] = 1.0 # neuman
#A[i_d,i_d] = -1.0
#b[i_d] = 0.0 #x[i_d+1]-x[i_d]
# signal end
i_d = n_x-1
A[i_d,:] = 0.0
A[i_d,i_d] = 1.0 # dirichlet
#A[i_d,i_d] = 1.0 # neuman
#A[i_d,i_d-1] = -1.0
#b[i_d] = 0.0 #x[i_d]-x[i_d-1]
# solve
y = np.linalg.solve(A,b)
y = y[:,0]
return y
#: def laplace
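# A minimal usage sketch for laplace() (not part of the original script);
# the sample signal below is hypothetical.
def _example_laplace_usage():  # illustrative only; never called by this script
    t = np.linspace(0.0, 1.0, 101)                   # arc-length samples
    x = np.sin(2*pi*t) + 0.05*np.random.randn(101)   # smooth signal plus noise
    return laplace(t, x, 1e-4)                       # smoothed copy of x, sampled at t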
# -------------------------------------------------------------------
# FFT NOTCH FILTER
# -------------------------------------------------------------------
def fft_filter(t,x,n,c=1):
''' Notch filter with Fast Fourier Transform
input:
t = input time vector
x = input signal vector
n = [low,high] frequency range to supress
c = number of times to duplicate signal
output:
y = smoothed signal at t
signal will be interpolated to constant spacing
'''
assert(c>0)
# choose sampling frequency
min_dt = np.min( np.diff(t) )
Ts = min_dt/2
Fs = 1/Ts
# interpolate to constant spacing
nt_lin = int( t[-1]/Ts )
t_lin = np.linspace(0,t[-1],nt_lin)
x_lin = np.interp(t_lin,t,x)
# pad last index
t_lin = np.hstack([ t_lin , t_lin[0:10]+t_lin[-1] ])
x_lin = np.hstack([ x_lin , np.ones(10)*x_lin[-1] ])
# copy signal
for ic in range(c-1):
t_lin = np.hstack([ t_lin[0:-2] , t_lin[1:]+t_lin[-1] ])
x_lin = np.hstack([ x_lin[0:-2] , x_lin[1:] ])
nt = len(t_lin)
# perform fourier transform
nxtpow2 = int(math.log(nt, 2))+1 # next power of 2
nfft = 2**nxtpow2 # fft efficiency
P = np.fft.rfft(x_lin,nfft) # the transform
a = np.angle(P) # complex
p = np.absolute(P) # complex
p = p/nt # normalize
p = 2*p[0:(nfft/2)] # symmetric
a = a[0:(nfft/2)] # symmetric
# frequency domain
F = np.arange(0,nfft/2) * Fs/nfft
# for return
Freq = F.copy()
# --------------------------------------------------
# THE NOTCH FILTER
# filter multiplier
k = np.ones(nfft/2)
# clip power within notch frequencies
I_fil = np.logical_and( F>n[0] , F<n[1] )
k[I_fil] = 0.0
# change the power spectrum
p = p*k
# For Return
Pow = p.copy()
# untransform
p = p*nt/2.
p = np.hstack( [ p , p[::-1] ] )
a = np.hstack( [ a , -a[::-1] ] )
P = p*(np.cos(a) + 1j*np.sin(a))
y_lin = np.fft.irfft(P,nfft) # the inverse transform
y_lin = y_lin[0:nt]
# interpolate back to given t
y = np.interp(t,t_lin,y_lin)
return y,Freq,Pow
# def: fft_filter()
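# A minimal usage sketch for fft_filter() (not part of the original script);
# the sample signal and notch range below are hypothetical.
def _example_fft_filter_usage():  # illustrative only; never called by this script
    t = np.linspace(0.0, 2.0, 401)                        # arc-length samples
    x = np.sin(2*pi*2*t) + 0.2*np.sin(2*pi*80*t)          # low-frequency signal + high-frequency ripple
    y, Freq, Pow = fft_filter(t, x, [40.0, np.inf], c=2)  # suppress everything above 40 cycles/unit
    return y, Freq, Pow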
# -------------------------------------------------------------------
# WINDOWED SMOOTHING
# -------------------------------------------------------------------
def window(t,x,window_delta,window='hanning'):
"""Smooth the data using a window with requested size and shape
original source:
http://www.scipy.org/Cookbook/SignalSmooth
input:
t: input time samples
x: input signal at t
window_delta: length (in units of t) of the window
window: type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
flat window will produce a moving average smoothing.
output:
y: smoothed signal at t
"""
    if x.ndim != 1:
        raise ValueError("smooth only accepts 1 dimension arrays.")
    if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError("Window is not one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")
# interpolate to constant time sample width
min_dt = np.min( np.diff(t) )
Ts = min_dt/2
nt_lin = int( t[-1]/Ts )
t_lin = np.linspace(0,t[-1],nt_lin)
x_lin = np.interp(t_lin,t,x)
# window sample length
window_len = int( window_delta / Ts )
# padding
s=np.r_[x_lin[window_len-1:0:-1],x_lin,x_lin[-1:-window_len:-1]]
# window template
if window == 'flat': #moving average
w=np.ones(window_len,'d')
else:
w=eval('np.'+window+'(window_len)')
# the filter
y_lin = np.convolve(w/w.sum(),s,mode='valid')
# remove padding
y_lin = y_lin[((window_len-1)/2):-(window_len/2)]
# interpolate back to given t
y = np.interp(t,t_lin,y_lin)
return y
#: def window()
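# A minimal usage sketch for window() (not part of the original script);
# the sample signal below is hypothetical.
def _example_window_usage():  # illustrative only; never called by this script
    t = np.linspace(0.0, 1.0, 201)
    x = np.sin(2*pi*t) + 0.1*np.random.randn(201)
    return window(t, x, 0.05, 'blackman')  # smooth over a window 5% of the arc length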
# -----------------------------------------------------------------
# Run Main from Command Line
# -----------------------------------------------------------------
if __name__ == '__main__': main()
|
cspode/SU2
|
SU2_PY/SU2/util/filter_adjoint.py
|
Python
|
lgpl-2.1
| 16,651
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/homoludens/projekti/eric4/brePodder/DialogAdd.ui'
#
# Created: Wed Feb 6 01:40:43 2008
# by: PyQt4 UI code generator 4.3.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(QtCore.QSize(QtCore.QRect(0,0,267,106).size()).expandedTo(Dialog.minimumSizeHint()))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed,QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
Dialog.setSizePolicy(sizePolicy)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setGeometry(QtCore.QRect(-80,70,341,32))
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.NoButton|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.lineEdit = QtGui.QLineEdit(Dialog)
self.lineEdit.setGeometry(QtCore.QRect(10,40,251,22))
self.lineEdit.setObjectName("lineEdit")
self.label = QtGui.QLabel(Dialog)
self.label.setGeometry(QtCore.QRect(10,20,141,16))
self.label.setObjectName("label")
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox,QtCore.SIGNAL("accepted()"),Dialog.accept)
QtCore.QObject.connect(self.buttonBox,QtCore.SIGNAL("rejected()"),Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Dialog", "Add Podcast\'s URL", None, QtGui.QApplication.UnicodeUTF8))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
|
homoludens/brePodder
|
Ui_DialogAdd.py
|
Python
|
gpl-3.0
| 2,216
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Openstack, LLC
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import logging
from django.conf import settings # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from novaclient.v1_1 import client as nova_client
from novaclient.v1_1.contrib.list_extensions import ListExtManager # noqa
from novaclient.v1_1 import security_group_rules as nova_rules
from novaclient.v1_1.security_groups import SecurityGroup as NovaSecurityGroup # noqa
from novaclient.v1_1.servers import REBOOT_HARD # noqa
from novaclient.v1_1.servers import REBOOT_SOFT # noqa
from horizon.conf import HORIZON_CONFIG # noqa
from horizon.utils.memoized import memoized # noqa
from openstack_dashboard.api import base
from openstack_dashboard.api import network_base
LOG = logging.getLogger(__name__)
# API static values
INSTANCE_ACTIVE_STATE = 'ACTIVE'
VOLUME_STATE_AVAILABLE = "available"
class VNCConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary returned by the
novaclient.servers.get_vnc_console method.
"""
_attrs = ['url', 'type']
class SPICEConsole(base.APIDictWrapper):
"""Wrapper for the "console" dictionary returned by the
novaclient.servers.get_spice_console method.
"""
_attrs = ['url', 'type']
class Server(base.APIResourceWrapper):
"""Simple wrapper around novaclient.server.Server
Preserves the request info so image name can later be retrieved
"""
_attrs = ['addresses', 'attrs', 'id', 'image', 'links',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'VirtualInterfaces', 'flavor', 'key_name',
'tenant_id', 'user_id', 'OS-EXT-STS:power_state',
'OS-EXT-STS:task_state', 'OS-EXT-SRV-ATTR:instance_name',
'OS-EXT-SRV-ATTR:host', 'created']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
@property
def image_name(self):
import glanceclient.exc as glance_exceptions
from openstack_dashboard.api import glance
if not self.image:
return "(not found)"
try:
image = glance.image_get(self.request, self.image['id'])
return image.name
except glance_exceptions.ClientException:
return "(not found)"
@property
def internal_name(self):
return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")
class NovaUsage(base.APIResourceWrapper):
"""Simple wrapper around contrib/simple_usage.py."""
_attrs = ['start', 'server_usages', 'stop', 'tenant_id',
'total_local_gb_usage', 'total_memory_mb_usage',
'total_vcpus_usage', 'total_hours']
def get_summary(self):
return {'instances': self.total_active_instances,
'memory_mb': self.memory_mb,
'vcpus': getattr(self, "total_vcpus_usage", 0),
'vcpu_hours': self.vcpu_hours,
'local_gb': self.local_gb,
'disk_gb_hours': self.disk_gb_hours}
@property
def total_active_instances(self):
return sum(1 for s in self.server_usages if s['ended_at'] is None)
@property
def vcpus(self):
return sum(s['vcpus'] for s in self.server_usages
if s['ended_at'] is None)
@property
def vcpu_hours(self):
return getattr(self, "total_hours", 0)
@property
def local_gb(self):
return sum(s['local_gb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def memory_mb(self):
return sum(s['memory_mb'] for s in self.server_usages
if s['ended_at'] is None)
@property
def disk_gb_hours(self):
return getattr(self, "total_local_gb_usage", 0)
class SecurityGroup(base.APIResourceWrapper):
"""Wrapper around novaclient.security_groups.SecurityGroup which wraps its
rules in SecurityGroupRule objects and allows access to them.
"""
_attrs = ['id', 'name', 'description', 'tenant_id']
@property
def rules(self):
"""Wraps transmitted rule info in the novaclient rule class."""
if "_rules" not in self.__dict__:
manager = nova_rules.SecurityGroupRuleManager(None)
rule_objs = [nova_rules.SecurityGroupRule(manager, rule)
for rule in self._apiresource.rules]
self._rules = [SecurityGroupRule(rule) for rule in rule_objs]
return self.__dict__['_rules']
class SecurityGroupRule(base.APIResourceWrapper):
""" Wrapper for individual rules in a SecurityGroup. """
_attrs = ['id', 'ip_protocol', 'from_port', 'to_port', 'ip_range', 'group']
def __unicode__(self):
if 'name' in self.group:
vals = {'from': self.from_port,
'to': self.to_port,
'group': self.group['name']}
return _('ALLOW %(from)s:%(to)s from %(group)s') % vals
else:
vals = {'from': self.from_port,
'to': self.to_port,
'cidr': self.ip_range['cidr']}
return _('ALLOW %(from)s:%(to)s from %(cidr)s') % vals
# The following attributes are defined to keep compatibility with Neutron
@property
def ethertype(self):
return None
@property
def direction(self):
return 'ingress'
class SecurityGroupManager(network_base.SecurityGroupManager):
backend = 'nova'
def __init__(self, request):
self.request = request
self.client = novaclient(request)
def list(self):
return [SecurityGroup(g) for g
in self.client.security_groups.list()]
def get(self, sg_id):
return SecurityGroup(self.client.security_groups.get(sg_id))
def create(self, name, desc):
return SecurityGroup(self.client.security_groups.create(name, desc))
def update(self, sg_id, name, desc):
return SecurityGroup(self.client.security_groups.update(sg_id,
name, desc))
def delete(self, security_group_id):
self.client.security_groups.delete(security_group_id)
def rule_create(self, parent_group_id,
direction=None, ethertype=None,
ip_protocol=None, from_port=None, to_port=None,
cidr=None, group_id=None):
# Nova Security Group API does not use direction and ethertype fields.
sg = self.client.security_group_rules.create(parent_group_id,
ip_protocol,
from_port,
to_port,
cidr,
group_id)
return SecurityGroupRule(sg)
def rule_delete(self, security_group_rule_id):
self.client.security_group_rules.delete(security_group_rule_id)
def list_by_instance(self, instance_id):
"""Gets security groups of an instance."""
# TODO(gabriel): This needs to be moved up to novaclient, and should
# be removed once novaclient supports this call.
security_groups = []
nclient = self.client
resp, body = nclient.client.get('/servers/%s/os-security-groups'
% instance_id)
if body:
# Wrap data in SG objects as novaclient would.
sg_objs = [NovaSecurityGroup(nclient.security_groups, sg,
loaded=True)
for sg in body.get('security_groups', [])]
# Then wrap novaclient's object with our own. Yes, sadly wrapping
# with two layers of objects is necessary.
security_groups = [SecurityGroup(sg) for sg in sg_objs]
return security_groups
def update_instance_security_group(self, instance_id, new_sgs):
wanted_groups = set(new_sgs)
try:
current_groups = self.list_by_instance(instance_id)
except Exception:
raise Exception(_("Couldn't get current security group "
"list for instance %s.")
% instance_id)
current_group_names = set(map(lambda g: g.id, current_groups))
groups_to_add = wanted_groups - current_group_names
groups_to_remove = current_group_names - wanted_groups
num_groups_to_modify = len(groups_to_add | groups_to_remove)
try:
for group in groups_to_add:
self.client.servers.add_security_group(instance_id, group)
num_groups_to_modify -= 1
for group in groups_to_remove:
self.client.servers.remove_security_group(instance_id, group)
num_groups_to_modify -= 1
except Exception:
raise Exception(_('Failed to modify %d instance security groups.')
% num_groups_to_modify)
return True
class FlavorExtraSpec(object):
def __init__(self, flavor_id, key, val):
self.flavor_id = flavor_id
self.id = key
self.key = key
self.value = val
class FloatingIp(base.APIResourceWrapper):
_attrs = ['id', 'ip', 'fixed_ip', 'port_id', 'instance_id', 'pool']
def __init__(self, fip):
fip.__setattr__('port_id', fip.instance_id)
super(FloatingIp, self).__init__(fip)
class FloatingIpPool(base.APIDictWrapper):
def __init__(self, pool):
pool_dict = {'id': pool.name,
'name': pool.name}
super(FloatingIpPool, self).__init__(pool_dict)
class FloatingIpTarget(base.APIDictWrapper):
def __init__(self, server):
server_dict = {'name': '%s (%s)' % (server.name, server.id),
'id': server.id}
super(FloatingIpTarget, self).__init__(server_dict)
class FloatingIpManager(network_base.FloatingIpManager):
def __init__(self, request):
self.request = request
self.client = novaclient(request)
def list_pools(self):
return [FloatingIpPool(pool)
for pool in self.client.floating_ip_pools.list()]
def list(self):
return [FloatingIp(fip)
for fip in self.client.floating_ips.list()]
def get(self, floating_ip_id):
return FloatingIp(self.client.floating_ips.get(floating_ip_id))
def allocate(self, pool):
return FloatingIp(self.client.floating_ips.create(pool=pool))
def release(self, floating_ip_id):
self.client.floating_ips.delete(floating_ip_id)
def associate(self, floating_ip_id, port_id):
# In Nova implied port_id is instance_id
server = self.client.servers.get(port_id)
fip = self.client.floating_ips.get(floating_ip_id)
self.client.servers.add_floating_ip(server.id, fip.ip)
def disassociate(self, floating_ip_id, port_id):
fip = self.client.floating_ips.get(floating_ip_id)
server = self.client.servers.get(fip.instance_id)
self.client.servers.remove_floating_ip(server.id, fip.ip)
def list_targets(self):
return [FloatingIpTarget(s) for s in self.client.servers.list()]
def get_target_id_by_instance(self, instance_id):
return instance_id
def is_simple_associate_supported(self):
return HORIZON_CONFIG["simple_ip_management"]
def novaclient(request):
insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
LOG.debug('novaclient connection created using token "%s" and url "%s"' %
(request.user.token.id, base.url_for(request, 'compute')))
c = nova_client.Client(request.user.username,
request.user.token.id,
project_id=request.user.tenant_id,
auth_url=base.url_for(request, 'compute'),
insecure=insecure,
http_log_debug=settings.DEBUG)
c.client.auth_token = request.user.token.id
c.client.management_url = base.url_for(request, 'compute')
return c
def server_vnc_console(request, instance_id, console_type='novnc'):
return VNCConsole(novaclient(request).servers.get_vnc_console(instance_id,
console_type)['console'])
def server_spice_console(request, instance_id, console_type='spice-html5'):
return SPICEConsole(novaclient(request).servers.get_spice_console(
instance_id, console_type)['console'])
def flavor_create(request, name, memory, vcpu, disk, flavorid='auto',
ephemeral=0, swap=0, metadata=None):
flavor = novaclient(request).flavors.create(name, memory, vcpu, disk,
flavorid=flavorid,
ephemeral=ephemeral,
swap=swap)
if (metadata):
flavor_extra_set(request, flavor.id, metadata)
return flavor
def flavor_delete(request, flavor_id):
novaclient(request).flavors.delete(flavor_id)
def flavor_get(request, flavor_id):
return novaclient(request).flavors.get(flavor_id)
@memoized
def flavor_list(request):
"""Get the list of available instance sizes (flavors)."""
return novaclient(request).flavors.list()
def flavor_get_extras(request, flavor_id, raw=False):
"""Get flavor extra specs."""
flavor = novaclient(request).flavors.get(flavor_id)
extras = flavor.get_keys()
if raw:
return extras
return [FlavorExtraSpec(flavor_id, key, value) for
key, value in extras.items()]
def flavor_extra_delete(request, flavor_id, keys):
"""Unset the flavor extra spec keys."""
flavor = novaclient(request).flavors.get(flavor_id)
return flavor.unset_keys(keys)
def flavor_extra_set(request, flavor_id, metadata):
"""Set the flavor extra spec keys."""
flavor = novaclient(request).flavors.get(flavor_id)
if (not metadata): # not a way to delete keys
return None
return flavor.set_keys(metadata)
def snapshot_create(request, instance_id, name):
return novaclient(request).servers.create_image(instance_id, name)
def keypair_create(request, name):
return novaclient(request).keypairs.create(name)
def keypair_import(request, name, public_key):
return novaclient(request).keypairs.create(name, public_key)
def keypair_delete(request, keypair_id):
novaclient(request).keypairs.delete(keypair_id)
def keypair_list(request):
return novaclient(request).keypairs.list()
def server_create(request, name, image, flavor, key_name, user_data,
security_groups, block_device_mapping, nics=None,
availability_zone=None, instance_count=1, admin_pass=None):
return Server(novaclient(request).servers.create(
name, image, flavor, userdata=user_data,
security_groups=security_groups,
key_name=key_name, block_device_mapping=block_device_mapping,
nics=nics, availability_zone=availability_zone,
min_count=instance_count, admin_pass=admin_pass), request)
def server_delete(request, instance):
novaclient(request).servers.delete(instance)
def server_get(request, instance_id):
return Server(novaclient(request).servers.get(instance_id), request)
def server_list(request, search_opts=None, all_tenants=False):
page_size = request.session.get('horizon_pagesize',
getattr(settings, 'API_RESULT_PAGE_SIZE',
20))
paginate = False
if search_opts is None:
search_opts = {}
elif 'paginate' in search_opts:
paginate = search_opts.pop('paginate')
if paginate:
search_opts['limit'] = page_size + 1
if all_tenants:
search_opts['all_tenants'] = True
else:
search_opts['project_id'] = request.user.tenant_id
servers = [Server(s, request)
for s in novaclient(request).servers.list(True, search_opts)]
has_more_data = False
if paginate and len(servers) > page_size:
servers.pop(-1)
has_more_data = True
elif paginate and len(servers) == getattr(settings, 'API_RESULT_LIMIT',
1000):
has_more_data = True
return (servers, has_more_data)
def server_console_output(request, instance_id, tail_length=None):
"""Gets console output of an instance."""
return novaclient(request).servers.get_console_output(instance_id,
length=tail_length)
def server_pause(request, instance_id):
novaclient(request).servers.pause(instance_id)
def server_unpause(request, instance_id):
novaclient(request).servers.unpause(instance_id)
def server_suspend(request, instance_id):
novaclient(request).servers.suspend(instance_id)
def server_resume(request, instance_id):
novaclient(request).servers.resume(instance_id)
def server_reboot(request, instance_id, soft_reboot=False):
hardness = REBOOT_HARD
if soft_reboot:
hardness = REBOOT_SOFT
novaclient(request).servers.reboot(instance_id, hardness)
def server_rebuild(request, instance_id, image_id, password=None):
return novaclient(request).servers.rebuild(instance_id, image_id,
password)
def server_update(request, instance_id, name):
return novaclient(request).servers.update(instance_id, name=name)
def server_migrate(request, instance_id):
novaclient(request).servers.migrate(instance_id)
def server_resize(request, instance_id, flavor, **kwargs):
novaclient(request).servers.resize(instance_id, flavor, **kwargs)
def server_confirm_resize(request, instance_id):
novaclient(request).servers.confirm_resize(instance_id)
def server_revert_resize(request, instance_id):
novaclient(request).servers.revert_resize(instance_id)
def server_start(request, instance_id):
novaclient(request).servers.start(instance_id)
def server_stop(request, instance_id):
novaclient(request).servers.stop(instance_id)
def tenant_quota_get(request, tenant_id):
return base.QuotaSet(novaclient(request).quotas.get(tenant_id))
def tenant_quota_update(request, tenant_id, **kwargs):
novaclient(request).quotas.update(tenant_id, **kwargs)
def default_quota_get(request, tenant_id):
return base.QuotaSet(novaclient(request).quotas.defaults(tenant_id))
def usage_get(request, tenant_id, start, end):
return NovaUsage(novaclient(request).usage.get(tenant_id, start, end))
def usage_list(request, start, end):
return [NovaUsage(u) for u in
novaclient(request).usage.list(start, end, True)]
def virtual_interfaces_list(request, instance_id):
return novaclient(request).virtual_interfaces.list(instance_id)
def get_x509_credentials(request):
return novaclient(request).certs.create()
def get_x509_root_certificate(request):
return novaclient(request).certs.get()
def instance_volume_attach(request, volume_id, instance_id, device):
return novaclient(request).volumes.create_server_volume(instance_id,
volume_id,
device)
def instance_volume_detach(request, instance_id, att_id):
return novaclient(request).volumes.delete_server_volume(instance_id,
att_id)
def instance_volumes_list(request, instance_id):
from openstack_dashboard.api.cinder import cinderclient # noqa
volumes = novaclient(request).volumes.get_server_volumes(instance_id)
for volume in volumes:
volume_data = cinderclient(request).volumes.get(volume.id)
volume.name = volume_data.display_name
return volumes
def hypervisor_list(request):
return novaclient(request).hypervisors.list()
def hypervisor_stats(request):
return novaclient(request).hypervisors.statistics()
def tenant_absolute_limits(request, reserved=False):
limits = novaclient(request).limits.get(reserved=reserved).absolute
limits_dict = {}
for limit in limits:
# -1 is used to represent unlimited quotas
if limit.value == -1:
limits_dict[limit.name] = float("inf")
else:
limits_dict[limit.name] = limit.value
return limits_dict
def availability_zone_list(request, detailed=False):
return novaclient(request).availability_zones.list(detailed=detailed)
def service_list(request):
return novaclient(request).services.list()
def aggregate_list(request):
result = []
for aggregate in novaclient(request).aggregates.list():
result.append(novaclient(request).aggregates.get_details(aggregate.id))
return result
@memoized
def list_extensions(request):
return ListExtManager(novaclient(request)).show_all()
@memoized
def extension_supported(extension_name, request):
"""
this method will determine if nova supports a given extension name.
example values for the extension_name include AdminActions, ConsoleOutput,
etc.
"""
extensions = list_extensions(request)
for extension in extensions:
if extension.name == extension_name:
return True
return False
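# A minimal usage sketch (not part of the original module); `request` is
# assumed to be an authenticated Django request, as elsewhere in this file.
def _example_extension_supported_usage(request):  # illustrative only; never called
    if extension_supported('AdminActions', request):
        return server_list(request, search_opts={'paginate': True})
    return ([], False)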
|
deepakselvaraj/federated-horizon
|
openstack_dashboard/api/nova.py
|
Python
|
apache-2.0
| 22,424
|
# app/data.py
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class CRUDMixin(object):
__table_args__ = {'extend_existing': True}
id = db.Column(db.Integer, primary_key=True)
@classmethod
def create(cls, commit=True, **kwargs):
instance = cls(**kwargs)
return instance.save(commit=commit)
@classmethod
def get(cls, id):
return cls.query.get(id)
@classmethod
def get_or_404(cls, id):
return cls.query.get_or_404(id)
def update(self, commit=True, **kwargs):
for attr, value in kwargs.iteritems():
setattr(self, attr, value)
return commit and self.save() or self
def save(self, commit=True):
db.session.add(self)
if commit:
db.session.commit()
return self
def delete(self, commit=True):
db.session.delete(self)
return commit and db.session.commit()
def query_to_list(query, include_field_names=True):
"""Turns a SQLAlchemy query into a list of data values."""
column_names = []
for i, obj in enumerate(query.all()):
if i == 0:
column_names = [c.name for c in obj.__table__.columns]
if include_field_names:
yield column_names
yield obj_to_list(obj, column_names)
def obj_to_list(sa_obj, field_order):
"""Takes a SQLAlchemy object - returns a list of all its data"""
return [getattr(sa_obj, field_name, None) for field_name in field_order]
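# A minimal usage sketch (not part of the original module); `Reading` stands
# in for any CRUDMixin/db.Model subclass and is assumed to have at least one row.
def _example_query_to_list_usage(Reading):  # illustrative only; never called
    rows = list(query_to_list(Reading.query))  # first row holds the column names
    header, data = rows[0], rows[1:]
    return header, data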
|
ChristopherGS/sensor_readings
|
app/data.py
|
Python
|
bsd-3-clause
| 1,496
|
stopCodons = ['TAG', 'TAA', 'TGA']
translation = {'CTT': 'L', 'ATG': 'M', 'AAG': 'K', 'AAA': 'K', 'ATC': 'I', 'AAC': 'N', 'ATA': 'I', 'AGG': 'R', 'CCT': 'P', 'ACT': 'T', 'AGC': 'S', 'ACA': 'T', 'AGA': 'R', 'CAT': 'H', 'AAT': 'N', 'ATT': 'I', 'CTG': 'L', 'CTA': 'L', 'CTC': 'L', 'CAC': 'H', 'ACG': 'T', 'CAA': 'Q', 'AGT': 'S', 'CAG': 'Q', 'CCG': 'P', 'CCC': 'P', 'TAT': 'Y', 'GGT': 'G', 'TGT': 'C', 'CGA': 'R', 'CCA': 'P', 'TCT': 'S', 'GAT': 'D', 'CGG': 'R', 'TTT': 'F', 'TGC': 'C', 'GGG': 'G', 'GGA': 'G', 'TGG': 'W', 'GGC': 'G', 'TAC': 'Y', 'GAG': 'E', 'TCG': 'S', 'TTA': 'L', 'GAC': 'D', 'TCC': 'S', 'GAA': 'E', 'TCA': 'S', 'GCA': 'A', 'GTA': 'V', 'GCC': 'A', 'GTC': 'V', 'GCG': 'A', 'GTG': 'V', 'TTC': 'F', 'GTT': 'V', 'GCT': 'A', 'ACC': 'T', 'TTG': 'L', 'CGT': 'R', 'CGC': 'R'}
import warnings, os
#from __future__ import print_function
class SequenceError(Exception):
pass
def codonify(sequence):
'''
Converts an input DNA sequence (str) to a list of codons.
'''
if type(sequence) == type([]):
return sequence
return [sequence[i:i+3] for i in list(range(0,len(sequence),3))]
def seqify(cod):
'''
Converts an input list of codons into a DNA sequence (str).
'''
if type(cod) == type("str"):
return cod
sequence = ""
for codon in cod:
sequence += codon
return sequence
def translate(codons):
'''
Translates a list of DNA codons into the corresponding amino
acids, stopping translation if a stop codon is encountered.
'''
codons = codonify(codons)
aa = ''
for i in list(range(0,len(codons))):
if codons[i] in stopCodons or len(codons[i]) != 3:
#codons = codons[:i]
if codons[i] in stopCodons:
aa = aa + '*'
break
try:
aa = aa + translation[codons[i]]
except KeyError as e:
raise SequenceError("Invalid codon: " + e.message)
return aa
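# A minimal usage sketch for translate() (not part of the original module).
def _example_translate_usage():  # illustrative only; never called by this module
    codons = codonify('ATGGCTTAA')  # ['ATG', 'GCT', 'TAA']
    return translate(codons)        # 'MA*' -- translation stops at the TAA stop codon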
def insertMutation(codons, mut):
'''
Takes as input a list of
`codons` to mutate and a tuple `mut` in the form
(index, 'mutated codon') ex. `(3, 'TAA')`.
Returns a list of codons with a mutation generated by
removing the codon to be mutated from the list, then adding
the newly mutated codon in its position.
'''
iCodons = [c for c in codons]
iCodons[mut[0]] = mut[1]
return iCodons
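# A minimal usage sketch for insertMutation() (not part of the original module).
def _example_insertMutation_usage():  # illustrative only; never called by this module
    codons = codonify('ATGGCTGGT')               # ['ATG', 'GCT', 'GGT']
    mutant = insertMutation(codons, (1, 'TAA'))  # ['ATG', 'TAA', 'GGT']
    return translate(mutant)                     # 'M*' -- the mutant now terminates early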
def pointMutant(seq, mut):
'''
Takes as input a sequence `seq` to mutate
and a tuple `mut` in the form (index, 'mutated nt') ex. `(3, 'A')`.
Returns a nucleotide sequence with a point mutation.
'''
return seq[:mut[0]] + mut[1] + seq[mut[0]+1:]
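# A minimal usage sketch for pointMutant() (not part of the original module).
def _example_pointMutant_usage():  # illustrative only; never called by this module
    return pointMutant('ATGGCT', (3, 'A'))  # 'ATGACT' -- nucleotide at index 3 replaced by 'A'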
def findOverprintedGene(seq, startIndex, frame=1):
'''
Given a sequence `seq` and the `startIndex` of
an overprinted gene, returns a list of codons that
correspond to the overprinted gene. The `frame`
argument is only necessary if the overprinted
gene's start codon is before the input sequence, in
which case `startIndex` must be -1. <br> <br>
**NOTE:** the index of the first nucleotide in `seq` is 0,
so if the overprinted gene starts from the 59th nucleotide
of `seq`, the `startIndex` will be 58.
'''
if startIndex != -1:
frame = 1 # In case `frame` argument provided erroneously
codons = codonify(seq[startIndex:])[:-1] # Remove last (incomplete) codon
else:
if frame == 1:
raise SequenceError("The overprinted sequence is in the same frame as the main coding sequence. Please provide a frame argument.")
codons = codonify(seq[frame - 1:])[:-1] # Remove last (incomplete) codon
for i in list(range(1,len(codons))):
if codons[i] in stopCodons:
codons = codons[:i]
break
if codons[0] != 'ATG' and startIndex != -1:
#NOTE: Not all viral genes are initiated with ATG.
warnings.warn("The first codon of your sequence is not a start codon.")
return codons
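# A minimal usage sketch for findOverprintedGene() (not part of the original
# module); the sequence below is hypothetical.
def _example_findOverprintedGene_usage():  # illustrative only; never called by this module
    seq = 'CCATGGGTAAACTTTGAA'
    return findOverprintedGene(seq, 2)  # ['ATG', 'GGT', 'AAA', 'CTT'] -- truncated at the TGA stop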
def reverseComplement(seq):
'''
Given a sequence `seq`, returns the reverse complement.
'''
seq = seqify(seq)
pairs = {'A':'T', 'T':'A', 'C':'G', 'G':'C'}
rev = ""
#Here should add reverse complements for regex sites? i.e. Y -> R
try:
for nt in seq[::-1]:
rev += pairs[nt]
except KeyError:
print("Unknown nucleotide '" + nt + "' encountered.")
return "False"
return rev
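# A minimal usage sketch for reverseComplement() (not part of the original module).
def _example_reverseComplement_usage():  # illustrative only; never called by this module
    return reverseComplement('ATGC')  # 'GCAT'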
def findOverlap(seq1, seq2, minimum=10):
'''
Given two sequences, returns a tuple `(i1, i2)` where `i1`
is the index in `seq1` where the overlap with `seq2` begins and
`i2` is the corresponding index in `seq2`. `minimum` is the minimum
overlap length considered (default 10).
'''
i1 = 0
i2 = 0
if seq1 in seq2:
i2 = seq2.index(seq1)
elif seq2 in seq1:
i1 = seq1.index(seq2)
else:
max12 = 0
max21 = 0
l1 = len(seq1)
l2 = len(seq2)
overall = min(l1,l2) + 1
for i in list(range(1, overall)):
if seq1[:i] == seq2[l2-i:]:
max21 = i
if seq2[:i] == seq1[l1-i:]:
max12 = i
if max12 > max21:
if max12 > minimum:
i1 = l1 - max12
else:
raise SequenceError("No overlap detected between input sequences")
else: #without this small overlaps equal on both sides are mishandled
if max21 > minimum:
i2 = l2 - max21
else:
raise SequenceError("No overlap detected between input sequences")
return (i1, i2)
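# A minimal usage sketch for findOverlap() (not part of the original module);
# the sequences below are hypothetical.
def _example_findOverlap_usage():  # illustrative only; never called by this module
    seq1 = 'AAAATTTTCCCCGGGG'
    seq2 = 'CCCCGGGGAAAA'
    return findOverlap(seq1, seq2, minimum=4)  # (8, 0): seq1[8:] overlaps the start of seq2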
def readFasta(loc):
'''
Reads in a FASTA file, returns tuples in the form
`('> identifying information', 'sequence')`.
'''
f = open(loc, 'r')
seqs = []
iden = ''
seq = ''
for line in f.readlines():
if iden == '':
try:
if line.lstrip()[0] != '>':
raise SequenceError("Invalid file format: id line doesn't begin with '>'")
iden = line.strip()
except IndexError: #blank line
next
else:
try:
if line.lstrip()[0] == '>':
seqs.append((iden, seq))
iden = line.strip()
seq = ''
else:
seq += line.strip().upper().replace(' ', '')
except IndexError: #blank line
next
seqs.append((iden, seq))
f.close()
return seqs
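# A minimal usage sketch for readFasta() (not part of the original module);
# it writes a throwaway FASTA file so the call is self-contained.
def _example_readFasta_usage():  # illustrative only; never called by this module
    import tempfile
    handle = tempfile.NamedTemporaryFile('w', suffix='.fasta', delete=False)
    handle.write(">seq1 example\nATGGCTTAA\n")
    handle.close()
    records = readFasta(handle.name)  # [('>seq1 example', 'ATGGCTTAA')]
    os.remove(handle.name)
    return records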
def writeFasta(fname, mutlist, seq, hasRxSites = False, rloc = "", floc = ""):
'''
Given a filename `fname`, list of mutations `mutlist` input sequence `seq`
and an optional file location `rloc` (relative location) or `floc` (absolute location),
generates a FASTA file with all mutants in the sequence.
Accepts input in two formats: if hasRxSites=False, assumes the mutations are of the form
`(mutant codon index, 'stop codon')`.
'''
fname = fname.replace('|', '.')[:30]
for character in fname:
if character not in '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz.,\'"()-_':
fname = fname.replace(character, ' ')
fname = ' '.join([i for i in fname.split(' ') if i != '']) + '.fasta'
if floc == "":
dirs = os.listdir(os.getcwd())
else:
dirs = os.listdir(floc)
if fname in dirs:
i = 1
while fname[:-6] + '(' + str(i) + ').fasta' in dirs:
i += 1
fname = fname[:-6] + '(' + str(i) + ').fasta'
del i
fasta = open(fname, 'w')
for m in mutlist:
fasta.write(">mutant at codon " + str(m[0][0]+1) +': \n' )
codons = codonify(seq)
mutSeq = seqify(codons[:m[0][0]]+[m[0][1]]+codons[m[0][0]+1:])
fasta.write('\n'.join([mutSeq[i:i+100] for i in range(0,len(mutSeq),100)]) +'\n')
fasta.close()
return True
|
louiejtaylor/pyViKO
|
pyviko/core.py
|
Python
|
mit
| 6,958
|
#!/usr/bin/env python
"""
Tetris Tk - A tetris clone written in Python using the Tkinter GUI library.
Controls:
Left Arrow Move left
Right Arrow Move right
Down Arrow Move down
Up Arrow        Drop Tetromino to the bottom
'a'             Rotate anti-clockwise (to the left)
's'             Rotate clockwise (to the right)
'p' Pause the game.
"""
from Tkinter import *
from time import sleep
from random import randint
import tkMessageBox
import sys
SCALE = 20
OFFSET = 3
MAXX = 10
MAXY = 22
NO_OF_LEVELS = 10
LEFT = "left"
RIGHT = "right"
DOWN = "down"
direction_d = { "left": (-1, 0), "right": (1, 0), "down": (0, 1) }
def level_thresholds( first_level, no_of_levels ):
"""
Calculates the score at which the level will change, for n levels.
"""
thresholds =[]
for x in xrange( no_of_levels ):
multiplier = 2**x
thresholds.append( first_level * multiplier )
return thresholds
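# A minimal usage sketch for level_thresholds() (not part of the original game).
def _example_level_thresholds():  # illustrative only; never called by the game
    return level_thresholds(500, 4)  # [500, 1000, 2000, 4000] -- each level needs double the score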
class status_bar( Frame ):
"""
Status bar to display the score and level
"""
def __init__(self, parent):
Frame.__init__( self, parent )
self.label = Label( self, bd=1, relief=SUNKEN, anchor=W )
self.label.pack( fill=X )
def set( self, format, *args):
self.label.config( text = format % args)
self.label.update_idletasks()
def clear( self ):
self.label.config(test="")
self.label.update_idletasks()
class Board( Frame ):
"""
The board represents the tetris playing area. A grid of x by y blocks.
"""
def __init__(self, parent, scale=20, max_x=10, max_y=20, offset=3):
"""
Init and config the tetris board, default configuration:
Scale (block size in pixels) = 20
max X (in blocks) = 10
max Y (in blocks) = 20
offset (in pixels) = 3
"""
Frame.__init__(self, parent)
        # blocks are indexed by their coordinates e.g. (4,5); these are the blocks that have 'landed'
self.landed = {}
self.parent = parent
self.scale = scale
self.max_x = max_x
self.max_y = max_y
self.offset = offset
self.canvas = Canvas(parent,
height=(max_y * scale)+offset,
width= (max_x * scale)+offset)
self.canvas.pack()
def check_for_complete_row( self, blocks ):
"""
Look for a complete row of blocks, from the bottom up until the top row
or until an empty row is reached.
"""
rows_deleted = 0
# Add the blocks to those in the grid that have already 'landed'
for block in blocks:
self.landed[ block.coord() ] = block.id
empty_row = 0
# find the first empty row
for y in xrange(self.max_y -1, -1, -1):
row_is_empty = True
for x in xrange(self.max_x):
if self.landed.get((x,y), None):
row_is_empty = False
break;
if row_is_empty:
empty_row = y
break
# Now scan up and until a complete row is found.
y = self.max_y - 1
while y > empty_row:
complete_row = True
for x in xrange(self.max_x):
if self.landed.get((x,y), None) is None:
complete_row = False
break;
if complete_row:
rows_deleted += 1
#delete the completed row
for x in xrange(self.max_x):
block = self.landed.pop((x,y))
self.delete_block(block)
del block
# move all the rows above it down
for ay in xrange(y-1, empty_row, -1):
for x in xrange(self.max_x):
block = self.landed.get((x,ay), None)
if block:
block = self.landed.pop((x,ay))
dx,dy = direction_d[DOWN]
self.move_block(block, direction_d[DOWN])
self.landed[(x+dx, ay+dy)] = block
# move the empty row down index down too
empty_row +=1
# y stays same as row above has moved down.
else:
y -= 1
#self.output() # non-gui diagnostic
# return the score, calculated by the number of rows deleted.
return (100 * rows_deleted) * rows_deleted
def output( self ):
for y in xrange(self.max_y):
line = []
for x in xrange(self.max_x):
if self.landed.get((x,y), None):
line.append("X")
else:
line.append(".")
print "".join(line)
def add_block( self, (x, y), colour):
"""
Create a block by drawing it on the canvas, return
it's ID to the caller.
"""
rx = (x * self.scale) + self.offset
ry = (y * self.scale) + self.offset
return self.canvas.create_rectangle(
rx, ry, rx+self.scale, ry+self.scale, fill=colour
)
def move_block( self, id, coord):
"""
Move the block, identified by 'id', by x and y. Note this is a
relative movement, e.g. move 10, 10 means move 10 pixels right and
10 pixels down NOT move to position 10,10.
"""
x, y = coord
self.canvas.move(id, x*self.scale, y*self.scale)
def delete_block(self, id):
"""
Delete the identified block
"""
self.canvas.delete( id )
def check_block( self, (x, y) ):
"""
Check if the x, y coordinate can have a block placed there.
That is; if there is a 'landed' block there or it is outside the
board boundary, then return False, otherwise return true.
"""
if x < 0 or x >= self.max_x or y < 0 or y >= self.max_y:
return False
elif self.landed.has_key( (x, y) ):
return False
else:
return True
class Block(object):
def __init__( self, id, (x, y)):
self.id = id
self.x = x
self.y = y
def coord( self ):
return (self.x, self.y)
class shape(object):
"""
Shape is the Base class for the game pieces e.g. square, T, S, Z, L,
reverse L and I. Shapes are constructed of blocks.
"""
@classmethod
def check_and_create(cls, board, coords, colour ):
"""
Check if the blocks that make the shape can be placed in empty coords
before creating and returning the shape instance. Otherwise, return
None.
"""
for coord in coords:
if not board.check_block( coord ):
return None
return cls( board, coords, colour)
def __init__(self, board, coords, colour ):
"""
Initialise the shape base.
"""
self.board = board
self.blocks = []
for coord in coords:
block = Block(self.board.add_block( coord, colour), coord)
self.blocks.append( block )
def move( self, direction ):
"""
Move the blocks in the direction indicated by adding (dx, dy) to the
current block coordinates
"""
d_x, d_y = direction_d[direction]
for block in self.blocks:
x = block.x + d_x
y = block.y + d_y
if not self.board.check_block( (x, y) ):
return False
for block in self.blocks:
x = block.x + d_x
y = block.y + d_y
self.board.move_block( block.id, (d_x, d_y) )
block.x = x
block.y = y
return True
def rotate(self, clockwise = True):
"""
Rotate the blocks around the 'middle' block, 90-degrees. The
middle block is always the index 0 block in the list of blocks
that make up a shape.
"""
# TO DO: Refactor for DRY
middle = self.blocks[0]
rel = []
for block in self.blocks:
rel.append( (block.x-middle.x, block.y-middle.y ) )
# to rotate 90-degrees (x,y) = (-y, x)
# First check that the there are no collisions or out of bounds moves.
for idx in xrange(len(self.blocks)):
rel_x, rel_y = rel[idx]
if clockwise:
x = middle.x+rel_y
y = middle.y-rel_x
else:
x = middle.x-rel_y
y = middle.y+rel_x
if not self.board.check_block( (x, y) ):
return False
for idx in xrange(len(self.blocks)):
rel_x, rel_y = rel[idx]
if clockwise:
x = middle.x+rel_y
y = middle.y-rel_x
else:
x = middle.x-rel_y
y = middle.y+rel_x
diff_x = x - self.blocks[idx].x
diff_y = y - self.blocks[idx].y
self.board.move_block( self.blocks[idx].id, (diff_x, diff_y) )
self.blocks[idx].x = x
self.blocks[idx].y = y
return True
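# A minimal sketch (not part of the original game) of the rotation arithmetic
# used in shape.rotate() above: a 90-degree clockwise turn about a pivot maps
# the relative offset (rel_x, rel_y) to (rel_y, -rel_x).
def _example_rotate_offsets(pivot, offsets, clockwise=True):  # illustrative only
    if clockwise:
        return [(pivot[0] + ry, pivot[1] - rx) for rx, ry in offsets]
    return [(pivot[0] - ry, pivot[1] + rx) for rx, ry in offsets]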
class shape_limited_rotate( shape ):
"""
This is a base class for the shapes like the S, Z and I that don't fully
rotate (which would result in the shape moving *up* one block on a 180).
Instead they toggle between 90 degrees clockwise and then back 90 degrees
anti-clockwise.
"""
def __init__( self, board, coords, colour ):
self.clockwise = True
super(shape_limited_rotate, self).__init__(board, coords, colour)
def rotate(self, clockwise=True):
"""
Clockwise, is used to indicate if the shape should rotate clockwise
or back again anti-clockwise. It is toggled.
"""
super(shape_limited_rotate, self).rotate(clockwise=self.clockwise)
if self.clockwise:
self.clockwise=False
else:
self.clockwise=True
class square_shape( shape ):
@classmethod
def check_and_create( cls, board ):
coords = [(4,0),(5,0),(4,1),(5,1)]
return super(square_shape, cls).check_and_create(board, coords, "red")
def rotate(self, clockwise=True):
"""
Override the rotate method for the square shape to do exactly nothing!
"""
pass
class t_shape( shape ):
@classmethod
def check_and_create( cls, board ):
coords = [(4,0),(3,0),(5,0),(4,1)]
return super(t_shape, cls).check_and_create(board, coords, "yellow" )
class l_shape( shape ):
@classmethod
def check_and_create( cls, board ):
coords = [(4,0),(3,0),(5,0),(3,1)]
return super(l_shape, cls).check_and_create(board, coords, "orange")
class reverse_l_shape( shape ):
@classmethod
def check_and_create( cls, board ):
coords = [(5,0),(4,0),(6,0),(6,1)]
return super(reverse_l_shape, cls).check_and_create(
board, coords, "green")
class z_shape( shape_limited_rotate ):
@classmethod
def check_and_create( cls, board ):
coords =[(5,0),(4,0),(5,1),(6,1)]
return super(z_shape, cls).check_and_create(board, coords, "purple")
class s_shape( shape_limited_rotate ):
@classmethod
def check_and_create( cls, board ):
coords =[(5,1),(4,1),(5,0),(6,0)]
return super(s_shape, cls).check_and_create(board, coords, "magenta")
class i_shape( shape_limited_rotate ):
@classmethod
def check_and_create( cls, board ):
coords =[(4,0),(3,0),(5,0),(6,0)]
return super(i_shape, cls).check_and_create(board, coords, "blue")
class game_controller(object):
"""
    Main game loop; receives GUI callback events for keypresses etc...
"""
def __init__(self, parent):
"""
        Initialise the game...
"""
self.parent = parent
self.score = 0
self.level = 0
self.delay = 1000 #ms
#lookup table
self.shapes = [square_shape,
t_shape,
l_shape,
reverse_l_shape,
z_shape,
s_shape,
i_shape ]
self.thresholds = level_thresholds( 500, NO_OF_LEVELS )
self.status_bar = status_bar( parent )
self.status_bar.pack(side=TOP,fill=X)
#print "Status bar width",self.status_bar.cget("width")
self.status_bar.set("Score: %-7d\t Level: %d " % (
self.score, self.level+1)
)
self.board = Board(
parent,
scale=SCALE,
max_x=MAXX,
max_y=MAXY,
offset=OFFSET
)
self.board.pack(side=BOTTOM)
self.parent.bind("<Left>", self.left_callback)
self.parent.bind("<Right>", self.right_callback)
self.parent.bind("<Up>", self.up_callback)
self.parent.bind("<Down>", self.down_callback)
self.parent.bind("a", self.a_callback)
self.parent.bind("s", self.s_callback)
self.parent.bind("p", self.p_callback)
self.shape = self.get_next_shape()
#self.board.output()
self.after_id = self.parent.after( self.delay, self.move_my_shape )
def handle_move(self, direction):
#if you can't move then you've hit something
if not self.shape.move( direction ):
            # if you're heading down then the shape has 'landed'
if direction == DOWN:
self.score += self.board.check_for_complete_row(
self.shape.blocks
)
del self.shape
self.shape = self.get_next_shape()
                # If the shape returned is None, then this indicates that
                # the check before creating it failed and the
                # game is over!
if self.shape is None:
tkMessageBox.showwarning(
title="GAME OVER",
message ="Score: %7d\tLevel: %d\t" % (
self.score, self.level),
parent=self.parent
)
Toplevel().destroy()
self.parent.destroy()
sys.exit(0)
# do we go up a level?
if (self.level < NO_OF_LEVELS and
self.score >= self.thresholds[ self.level]):
self.level+=1
self.delay-=100
self.status_bar.set("Score: %-7d\t Level: %d " % (
self.score, self.level+1)
)
# Signal that the shape has 'landed'
return False
return True
def left_callback( self, event ):
if self.shape:
self.handle_move( LEFT )
def right_callback( self, event ):
if self.shape:
self.handle_move( RIGHT )
def up_callback( self, event ):
if self.shape:
            # drop the tetromino to the bottom
while self.handle_move( DOWN ):
pass
def down_callback( self, event ):
if self.shape:
self.handle_move( DOWN )
def a_callback( self, event):
if self.shape:
self.shape.rotate(clockwise=True)
def s_callback( self, event):
if self.shape:
self.shape.rotate(clockwise=False)
def p_callback(self, event):
self.parent.after_cancel( self.after_id )
tkMessageBox.askquestion(
title = "Paused!",
message="Continue?",
type=tkMessageBox.OK)
self.after_id = self.parent.after( self.delay, self.move_my_shape )
def move_my_shape( self ):
if self.shape:
self.handle_move( DOWN )
self.after_id = self.parent.after( self.delay, self.move_my_shape )
def get_next_shape( self ):
"""
        Randomly select which tetromino will be used next.
"""
the_shape = self.shapes[ randint(0,len(self.shapes)-1) ]
return the_shape.check_and_create(self.board)
if __name__ == "__main__":
root = Tk()
root.title("Tetris Tk")
theGame = game_controller( root )
root.mainloop()
|
strommer/Python_rpi
|
tetris_tk.py
|
Python
|
gpl-2.0
| 17,632
|
from utils.voiceParser import *
COMMANDS = [
{
'aliases': ['please set', 'pls set', 'please','pls', 'go','set'],
'script': 'env python3 kodiCrud.py',
'hasArgs': True
},
{
'aliases': ['music','play music'],
'script': 'env python3 playYtPlaylist.py'
},
{
'aliases': ['play playlist', 'search playlist'],
'script': 'env python3 playYtSearchPlaylist.py',
'hasArgs': True
},
{
'aliases': ['play'],
'script': 'env python3 playYtSearch.py 10',
'hasArgs': True
},
{
'aliases': ['next','skip'],
'script': 'env python3 kodiCrud.py next'
},
{
'aliases': ['pause'],
'script': 'env python3 kodiCrud.py pause'
}
]
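# Illustrative note: each entry above maps spoken aliases to a shell command;
# 'hasArgs' presumably tells utils.voiceParser.listen to pass the rest of the
# recognised utterance as arguments to the script. A hypothetical extra entry
# might look like:
#
#   {
#       'aliases': ['volume'],
#       'script': 'env python3 kodiCrud.py volume',
#       'hasArgs': True
#   }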
listen(COMMANDS)
|
hamidzr/kodi-extras
|
voiceControl.py
|
Python
|
mit
| 790
|
"""Tests for sudo handling of engage_utils.process and engage.drivers.action.
There are three modes that we can be running in:
1. If running as root, sudo actions will be transparently run directly
2. If running as normal user, but no password is needed for sudo access,
process utilities will call sudo -n.
3. If running as normal user, and a sudo password is needed, process
utilities will call sudo -p "" -S and provide the password via stdin.
If a sudo password is required, the tests cannot be run under Nose - they will
only be available as a command line script. For the other two cases, the tests
will be run.
"""
import sys
import os.path
import getpass
import logging
import time
import optparse
import tempfile
try:
import engage
except:
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
import engage
import engage_utils.process as process
import engage.drivers.action as action
from engage.drivers.action import _check_file_exists
import engage.tests.test_common as tc
logger = logging.getLogger()
class TestError(Exception):
pass
def _assert(pred, msg):
if not pred:
raise TestError(msg)
server_log_dir = None
def tst_run_program(sudo_password):
logger.debug("tst_run_program() Starting")
process.run_sudo_program(["/bin/ls", "/"], sudo_password,
logger)
process.run_sudo_program(["/bin/ls", "/"], sudo_password,
logger, user=getpass.getuser())
logger.debug("tst_run_program() Successful")
def tst_cat_file(sudo_password):
"""This test relies on the fact that sudo_cat_file() doesn't actually
    check the file permissions, but just returns the file contents"""
logger.debug("tst_cat_file() Starting")
data = process.sudo_cat_file(__file__, logger, sudo_password)
_assert(len(data)>0,
"Should have gotten length of this test file, instead got 0")
logger.debug("tst_cat_file() Successful")
def tst_run_sudo_program_and_scan_results(sudo_password):
logger.debug("tst_run_sudo_program_and_scan_results() Starting")
cmd = ["/bin/ls", "/"]
re_map={"bin_dir":"^bin$"}
(rc, result_map) = process.run_sudo_program_and_scan_results(cmd, re_map,
logger,
sudo_password,
log_output=True)
_assert(rc==0, "Return code of /bin/ls was %d" % rc)
_assert(result_map["bin_dir"]==True,
"Expecting bin_dir regexp to be found, got %s" %
result_map["bin_dir"])
logger.debug("tst_run_sudo_program_and_scan_results() Successful")
def tst_sudo_run_server(sudo_password):
"""Tests of sudo_run_server(), sudo_check_server_status,
and sudo_stop_server_process()
"""
logger.debug("tst_sudo_run_server() Starting")
if server_log_dir:
td = server_log_dir
if not os.path.exists(server_log_dir):
os.makedirs(server_log_dir)
else:
td = tempfile.mkdtemp(prefix="test_sudo")
try:
pidfile = os.path.join(td, "server.pid")
logger.debug("pidfile=%s" % pidfile)
cwd = os.path.abspath(os.path.dirname(__file__))
process.sudo_run_server([sys.executable,
os.path.abspath(__file__), "--run-server",
pidfile],
{}, os.path.join(td, "server.log"),
logger, sudo_password,
cwd=cwd)
found = False
for i in range(5):
pid = process.sudo_check_server_status(pidfile, logger,
sudo_password)
if pid!=None:
logger.info("Verified that server started. Pid is %d" % pid)
found = True
break
else:
time.sleep(5)
_assert(found, "Test server processs not found after 25 seconds")
process.sudo_stop_server_process(pidfile, logger, "test",
sudo_password)
found = True
for i in range(5):
pid = process.sudo_check_server_status(pidfile, logger,
sudo_password)
if pid==None:
logger.info("Verified that server stopped")
found = False
break
else:
time.sleep(5)
_assert(not found, "Test server processs not stopped after 25 seconds")
finally:
if not server_log_dir:
process.sudo_rm(td, sudo_password, logger)
logger.debug("tst_sudo_run_server() Successful")
class LsAction(action.SudoAction):
"""This is a test action that just runs /bin/ls under the regular user
or super user.
"""
NAME="LsAction"
def __init__(self, ctx):
super(LsAction, self).__init__(ctx)
def run(self, path):
rc = process.run_and_log_program(["/bin/ls", path],
{}, self.ctx.logger)
if rc!=0:
raise Exception("Bad rc from /bin/ls: %d" % rc)
def dry_run(self, path):
pass
def sudo_run(self, path):
process.run_sudo_program(["/bin/ls", path],
self.ctx._get_sudo_password(self),
self.ctx.logger)
class CatValueAction(action.SudoValueAction):
NAME = "CatValueAction"
def __init__(self, ctx):
super(CatValueAction, self).__init__(ctx)
def run(self, path, mode="b"):
_check_file_exists(path, self)
with open(path, "r" + mode) as f:
data = f.read()
return data
def sudo_run(self, path, mode="b"):
return process.sudo_cat_file(path, self.ctx.logger,
self.ctx._get_sudo_password(self))
def dry_run(self, path, mode="b"):
pass
def _make_context(testname, sudo_password):
ctx = action.Context({"id": testname}, logger, __file__,
lambda : sudo_password,
dry_run=False)
return ctx
def tst_r_su(sudo_password):
"""Test the r_su() method of the action module's Context object
"""
ctx = _make_context("tst_r_su", sudo_password)
ctx.r(LsAction, "/")
ctx.r_su(LsAction, "/")
def tst_rv_su(sudo_password):
ctx = _make_context("tst_rv_su", sudo_password)
d1 = ctx.rv(CatValueAction, __file__)
d2 = ctx.rv_su(CatValueAction, __file__)
_assert(d1 == d2, "Data from su and regular file reading are different")
ALL_TESTS = [tst_run_program, tst_cat_file,
tst_run_sudo_program_and_scan_results,
tst_sudo_run_server, tst_r_su, tst_rv_su]
def test_sudo_generator():
"""Generate sudo tests for Nose. If a pasword is required (meaning we cannot
run non-interactively), don't generate any tests.
"""
if process.SUDO_PASSWORD_REQUIRED==True:
tests = []
else:
tests = ALL_TESTS
for test in tests:
yield test, None
def run_server(pidfile):
logger.info("Starting server")
with open(pidfile, "w") as f:
f.write("%d" % os.getpid())
logger.debug("Wrote pidfile %s" % pidfile)
while True:
time.sleep(5)
logger.debug("Server woke up")
return 0
if __name__ == "__main__":
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
root_logger.addHandler(h)
usage = "%prog [options] test1 test2...\n If not tests specified, all are run"
parser = optparse.OptionParser(usage=usage)
parser.add_option("--run-server", default=False, action="store_true",
help="If specified, start a test server instead of running the tests")
parser.add_option("--server-log-dir", default=None,
help="If specified, use the directory for server logfiles rather than a temporary directory")
(options, args) = parser.parse_args()
if options.run_server and options.server_log_dir!=None:
parser.error("Option --server-log-dir not valid with --run-server")
if options.run_server:
if len(args)!=1:
parser.error("Need to specify pidfile")
sys.exit(run_server(args[0]))
if options.server_log_dir:
server_log_dir = os.path.abspath(os.path.expanduser(options.server_log_dir))
if len(args)==0:
tests = ALL_TESTS
else:
test_names_to_functions = {}
for tf in ALL_TESTS:
test_names_to_functions[tf.__name__] = tf
tests = []
all_test_names = [t.__name__ for t in ALL_TESTS]
for test in args:
if not test_names_to_functions.has_key(test):
parser.error("Unknown test %s. Valid tests are: %s" %
(test, ', '.join(all_test_names)))
tests.append(test_names_to_functions[test])
if process.SUDO_PASSWORD_REQUIRED==True:
sudo_password = getpass.getpass("Sudo Password:")
else:
sudo_password = None
logger.info("Starting sudo tests, SUDO_PASSWORD_REQUIRED=%s" %
process.SUDO_PASSWORD_REQUIRED)
for test in tests:
test(sudo_password)
logger.info("sudo tests successful")
sys.exit(0)
|
quaddra/engage
|
python_pkg/engage/tests/test_sudo.py
|
Python
|
apache-2.0
| 9,510
|
"""
Used for two things: 1. Locally for git workflow, 2. Update remote servers.
"""
from __future__ import with_statement
from fabric.api import *
from fabric.colors import *
from fabric.context_managers import cd, lcd
import os.path
HOST = '' # e.g. seb@bla.com
REMOTE_BASE_DIR = '/webapps/seb_django/www' # absolute path, where project/repo lives
REMOTE_ERR_FILE = '/webapps/seb_django/logs/00UPDATE_203341.err' # absolute path
REMOTE_LOG_FILE = '/webapps/seb_django/logs/00UPDATE_203341.log' # absolute path
REPO_NAME = 'genomics-tutorial' # basename of project
REPO_URL = 'git@github.com:sschmeier/genomics-tutorial.git' # e.g. github url
REPO_BRANCH = 'gh-pages' # this is the branch to clone on servers
@hosts('seb@vm010865.massey.ac.nz', 'seb@vm010944.massey.ac.nz') # only for deploy
def logs():
""" Reading Massey server log-files and print to stdout."""
puts(yellow("[Reading log-file]"))
run("cat %s" % REMOTE_LOG_FILE)
puts(yellow("[Reading err-file]"))
run("cat %s" % REMOTE_ERR_FILE)
@hosts('seb@vm010865.massey.ac.nz', 'seb@vm010944.massey.ac.nz') # only for deploy
def deploymassey(activate_env=True, conda=None):
""" Deploy project to Massey servers."""
remote_dir = os.path.abspath(os.path.join(REMOTE_BASE_DIR, REPO_NAME))
if activate_env:
if conda:
puts(yellow("[Activate conda env]"))
run('source activate %s' %(conda))
else:
puts(yellow("[Activate env through ~/bin/activate]"))
run('source ~/bin/activate')
with settings(warn_only=True):
if run("test -d %s" % (remote_dir)).failed:
puts(red("[Repo %s does not exist on remote at: %s]" % (REPO_NAME, remote_dir)))
with cd(REMOTE_BASE_DIR):
run("git clone -b %s %s %s" % (REPO_BRANCH, REPO_URL, REPO_NAME))
puts(yellow("[Write logs]"))
run("echo '-----------------------------' > %s" % REMOTE_ERR_FILE)
run("echo `date` >> %s" % REMOTE_ERR_FILE)
run("echo '-----------------------------' >> %s" % REMOTE_ERR_FILE)
run("echo '-----------------------------' > %s" % REMOTE_LOG_FILE)
run("echo `date` >> %s" % REMOTE_LOG_FILE)
run("echo '-----------------------------' >> %s" % REMOTE_LOG_FILE)
puts(yellow("[Update repo: %s]" % REPO_NAME))
with cd(remote_dir):
run("git pull origin %s >> %s 2>> %s" %
(REPO_BRANCH, REMOTE_LOG_FILE, REMOTE_ERR_FILE))
def git(br, to_br='master', v=None):
"""Execute local git checkout master, merge branch into master.
Keyword arguments:
br -- the branch that should be merged into 'to_br'
to_br -- branch to merge to (defaults to master).
    v -- new version/tag number; if given, this will create a repo tag.
Usage:
fab git:br='new_feature',v='v1.2.5'
"""
# co master and merge
puts(yellow("[Checkout branch %s]"%(to_br)))
local("git checkout %s"%(to_br))
puts(yellow('[Merge branch "%s" into %s]'%(br,to_br)))
local("git merge %s --no-ff" %br)
with settings(warn_only=True):
if v:
puts(yellow("[Bump version: %s]"%v))
# bump version number: project specific
local("sed -i '' 's/v.\..\../%s/g' VERSION.txt" %v)
            # add VERSION.txt and commit
local("git add VERSION.txt")
local('git commit -m "Bumped to %s"' %v)
# add tag
puts(yellow("[Tag new version: %s]"%v))
local('git tag -a %s'%v)
puts(yellow('[Delete dev branch "%s"]'%(br)))
local('git branch -D %s'%(br))
def deploy(msg, br='master'):
"""Deploy master to remotes.
- make latexpdf
- copy pdf to _static.
    - Save conda packages of the used env in conda-package-list.txt
- commit changes.
- push master to gitlab remote.
- push master to origin (github)
- make clean; make html
- commit html to gh-pages
- push gh-pages to github/gh-pages
Keyword arguments:
msg -- commit message
br -- the branch that should be pushed
Usage:
fab deploy:msg="This is a commit message"
"""
# co branch
puts(yellow("[Checkout branch %s]"%(br)))
local("git checkout %s"%(br))
# create new pdf
puts(yellow('[Make latexpdf]'))
local("make latexpdf > /tmp/latex")
puts(yellow('[Copy pdf]'))
local("cp ../build/latex/*.pdf _static/")
# save package list
puts(yellow('[Conda package list]'))
local("conda list --export > conda-package-list.txt")
# save conda env
puts(yellow('[Conda env freeze]'))
local("conda env export > conda-env-freeze.yml")
puts(yellow('[git stage/commit changes]'))
local("git add -u")
local('git commit -m "%s"' %(msg))
# push changes to gitlab
puts(yellow("[Push %s to gitlab]"%(br)))
local("git push gitlab %s"%(br))
# push changes to github
puts(yellow("[Push %s to github]"%(br)))
local("git push origin %s"%(br))
# for html to github gh-pages
puts(yellow("[Make html for github/gh-pages]"))
local("make clean; make html")
puts(yellow("[Push gh-pages to github]"))
puts(red("Will NOT add newly created content. Only already tracked content."))
with lcd("../build/html"):
local("git add -u")
local('git commit -m "%s"'%(msg))
local("git push origin gh-pages")
|
sschmeier/genomics-tutorial
|
fabfile.py
|
Python
|
mit
| 5,363
|
# -*- coding: utf-8 -*-
## @package pil_exif
#
# pil_exif utility package.
# @author Tody
# @date 2016/07/22
from PIL import Image
from PIL.ExifTags import TAGS, GPSTAGS
## Get the Exif data.
def getExif(file_path):
image = Image.open(file_path)
exif = image._getexif()
if exif is None:
return
exif_data = {}
for tag_id, value in exif.items():
tag = TAGS.get(tag_id, tag_id)
        # Handle GPS info separately.
if tag == "GPSInfo":
gps_data = {}
for t in value:
gps_tag = GPSTAGS.get(t, t)
gps_data[gps_tag] = value[t]
exif_data[tag] = gps_data
else:
exif_data[tag] = value
return exif_data
if __name__ == '__main__':
exif_data = getExif("sample.jpg")
for key, value in exif_data.items():
print(key, value)
|
tody411/PyIntroduction
|
lib/pil_exif.py
|
Python
|
mit
| 895
|
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
import requests
import socket
import socks
import os
class ehentai():
def request(self , url):
socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
socket.socket = socks.socksocket
# proxies = { "http": "http://127.0.0.1:1080", "https":'http://127.0.0.1:1080',}
header = {
'Referer':'http://g.e-hentai.org/',
'Cookie':'eap_45442=1; ipb_member_id=3478845; ipb_pass_hash=5f44a298d72b120d278edbcf95ed95b1; ipb_session_id=1319b9d7b963ebcc42e95e862b3dac63; s=532b39fd8',
'User-Agent':'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:21.0) Gecko/20100101 Firefox/21.0'
}
response = requests.get(url,headers = header)
return response
def mkdir(self , path):
front_pass = "/Users/Anhedonia/Desktop/存储/本子/"
isExists = os.path.exists(os.path.join(front_pass, path))
if not isExists:
os.makedirs(os.path.join(front_pass,path))
os.chdir(front_pass+'/'+path)
else:
os.chdir(front_pass+'/'+path)
def save(self , img_url , img):
name = img_url[-7:-4]
f = open(name+'.jpg', 'ab')
f.write(img.content)
f.close()
def main(self , url):
html = self.request(url)
print('requestget')
img_soup_list = BeautifulSoup(html.text , 'lxml').find_all('div' , class_ = 'gdtm')
path = BeautifulSoup(html.text , 'lxml').h1.string
for img_html in img_soup_list:
imgview_url = img_html.find('a')['href']
imgview = self.request(imgview_url)
img_url = BeautifulSoup(imgview.text , 'lxml').find('div',id='i3').find('img')['src']
img = self.request(img_url)
self.mkdir(path)
self.save(img_url , img)
ehentai_picker = ehentai()
ehentai_picker.main('https://e-hentai.org/g/1025486/04b9cf0b96/')
|
Heshichun/ehentai_picker
|
ehentai.py
|
Python
|
gpl-2.0
| 1,943
|
from __future__ import absolute_import, unicode_literals
import logging
from django import forms
from django.contrib import messages
from django.http import Http404
from django.utils.encoding import smart_str
from easy_maps.models import Address
from . import lib
log = logging.getLogger(__name__)
class AddressForm(forms.ModelForm):
"""
Address form validator
    Validate that the address is unique and validate its geocode.
"""
address = forms.CharField(max_length=255, required=True)
class Meta:
model = Address
fields = ['address']
def _post_clean(self):
super(AddressForm, self)._post_clean()
if self.cleaned_data.get('address'):
q = Address.objects.filter(
address__icontains=self.cleaned_data['address']
).exists()
if q:
message_ = ("The %s could not be %s because "
"similar address already exists.") % (
self.instance.__class__.__name__, 'created'
)
log.debug("%s : %s" % (message_, self.cleaned_data['address']))
self._update_errors(message_)
def save(self, commit=True, request=None):
log.info("Saving new address")
try:
instance = super(AddressForm, self).save(commit=commit)
except ValueError as e:
log.debug(smart_str(e))
messages.error(request, smart_str(e))
else:
if instance and not self._valid_address(instance):
message_ = ('Geocode error occurred saving %s: %s' %
(instance.__class__.__name__, instance.address,))
messages.error(request, message_)
instance.delete()
return
log.info("Adding address to fusion table.")
if not request or not request.user:
message_ = "Request or user not found."
log.error(message_)
raise Http404(message_)
else:
flow = lib.FlowClient(request)
service, table_id = flow.get_service_and_table_id()
fusion_table_address_exists = (
lib.FusionTableMixin.address_exists(instance,
service,
table_id))
added_to_fusion_table = False
if fusion_table_address_exists is not None:
log.debug("Address already exist in fusion table:"
" %s" % (instance.address,))
else:
log.info("Adding address to fusion table : %s"
% instance.address)
lib.FusionTableMixin.save(instance, service, table_id)
added_to_fusion_table = True
if instance:
part = "Successfully added a new "
message_ = "%s %s: %s" % (
part,
instance.__class__.__name__,
instance.address
)
if added_to_fusion_table:
f_part = part + "%s to fusion table: %s"
f_message_ = f_part % (
instance.__class__.__name__,
instance.address
)
log.info(f_message_)
messages.success(request, message_)
log.info(message_)
return instance
def _valid_address(self, instance):
if instance.geocode_error or not instance.computed_address:
message_ = 'Geocode Error'
log.debug("%s : %s" % (smart_str(str(message_)),
self.cleaned_data['address']))
self._update_errors(message_)
return False
return True
@staticmethod
def get_addresses():
return Address.objects.only('address').order_by('-id').all()
|
jackton1/django_google_app
|
map_app/forms.py
|
Python
|
gpl-3.0
| 4,132
|
import numpy as np
from .VariableUnitTest import VariableUnitTest
from gwlfe.Output.Loading import NetDisLoad
class TestNetDisLoad(VariableUnitTest):
def test_NetDisLoad(self):
z = self.z
np.testing.assert_array_almost_equal(
NetDisLoad.NetDisLoad_f(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec, z.NRur, z.NUrb, z.Area, z.CNI_0,
z.AntMoist_0, z.Grow_0, z.CNP_0, z.Imper, z.ISRR, z.ISRA, z.Qretention,
z.PctAreaInfil, z.Nqual, z.LoadRateImp, z.LoadRatePerv, z.Storm, z.UrbBMPRed,
z.DisFract, z.FilterWidth, z.PctStrmBuf),
NetDisLoad.NetDisLoad(z.NYrs, z.DaysMonth, z.Temp, z.InitSnow_0, z.Prec, z.NRur, z.NUrb, z.Area, z.CNI_0,
z.AntMoist_0, z.Grow_0, z.CNP_0, z.Imper, z.ISRR, z.ISRA, z.Qretention,
z.PctAreaInfil, z.Nqual, z.LoadRateImp, z.LoadRatePerv, z.Storm, z.UrbBMPRed,
z.DisFract, z.FilterWidth, z.PctStrmBuf), decimal=7)
|
WikiWatershed/gwlf-e
|
test/unittests/test_NetDisLoad.py
|
Python
|
apache-2.0
| 1,093
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from pydispatch import dispatcher
class Context(object):
"""
The top context for all other entities.
"""
def __init__(self, agent):
self._agent = agent
self._attributes = {}
self._dispatcher = dispatcher
def __getitem__(self, item):
return self.get(item)
def __setitem__(self, key, value):
self.bind(key, value)
def __delitem__(self, key):
self.unbind(key)
def __getattr__(self, item):
return self.get(item)
def bind(self, key, provider):
self._attributes[key] = provider
def unbind(self, key):
del self._attributes[key]
def get(self, key, default=None):
obj = self._attributes.get(key)
if not obj:
return default
elif callable(obj):
return obj()
else:
return obj
def add_child_greenlet(self, child):
self._agent.add_child_greenlet(child)
def send(self, *args, **kwargs):
"""
Send signal/event to registered receivers.
:param args:
:param kwargs:
:return:
"""
self._dispatcher.send(*args, **kwargs)
def connect(self, receiver, *args, **kwargs):
"""
Connect the receiver to listen for signals/events.
:param signal:
:param sender:
:return:
"""
self._dispatcher.connect(receiver, *args, **kwargs)
def disconnect(self, receiver, *args, **kwargs):
"""
Disconnect the specified receiver.
:return:
"""
self._dispatcher.disconnect(receiver, *args, **kwargs)
class Component(object):
"""
Base for component classes.
"""
pass
#### Singleton construction ####
_context = None
def instance(agent=None):
"""
Gets the context singleton. Must first be invoked by agent.
:param agent:
:return:
"""
global _context
if _context is None:
_context = Context(agent)
return _context
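# Illustrative usage sketch (a hypothetical agent object is assumed, and the
# callable-provider example assumes `import time`):
#
#   ctx = instance(agent)
#   ctx.bind('config', {'debug': True})      # plain value
#   ctx.bind('now', lambda: time.time())     # callable provider, evaluated on each get()
#   ctx['config']                            # -> {'debug': True}
#   ctx.now                                  # -> current timestamp, via __getattr__ -> get()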
|
eavatar/ava
|
src/eavatar.ava/ava/spi/context.py
|
Python
|
bsd-3-clause
| 2,110
|
from datetime import date, timedelta
from django.views.generic import DetailView, ListView
from django.views.generic.base import ContextMixin
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import reverse_lazy
from django.db.models import F, Sum
from django.shortcuts import get_list_or_404
from django.conf import settings
from models import InfoPage, Category, Tag, ViewCount
class BlogMixin(ContextMixin):
def get_context_data(self, **kwargs):
context = super(BlogMixin, self).get_context_data(**kwargs)
context['all_categories'] = Category.objects.all().order_by('name')
context['recent_posts'] = InfoPage.objects \
.filter(kind=InfoPage.KIND_BLOG) \
.order_by("-publication_date")
context['popular_posts'] = (
InfoPage.objects
.filter(kind=InfoPage.KIND_BLOG)
.filter(viewcount__count__gt=0)
.filter(viewcount__date__gte=date.today() - timedelta(days=28))
.annotate(Sum('viewcount__count'))
.order_by('-viewcount__count__sum', '?')
)
return context
class InfoBlogList(BlogMixin, ListView):
"""Show list of blog posts"""
model = InfoPage
queryset = InfoPage.objects.filter(kind=InfoPage.KIND_BLOG).order_by("-publication_date")
paginate_by = settings.INFO_POSTS_PER_LIST_PAGE
template_name = 'info/blog_list.html'
class InfoBlogLabelBase(InfoBlogList):
def get_queryset(self):
slugs = self.kwargs['slug'].split(',')
queryset = super(InfoBlogLabelBase, self).get_queryset()
filter_args = { self.filter_field: slugs }
return queryset.filter(**filter_args)
def get_context_data(self, **kwargs):
context = super(InfoBlogLabelBase, self).get_context_data(**kwargs)
slugs = self.kwargs['slug'].split(',')
context[self.context_key] = get_list_or_404(self.context_filter_model, slug__in=slugs)
return context
class InfoBlogCategory(InfoBlogLabelBase):
context_key = 'categories'
context_filter_model = Category
filter_field = 'categories__slug__in'
def get_context_data(self, **kwargs):
context = super(InfoBlogCategory, self).get_context_data(**kwargs)
# Filter the recent posts to be specific to this category
slugs = self.kwargs['slug'].split(',')
context['recent_posts'] = context['recent_posts'].filter(categories__slug__in=slugs)
context['category_names'] = []
for category in context['categories']:
context['category_names'].append(category.name)
return context
class InfoBlogTag(InfoBlogLabelBase):
context_key = 'tags'
context_filter_model = Tag
filter_field = 'tags__slug__in'
class InfoBlogView(BlogMixin, DetailView):
"""Show the blog post for the given slug"""
model = InfoPage
queryset = InfoPage.objects.filter(kind=InfoPage.KIND_BLOG)
template_name = 'info/blog_post.html'
def get(self, request, *args, **kwargs):
response = super(InfoBlogView, self).get(request, *args, **kwargs)
_, created = ViewCount.objects.get_or_create(
page=self.object,
date=date.today(),
defaults={'count': 1},
)
if not created:
(ViewCount.objects.filter(page=self.object, date=date.today())
.update(count=F('count')+1))
return response
class InfoBlogFeed(Feed):
"""Create a feed with the latest 10 blog entries in"""
title = "Recent blog posts"
link = reverse_lazy('info_blog_list')
description = "Recent blog posts"
def items(self):
return InfoPage.objects.filter(kind=InfoPage.KIND_BLOG).order_by("-publication_date")[:10]
def item_title(self, item):
return item.title
def item_description(self, item):
return item.content_as_cleaned_html
class InfoPageView(DetailView):
"""Show the page for the given slug"""
model = InfoPage
queryset = InfoPage.objects.filter(kind=InfoPage.KIND_PAGE)
|
patricmutwiri/pombola
|
pombola/info/views.py
|
Python
|
agpl-3.0
| 4,079
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.cloud.aiplatform.v1.schema.predict.instance',
manifest={
'VideoObjectTrackingPredictionInstance',
},
)
class VideoObjectTrackingPredictionInstance(proto.Message):
r"""Prediction input format for Video Object Tracking.
Attributes:
content (str):
The Google Cloud Storage location of the
video on which to perform the prediction.
mime_type (str):
The MIME type of the content of the video.
Only the following are supported: video/mp4
video/avi video/quicktime
time_segment_start (str):
The beginning, inclusive, of the video's time
segment on which to perform the prediction.
Expressed as a number of seconds as measured
from the start of the video, with "s" appended
at the end. Fractions are allowed, up to a
microsecond precision.
time_segment_end (str):
The end, exclusive, of the video's time
segment on which to perform the prediction.
Expressed as a number of seconds as measured
from the start of the video, with "s" appended
at the end. Fractions are allowed, up to a
microsecond precision, and "inf" or "Infinity"
is allowed, which means the end of the video.
"""
content = proto.Field(
proto.STRING,
number=1,
)
mime_type = proto.Field(
proto.STRING,
number=2,
)
time_segment_start = proto.Field(
proto.STRING,
number=3,
)
time_segment_end = proto.Field(
proto.STRING,
number=4,
)
__all__ = tuple(sorted(__protobuf__.manifest))
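# Illustrative sketch of constructing the message above (the GCS URI is
# hypothetical; proto-plus messages accept their fields as keyword arguments):
#
#   instance = VideoObjectTrackingPredictionInstance(
#       content='gs://example-bucket/video.mp4',
#       mime_type='video/mp4',
#       time_segment_start='0.0s',
#       time_segment_end='10.0s',
#   )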
|
sasha-gitg/python-aiplatform
|
schema/predict/instance/google/cloud/aiplatform/v1/schema/predict/instance_v1/types/video_object_tracking.py
|
Python
|
apache-2.0
| 2,394
|
#!/usr/bin/env python
# coding=utf-8
import argparse
import urllib2 # python 2.7
import re
import sqlite3
import traceback
import datetime
import csv
headers = { 'User-Agent' : 'Mozilla/5.0 (compatible; Googlebot/2.1;'
+' +http://www.google.com/bot.html)' }
# http://www.useragentstring.com
parser = argparse.ArgumentParser()
parser.add_argument('cmd', choices=['help', 'leech', 'leechuntil', 'list', 'stats', 'search', 'searchconfig', 'config', 'check'])
parser.add_argument('-d',
help='database name to use (default:database.db)',
default='database.db')
parser.add_argument('params', nargs='*')
args = parser.parse_args()
print 'Commande :', args.cmd
print 'Args :', args
cmd = args.cmd
database = args.d
fgen = u"{0} {1:6}€ {2:3}m² {3} {4:25} {5:35} {6:2}/{7:2}/{8} {9} enligne:{13}\nhttp://www.leboncoin.fr/ventes_immobilieres/{0}.htm"
fdb = u'DB : ' + fgen
fdbs = fdb + u' {11}'
if cmd == 'help':
    print 'leech [num]: download data from page [num] (default:1)'
print 'list'
print 'stats [code postal]'
if cmd == 'list':
conn = sqlite3.connect(database)
c = conn.cursor()
for tmp in c.execute('SELECT * FROM apparts'):
print fdb.format(*tmp)
def leechpage(page, cp):
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS apparts ( '+
'id text PRIMARY KEY, ' +
'prix int, ' +
'surface int, ' +
'cp int, ville text, ' +
'nom text, ' +
'jour int, mois int, annee int, heure text, ' +
'tel text, ' +
'desc text, ' +
'siren text, ' +
'enligne int)')
req = urllib2.Request(
'http://www.leboncoin.fr/ventes_immobilieres/offres/'
#+ 'provence_alpes_cote_d_azur/bouches_du_rhone/'
+ '?'
        + 'ps=10&pe=14' # from 250k to 350k
        + '&ros=4' # min number of rooms
        + '&ret=1' # 1: house; for an apartment use '&ret=2'
        #+ '&f=p' # p: private seller, c: professional
+ '&location=' + cp
+ '&o=' + str(page), None, headers)
response = urllib2.urlopen(req)
re_id = re.compile('ventes_immobilieres/(?P<id>[0-9]+)\.htm')
m = re_id.finditer(response.read())
for m2 in m:
id = m2.group("id")
url = 'http://www.leboncoin.fr/ventes_immobilieres/' + id + '.htm'
c.execute('SELECT * FROM apparts WHERE id=?', (id,))
tmp = c.fetchone()
if(tmp):
try:
print fdb.format(*tmp)
except Exception:
print tmp
tmp = list(tmp)
print type(tmp[4])
tmp[4] = tmp[4].encode('utf-8')
print type(tmp[4])
print tmp
print fdb.format(*tmp)
continue
try:
response = urllib2.urlopen(url)
except Exception as e:
print 'Error on url', url
print e
continue
rep = response.read().decode('cp1252')
try:
m3 = re.findall('class="price"\>([0-9 ]+).*\<', rep)
try:
prix = int(m3[0].replace(' ', ''))
except:
print rep
m3 = re.findall(
'<th>Surface : </th>\s*<td>([0-9 ]+) m<sup>2</sup>', rep)
surface = int(m3[0].replace(' ', ''))
m3 = re.findall(
'<th>Code postal :</th>\s*<td>([0-9]+)</td>', rep)
cp = m3[0]
m3 = re.findall(
'<th>Ville :</th>\s*<td>([^<]+)</td>', rep)
try:
ville = m3[0]
except IndexError:
ville = ''
m3 = re.findall("'utilisateur_v2','N'\)\">([^<]+)</a>", rep)
nom = m3[0]
m3 = re.findall(' Mise en ligne le (\d+) (.+) à (\d+:\d+).', rep)
jour = m3[0][0]
mois = m3[0][1]
if mois[:4] == 'janv' : mois = 1
elif mois[0] == 'f' : mois = 2
elif mois[:4] == 'mars' : mois = 3
elif mois[:4] == 'avri' : mois = 4
elif mois[:3] == 'mai' : mois = 5
elif mois[:4] == 'juin' : mois = 6
elif mois[:4] == 'juil' : mois = 7
elif mois[0] == 'a' : mois = 8
elif mois[:4] == 'sept' : mois = 9
elif mois[:4] == 'octo' : mois = 10
elif mois[:3] == 'nov' : mois = 11
elif mois[0] == 'd' : mois = 12
else : mois = 0
ddt = datetime.datetime.today()
annee = ddt.year
            # if we are at the start of the year (e.g. 3 Jan 2014)
            # and the listing is from a month at the end of the year (e.g. 20 Dec)
            # correct the year (e.g. to get 20 Dec 2013)
if ddt.month < 6 and mois > 6:
annee -= 1
heure = m3[0][2]
m3 = re.findall('/pg/0([^\.]+)\.gif" class="AdPhonenum', rep)
try:
tel_raw = m3[0]
except:
tel_raw = ''
#print m3, m3[0]
m3 = re.findall('class="content">(.+?)</div>', rep, re.DOTALL)
try:
desc = m3[0]
except:
desc = ''
m3 = re.findall('Siren : ([0-9]+)', rep)
try:
siren = m3[0]
except:
siren = 0
except Exception as e:
print 'Error on url', url
traceback.print_exc()
#print e
continue
#print ville
f = u'{0:6}€ {1:3}m² {4:4}€/m² {2:5} {3:22} {6:20} {7}/{8}/{9}@{10} {5} {12} {11}'
print f.format(
prix, surface, cp, ville, prix / surface,
url, nom, jour, mois, annee, heure, tel_raw, siren)
print desc
c.execute('INSERT INTO apparts VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', \
(id, prix, surface, cp, ville, nom, jour, mois, annee, heure, \
tel_raw, desc, siren, 1))
#exit(0)
conn.commit()
if cmd == 'leech':
try:
page = int(args.params[0])
except:
page = 1
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute("SELECT * FROM config")
tmp = c.fetchone()
while(tmp):
print u'{0:10}: {1}'.format(*tmp)
if tmp[0] == 'cp' :
print 'CP:',tmp[1]
cp = tmp[1].split(',')
tmp = c.fetchone()
for cpi in cp:
print 'Leech', cpi, page
leechpage(page, cpi)
if cmd == 'leechuntil':
try:
page = int(args.params[0])
except:
page = 1
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute("SELECT * FROM config")
tmp = c.fetchone()
while(tmp):
print u'{0:10}: {1}'.format(*tmp)
if tmp[0] == 'cp' :
print 'CP:',tmp[1]
cp = tmp[1].split(',')
tmp = c.fetchone()
for cpi in cp:
for i in range(page, 0, -1):
print 'Leech', cpi, i
leechpage(i,cpi)
if cmd == 'stats':
try:
cp = int(args.params[0])
except:
cp = 0
conn = sqlite3.connect(database)
c = conn.cursor()
prix_m2_cp = {}
prix_m2_cp_pro = {}
if cp:
c.execute('SELECT * FROM apparts WHERE cp=?', (cp,))
else:
c.execute('SELECT * FROM apparts')
tmp = c.fetchone()
while(tmp):
#print fdb.format(*tmp)
try:
prix_m2_cp[tmp[3]]
except:
prix_m2_cp[tmp[3]] = []
try:
prix_m2_cp_pro[tmp[3]]
except:
prix_m2_cp_pro[tmp[3]] = []
if tmp[1]/tmp[2] > 1000 and tmp[1]/tmp[2] < 8000:
if cp:
print '{0} {1:3} {2} {3:22} {4}'.format(
tmp[1], tmp[2],
str(tmp[1]/tmp[2])+'€/m²',
tmp[5].encode('utf8'),
'http://www.leboncoin.fr/ventes_immobilieres/'+tmp[0]+'.htm')
#print tmp[12]
if int(tmp[12]) > 0: #siren
prix_m2_cp_pro[tmp[3]].append(tmp[1]/tmp[2])
            else: # not a professional
prix_m2_cp[tmp[3]].append(tmp[1]/tmp[2])
#print tmp[1]/tmp[2], tmp[3]
tmp = c.fetchone()
#print prix_m2_cp
#print prix_m2_cp_pro
cp_ville = {}
with open('data/insee.csv') as inseefile:
inseedata = csv.reader(inseefile, delimiter=';')
for ligne in inseedata:
try:
cp_ville[int(ligne[1])] = ligne[0]
#print int(ligne[1]), ligne[0]
except ValueError:
pass
for k in list(set(prix_m2_cp.keys() + prix_m2_cp_pro.keys())):
numpart = len(prix_m2_cp[k])
if numpart: moypart = sum(prix_m2_cp[k])/numpart
else: moypart = 0
numpro = len(prix_m2_cp_pro[k])
if numpro: moypro = sum(prix_m2_cp_pro[k])/numpro
else: moypro = 0
if moypro: ratio = moypart / float(moypro)
else: ratio = 0
print u'{0:5} part({1:3}): {2:4}€/m² pro({3:3}): {4:4}€/m² {5:3}% {6}'.format( \
k, numpart, moypart, numpro, moypro, int(ratio*100), cp_ville[k])
if cmd == 'test':
with open('data/insee.csv') as inseefile:
inseedata = csv.reader(inseefile, delimiter=';')
for ligne in inseedata:
try:
print int(ligne[1]), ligne[0]
except ValueError:
pass
if cmd == 'search':
try:
s = args.params[0]
except:
raise Exception('Recherche manquante')
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute("SELECT * FROM apparts WHERE desc LIKE ?", ('%'+s+'%',))
tmp = c.fetchone()
while(tmp):
print fdbs.format(*tmp)
tmp = c.fetchone()
if cmd == 'searchconfig':
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute("SELECT * FROM config")
tmp = c.fetchone()
while(tmp):
print u'{0:10}: {1}'.format(*tmp)
if tmp[0] == 'cp' :
print 'CP:',tmp[1]
cp = tmp[1].split(',')
if tmp[0] == 'prixmax' :
print 'PrixMax:',tmp[1]
prixmax = tmp[1]
if tmp[0] == 'surfmin' :
print 'SurfMin:',tmp[1]
surfmin = tmp[1]
tmp = c.fetchone()
c.execute("SELECT * FROM apparts ORDER BY annee,mois,jour,cp,heure,id")
tmp = c.fetchone()
while(tmp):
if str(tmp[3]) in cp:
print fdb.format(*tmp)
tmp = c.fetchone()
if cmd == 'config':
if len(args.params) < 1:
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute("SELECT * FROM config")
print 'Config:'
tmp = c.fetchone()
while(tmp):
print u'{0:10}: {1}'.format(*tmp)
tmp = c.fetchone()
elif len(args.params) != 2:
raise Exception('Attendu: clé valeur')
else:
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS config (key text PRIMARY KEY, value text)')
try:
c.execute('INSERT INTO config VALUES (?, ?)', (args.params[0], args.params[1]))
except sqlite3.IntegrityError:
c.execute('UPDATE config SET value=? WHERE key=?', (args.params[1], args.params[0]))
conn.commit()
def check_id(id):
url = 'http://www.leboncoin.fr/ventes_immobilieres/' + id + '.htm'
try:
response = urllib2.urlopen(url)
except urllib2.HTTPError as he:
#Usually, 404
print 'HTTP Error on url', url
print he
return False
except Exception as e:
print 'Error on url', url
print e
#rep = response.read().decode('cp1252')
#if rep.find(u'Cette annonce est désactivée') > -1: return False
return True
if cmd == 'check':
conn = sqlite3.connect(database)
c = conn.cursor()
c.execute("SELECT id FROM apparts WHERE enligne=1")
tmp = c.fetchone()
dead = []
while(tmp):
print tmp
if not check_id(tmp[0]): dead.append(tmp[0])
tmp = c.fetchone()
for d in dead:
print d
c.execute("UPDATE apparts SET enligne=0 WHERE id=?", (d,))
conn.commit()
print 'Fin.'
|
ofaurax/LeechCoin
|
leechcoin.py
|
Python
|
gpl-3.0
| 12,394
|
"""
Tests for EmbargoMiddleware
"""
from contextlib import contextmanager
import mock
from nose.plugins.attrib import attr
import unittest
import pygeoip
import ddt
from django.conf import settings
from django.test.utils import override_settings
from django.core.cache import cache
from django.db import connection
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import (
ModuleStoreTestCase, mixed_store_config
)
from student.roles import (
GlobalStaff, CourseRole, OrgRole,
CourseStaffRole, CourseInstructorRole,
OrgStaffRole, OrgInstructorRole
)
from ..models import (
RestrictedCourse, Country, CountryAccessRule,
)
from util.testing import UrlResetMixin
from .. import api as embargo_api
from ..exceptions import InvalidAccessPoint
from mock import patch
MODULESTORE_CONFIG = mixed_store_config(settings.COMMON_TEST_DATA_ROOT, {})
@attr(shard=3)
@ddt.ddt
@override_settings(MODULESTORE=MODULESTORE_CONFIG)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
class EmbargoCheckAccessApiTests(ModuleStoreTestCase):
"""Test the embargo API calls to determine whether a user has access. """
ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
def setUp(self):
super(EmbargoCheckAccessApiTests, self).setUp()
self.course = CourseFactory.create()
self.user = UserFactory.create()
self.restricted_course = RestrictedCourse.objects.create(course_key=self.course.id)
Country.objects.create(country='US')
Country.objects.create(country='IR')
Country.objects.create(country='CU')
# Clear the cache to prevent interference between tests
cache.clear()
@ddt.data(
# IP country, profile_country, blacklist, whitelist, allow_access
('US', None, [], [], True),
('IR', None, ['IR', 'CU'], [], False),
('US', 'IR', ['IR', 'CU'], [], False),
('IR', 'IR', ['IR', 'CU'], [], False),
('US', None, [], ['US'], True),
('IR', None, [], ['US'], False),
('US', 'IR', [], ['US'], False),
)
@ddt.unpack
def test_country_access_rules(self, ip_country, profile_country, blacklist, whitelist, allow_access):
# Configure the access rules
for whitelist_country in whitelist:
CountryAccessRule.objects.create(
rule_type=CountryAccessRule.WHITELIST_RULE,
restricted_course=self.restricted_course,
country=Country.objects.get(country=whitelist_country)
)
for blacklist_country in blacklist:
CountryAccessRule.objects.create(
rule_type=CountryAccessRule.BLACKLIST_RULE,
restricted_course=self.restricted_course,
country=Country.objects.get(country=blacklist_country)
)
# Configure the user's profile country
if profile_country is not None:
self.user.profile.country = profile_country
self.user.profile.save()
# Appear to make a request from an IP in a particular country
with self._mock_geoip(ip_country):
# Call the API. Note that the IP address we pass in doesn't
# matter, since we're injecting a mock for geo-location
result = embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
# Verify that the access rules were applied correctly
self.assertEqual(result, allow_access)
def test_no_user_has_access(self):
CountryAccessRule.objects.create(
rule_type=CountryAccessRule.BLACKLIST_RULE,
restricted_course=self.restricted_course,
country=Country.objects.get(country='US')
)
# The user is set to None, because the user has not been authenticated.
result = embargo_api.check_course_access(self.course.id, ip_address='0.0.0.0')
self.assertTrue(result)
def test_no_user_blocked(self):
CountryAccessRule.objects.create(
rule_type=CountryAccessRule.BLACKLIST_RULE,
restricted_course=self.restricted_course,
country=Country.objects.get(country='US')
)
with self._mock_geoip('US'):
# The user is set to None, because the user has not been authenticated.
result = embargo_api.check_course_access(self.course.id, ip_address='0.0.0.0')
self.assertFalse(result)
def test_course_not_restricted(self):
# No restricted course model for this course key,
# so all access checks should be skipped.
unrestricted_course = CourseFactory.create()
with self.assertNumQueries(1):
embargo_api.check_course_access(unrestricted_course.id, user=self.user, ip_address='0.0.0.0')
# The second check should require no database queries
with self.assertNumQueries(0):
embargo_api.check_course_access(unrestricted_course.id, user=self.user, ip_address='0.0.0.0')
def test_ip_v6(self):
# Test the scenario that will go through every check
# (restricted course, but pass all the checks)
result = embargo_api.check_course_access(self.course.id, user=self.user, ip_address='FE80::0202:B3FF:FE1E:8329')
self.assertTrue(result)
def test_country_access_fallback_to_continent_code(self):
# Simulate PyGeoIP falling back to a continent code
# instead of a country code. In this case, we should
# allow the user access.
with self._mock_geoip('EU'):
result = embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
self.assertTrue(result)
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
def test_profile_country_db_null(self):
# Django country fields treat NULL values inconsistently.
# When saving a profile with country set to None, Django saves an empty string to the database.
# However, when the country field loads a NULL value from the database, it sets
# `country.code` to `None`. This caused a bug in which country values created by
# the original South schema migration -- which defaulted to NULL -- caused a runtime
# exception when the embargo middleware treated the value as a string.
# In order to simulate this behavior, we can't simply set `profile.country = None`.
# (because when we save it, it will set the database field to an empty string instead of NULL)
query = "UPDATE auth_userprofile SET country = NULL WHERE id = %s"
connection.cursor().execute(query, [str(self.user.profile.id)])
# Verify that we can check the user's access without error
result = embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
self.assertTrue(result)
def test_caching(self):
with self._mock_geoip('US'):
# Test the scenario that will go through every check
# (restricted course, but pass all the checks)
# This is the worst case, so it will hit all of the
# caching code.
with self.assertNumQueries(3):
embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
with self.assertNumQueries(0):
embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
def test_caching_no_restricted_courses(self):
RestrictedCourse.objects.all().delete()
cache.clear()
with self.assertNumQueries(1):
embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
with self.assertNumQueries(0):
embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
@ddt.data(
GlobalStaff,
CourseStaffRole,
CourseInstructorRole,
OrgStaffRole,
OrgInstructorRole,
)
def test_staff_access_country_block(self, staff_role_cls):
# Add a country to the blacklist
CountryAccessRule.objects.create(
rule_type=CountryAccessRule.BLACKLIST_RULE,
restricted_course=self.restricted_course,
country=Country.objects.get(country='US')
)
# Appear to make a request from an IP in the blocked country
with self._mock_geoip('US'):
result = embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
# Expect that the user is blocked, because the user isn't staff
self.assertFalse(result, msg="User should not have access because the user isn't staff.")
# Instantiate the role, configuring it for this course or org
if issubclass(staff_role_cls, CourseRole):
staff_role = staff_role_cls(self.course.id)
elif issubclass(staff_role_cls, OrgRole):
staff_role = staff_role_cls(self.course.id.org)
else:
staff_role = staff_role_cls()
# Add the user to the role
staff_role.add_users(self.user)
# Now the user should have access
with self._mock_geoip('US'):
result = embargo_api.check_course_access(self.course.id, user=self.user, ip_address='0.0.0.0')
self.assertTrue(result, msg="User should have access because the user is staff.")
@contextmanager
def _mock_geoip(self, country_code):
"""
Mock for the GeoIP module.
"""
with mock.patch.object(pygeoip.GeoIP, 'country_code_by_addr') as mock_ip:
mock_ip.return_value = country_code
yield
@ddt.ddt
@override_settings(MODULESTORE=MODULESTORE_CONFIG)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class EmbargoMessageUrlApiTests(UrlResetMixin, ModuleStoreTestCase):
"""Test the embargo API calls for retrieving the blocking message URLs. """
URLCONF_MODULES = ['openedx.core.djangoapps.embargo']
ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
@patch.dict(settings.FEATURES, {'EMBARGO': True})
def setUp(self):
super(EmbargoMessageUrlApiTests, self).setUp()
self.course = CourseFactory.create()
@ddt.data(
('enrollment', '/embargo/blocked-message/enrollment/embargo/'),
('courseware', '/embargo/blocked-message/courseware/embargo/')
)
@ddt.unpack
def test_message_url_path(self, access_point, expected_url_path):
self._restrict_course(self.course.id)
# Retrieve the URL to the blocked message page
url_path = embargo_api.message_url_path(self.course.id, access_point)
self.assertEqual(url_path, expected_url_path)
def test_message_url_path_caching(self):
self._restrict_course(self.course.id)
# The first time we retrieve the message, we'll need
# to hit the database.
with self.assertNumQueries(2):
embargo_api.message_url_path(self.course.id, "enrollment")
# The second time, we should be using cached values
with self.assertNumQueries(0):
embargo_api.message_url_path(self.course.id, "enrollment")
@ddt.data('enrollment', 'courseware')
def test_message_url_path_no_restrictions_for_course(self, access_point):
# No restrictions for the course
url_path = embargo_api.message_url_path(self.course.id, access_point)
# Use a default path
self.assertEqual(url_path, '/embargo/blocked-message/courseware/default/')
def test_invalid_access_point(self):
with self.assertRaises(InvalidAccessPoint):
embargo_api.message_url_path(self.course.id, "invalid")
def test_message_url_stale_cache(self):
# Retrieve the URL once, populating the cache with the list
# of restricted courses.
self._restrict_course(self.course.id)
embargo_api.message_url_path(self.course.id, 'courseware')
# Delete the restricted course entry
RestrictedCourse.objects.get(course_key=self.course.id).delete()
# Clear the message URL cache
message_cache_key = (
'embargo.message_url_path.courseware.{course_key}'
).format(course_key=self.course.id)
cache.delete(message_cache_key)
# Try again. Even though the cache results are stale,
# we should still get a valid URL.
url_path = embargo_api.message_url_path(self.course.id, 'courseware')
self.assertEqual(url_path, '/embargo/blocked-message/courseware/default/')
def _restrict_course(self, course_key):
"""Restrict the user from accessing the course. """
country = Country.objects.create(country='us')
restricted_course = RestrictedCourse.objects.create(
course_key=course_key,
enroll_msg_key='embargo',
access_msg_key='embargo'
)
CountryAccessRule.objects.create(
restricted_course=restricted_course,
rule_type=CountryAccessRule.BLACKLIST_RULE,
country=country
)
|
synergeticsedx/deployment-wipro
|
openedx/core/djangoapps/embargo/tests/test_api.py
|
Python
|
agpl-3.0
| 13,295
|
"""
Script that patches a Keras HDF5 model file with parameter values
obtained from a TensorFlow saved model.
When I trained the neural networks for the MPG Ranch NFC Coarse Classifier
versions 3.0 and 4.0, I inadvertently saved untrained parameter values to
a Keras HDF5 model file instead of trained parameter values. The trained
values were only saved as a TensorFlow saved model. This script patches
the Keras HDF5 model file with trained parameter values read from the
saved model. The script reads the parameter values directly from one of
the saved model files since that's the only way I could figure out how to
get at the saved model parameter values. The saved model did not appear
to be usable as is with TensorFlow 2, perhaps because it was written
using too old a version of TensorFlow 1.
I guessed at the structure of the saved model files, knowing the sizes
and types of the tensors that were supposed to be in them, and also
from the contents of their `variables.index` files as displayed in a
text editor, and fortunately my guess seems to have been correct.
TensorFlow 2 detectors and classifiers constructed from the patched
HDF5 model files perform the same as TensorFlow 1 detectors and
classifiers constructed from the saved model files.
I used this script to patch model files for versions 3.1 and 4.1 of the
MPG Ranch NFC Coarse Classifier. For each of the four classifier models,
(i.e. Tseep and Thrush for each classifier version), I copied the
relevant model directory from the 3.0 or 4.0 version to the 3.1 or 4.1
version, modified the `CLIP_TYPE` and `CLASSIFIER_VERSION` attributes
of this script (see below) accordingly, and then ran the script. After
running the script I deleted the saved models, since they did not
appear to be usable with TensorFlow 2.
"""
from pathlib import Path
import h5py
import numpy as np
CLIP_TYPE = 'Thrush'
CLASSIFIER_VERSION = '3_1'
CLASSIFIER_DIR_PATH = Path(
f'/Users/harold/Documents/Code/Python/Vesper/vesper/mpg_ranch/'
f'nfc_coarse_classifier_{CLASSIFIER_VERSION}/{CLIP_TYPE} Classifier')
VARIABLES_DIR_PATH = \
CLASSIFIER_DIR_PATH / 'TensorFlow SavedModel' / 'variables'
TF_DATA_FILE_PATH = VARIABLES_DIR_PATH / 'variables.data-00000-of-00001'
HDF5_FILE_PATH = CLASSIFIER_DIR_PATH / 'Keras Model.h5'
def main():
floats, global_step = read_tf_data_file(TF_DATA_FILE_PATH)
print(f'global_step: {global_step}')
start_index = 0
with h5py.File(HDF5_FILE_PATH, 'r+') as f:
weight_group = f['/model_weights']
for layer_name in weight_group.keys():
if layer_name != 'flatten' and \
not layer_name.startswith('max_pooling'):
layer_group = weight_group[f'{layer_name}/{layer_name}']
for dataset_name, dataset in layer_group.items():
print(
layer_name, dataset_name, dataset.shape, dataset.size)
end_index = start_index + dataset.size
x = floats[start_index:end_index]
x.shape = dataset.shape
dataset[:] = x
start_index = end_index
if start_index != len(floats):
print(
f'Warning: number of saved model floats processed {start_index} '
f'was less than total number {len(floats)}.')
def read_tf_data_file(path):
with open(path, 'rb') as data_file:
buffer = data_file.read()
floats = np.frombuffer(buffer[:-8], 'float32')
global_step = np.frombuffer(buffer[-8:], 'int64')[0]
return floats, global_step
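# Layout assumed by read_tf_data_file(), per the guess described in the module
# docstring: the variables.data file is one contiguous run of float32 parameter
# values (all layers, in checkpoint order), followed by a single int64
# global_step in the final 8 bytes:
#
#   | float32 * N (trained weights) | int64 global_step |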
if __name__ == '__main__':
main()
|
HaroldMills/Vesper
|
vesper/mpg_ranch/nfc_coarse_classifier_3_1/patch_hdf5_model_from_saved_model.py
|
Python
|
mit
| 3,802
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_antivirus_settings
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_antivirus_settings.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_antivirus_settings_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'antivirus_settings': {
'default_db': 'normal',
'grayware': 'enable',
'override_timeout': '5'
},
'vdom': 'root'}
is_error, changed, response = fortios_antivirus_settings.fortios_antivirus(input_data, fos_instance)
expected_data = {
'default-db': 'normal',
'grayware': 'enable',
'override-timeout': '5'
}
set_method_mock.assert_called_with('antivirus', 'settings', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_antivirus_settings_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'antivirus_settings': {
'default_db': 'normal',
'grayware': 'enable',
'override_timeout': '5'
},
'vdom': 'root'}
is_error, changed, response = fortios_antivirus_settings.fortios_antivirus(input_data, fos_instance)
expected_data = {
'default-db': 'normal',
'grayware': 'enable',
'override-timeout': '5'
}
set_method_mock.assert_called_with('antivirus', 'settings', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_antivirus_settings_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'antivirus_settings': {
'default_db': 'normal',
'grayware': 'enable',
'override_timeout': '5'
},
'vdom': 'root'}
is_error, changed, response = fortios_antivirus_settings.fortios_antivirus(input_data, fos_instance)
expected_data = {
'default-db': 'normal',
'grayware': 'enable',
'override-timeout': '5'
}
set_method_mock.assert_called_with('antivirus', 'settings', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_antivirus_settings_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'antivirus_settings': {
'random_attribute_not_valid': 'tag',
'default_db': 'normal',
'grayware': 'enable',
'override_timeout': '5'
},
'vdom': 'root'}
is_error, changed, response = fortios_antivirus_settings.fortios_antivirus(input_data, fos_instance)
expected_data = {
'default-db': 'normal',
'grayware': 'enable',
'override-timeout': '5'
}
set_method_mock.assert_called_with('antivirus', 'settings', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
|
thaim/ansible
|
test/units/modules/network/fortios/test_fortios_antivirus_settings.py
|
Python
|
mit
| 5,944
|
# Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <bram@topydo.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Provides a base class for pretty printer filters. """
class PrettyPrinterFilter(object):
"""
Base class for a pretty printer filter.
Subclasses must re-implement the filter method.
"""
def filter(self, p_todo_str, _):
"""
Applies a filter to p_todo_str and returns a modified version of it.
"""
raise NotImplementedError
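# Hypothetical illustration (not part of the original module): a minimal
# subclass showing the filter contract. The name UpperCaseFilter is invented
# for this sketch; the second argument is the todo object, unused here.
class UpperCaseFilter(PrettyPrinterFilter):
    """ Example filter that upper-cases the rendered todo string. """
    def filter(self, p_todo_str, _):
        return p_todo_str.upper()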
|
bram85/topydo
|
topydo/lib/PrettyPrinterFilter.py
|
Python
|
gpl-3.0
| 1,130
|
"""
Lamina module
When you set the display mode for OpenGL, you enable all the coolness of 3D rendering,
but you disable the bread-and-butter raster SDL functionality like fill() and blit().
Since the GUI libraries use those surface methods extensively, they cannot readily be
used in OpenGL displays.
Lamina provides the LaminaPanelSurface and LaminaScreenSurface classes, which bridge
between the two.
The 'surf' attribute is a surface, and can be drawn on, blitted to, and passed to GUI
rendering functions for alteration. The 'display' method displays the surface as a
transparent textured quad in the OpenGL model-space. The 'refresh' method indicates that
the surface has changed, and that the texture needs regeneration. The 'clear' method
restores the blank and transparent original condition.
Usage is vaguely like this incomplete pseudocode:
# create gui with appropriate constructor
gui = GUI_Constructor()
# create LaminaPanelSurface
gui_screen = lamina.LaminaPanelSurface( (640,480), (-1,1,2,2) )
# draw widgets on surface
gui.draw( gui_screen.surf )
# hide mouse cursor
pygame.mouse.set_visible(0)
while 1:
# do input events ....
# pass events to gui
gui.doevent(...)
# detect changes to surface
changed = gui.update( gui_screen.surf )
if changed:
# and mark for update
gui_screen.refresh()
# draw opengl geometry .....
# display gui
# opengl code to set modelview matrix for desired gui surface pos
gui_screen.display()
If your gui screen is not sized to match the display, hide the system
mouse cursor, and use the convertMousePos method to position your own
OpenGL mouse cursor (a cone or something). The result of
convertMousePos is 2D, (x, y), so you need to draw the mouse with the same
modelview matrix used to draw the LaminaPanelSurface itself.
mouse_pos = gui_screen.convertMousePos(mouseX, mouseY)
glTranslate(*mouse_pos)
# opengl code to display your mouse object (a cone?)
The distribution package includes several demo scripts which are functional. Refer
to them for details of working code.
"""
import OpenGL.GLU as oglu
import OpenGL.GL as ogl
import pygame
import math
def load_texture(surf):
"""Load surface into texture object. Return texture object.
@param surf: surface to make texture from.
"""
txtr = ogl.glGenTextures(1)
textureData = pygame.image.tostring(surf, "RGBA", 1)
ogl.glEnable(ogl.GL_TEXTURE_2D)
ogl.glBindTexture(ogl.GL_TEXTURE_2D, txtr)
width, height = surf.get_size()
ogl.glTexImage2D( ogl.GL_TEXTURE_2D, 0, ogl.GL_RGBA, width, height, 0,
ogl.GL_RGBA, ogl.GL_UNSIGNED_BYTE, textureData )
ogl.glTexParameterf(ogl.GL_TEXTURE_2D, ogl.GL_TEXTURE_MAG_FILTER, ogl.GL_NEAREST)
ogl.glTexParameterf(ogl.GL_TEXTURE_2D, ogl.GL_TEXTURE_MIN_FILTER, ogl.GL_NEAREST)
ogl.glDisable(ogl.GL_TEXTURE_2D)
return txtr
def overlay_texture(txtr, surf, r):
"""Load surface into texture object, replacing part of txtr
given by rect r.
@param txtr: texture to add to
@param surf: surface to copy from
@param r: rectangle indicating area to overlay.
"""
subsurf = surf.subsurface(r)
textureData = pygame.image.tostring(subsurf, "RGBA", 1)
    wS, hS = surf.get_size()
#rect = pygame.Rect(r.x,hS-(r.y+r.height),r.width,r.height)
rect = r
ogl.glEnable(ogl.GL_TEXTURE_2D)
ogl.glBindTexture(ogl.GL_TEXTURE_2D, txtr)
ogl.glTexSubImage2D(ogl.GL_TEXTURE_2D, 0, rect.x, rect.y, rect.width, rect.height,
ogl.GL_RGBA, ogl.GL_UNSIGNED_BYTE, textureData )
ogl.glDisable(ogl.GL_TEXTURE_2D)
class LaminaPanelSurface(object):
"""Surface for imagery to overlay.
@ivar surf: surface
@ivar dims: tuple with corners of quad
"""
def __init__(self, quadDims=(-1,-1,2,2), winSize=None):
"""Initialize new instance.
@param winSize: tuple (width, height)
@param quadDims: tuple (left, top, width, height)
"""
if not winSize:
winSize = pygame.display.get_surface().get_size()
self._txtr = None
self._winSize = winSize
left, top, width, height = quadDims
right, bottom = left+width, top-height
self._qdims = quadDims
self.dims = (left,top,0), (right,top,0), (right,bottom,0), (left,bottom,0)
self.clear()
def clear(self):
"""Restore the total transparency to the surface. """
powerOfTwo = 64
while powerOfTwo < max(*self._winSize):
powerOfTwo *= 2
raw = pygame.Surface((powerOfTwo, powerOfTwo), pygame.SRCALPHA, 32)
self._surfTotal = raw.convert_alpha()
self._usable = 1.0*self._winSize[0]/powerOfTwo, 1.0*self._winSize[1]/powerOfTwo
self.surf = self._surfTotal.subsurface(0,0,self._winSize[0],self._winSize[1])
self.regen()
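    # Worked example (added note, not part of the original module): for a
    # 640x480 window the loop above grows powerOfTwo from 64 to 1024, so the
    # backing surface is 1024x1024 and self._usable becomes
    # (640/1024, 480/1024) = (0.625, 0.46875); display() later uses these
    # fractions as texture coordinates so only the used part of the padded
    # texture is drawn.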
def regen(self):
"""Force regen of texture object. Call after change to the GUI appearance. """
if self._txtr:
ogl.glDeleteTextures([self._txtr])
self._txtr = None
def refresh(self, dirty=None):
"""Refresh the texture from the surface.
@param dirty: list of rectangles to update, None for whole panel
"""
if not self._txtr:
self._txtr = load_texture(self._surfTotal)
else:
wS, hS = self._surfTotal.get_size()
if dirty is None:
dirty = [pygame.Rect(0,0,wS,hS)]
for r in dirty:
overlay_texture(self._txtr,self._surfTotal,r)
def convertMousePos(self, pos):
"""Converts 2d pixel mouse pos to 2d gl units.
@param pos: 2-tuple with x,y of mouse
"""
x0, y0 = pos
x = x0/self._winSize[0]*self._qdims[2] + self._qdims[0]
y = y0/self._winSize[1]*self._qdims[3] + self._qdims[1]
return x, y
def display(self):
"""Draw surface to a quad. Call as part of OpenGL rendering code."""
ogl.glEnable(ogl.GL_BLEND)
ogl.glBlendFunc(ogl.GL_SRC_ALPHA, ogl.GL_ONE_MINUS_SRC_ALPHA)
ogl.glEnable(ogl.GL_TEXTURE_2D)
ogl.glBindTexture(ogl.GL_TEXTURE_2D, self._txtr)
ogl.glTexEnvf(ogl.GL_TEXTURE_ENV, ogl.GL_TEXTURE_ENV_MODE, ogl.GL_REPLACE)
ogl.glMatrixMode(ogl.GL_TEXTURE)
ogl.glLoadIdentity()
ogl.glBegin(ogl.GL_QUADS)
ogl.glTexCoord2f(0.0, 1.0)
ogl.glVertex3f(*self.dims[0])
ogl.glTexCoord2f(self._usable[0], 1.0)
ogl.glVertex3f(*self.dims[1])
ogl.glTexCoord2f(self._usable[0], 1-self._usable[1])
ogl.glVertex3f(*self.dims[2])
ogl.glTexCoord2f(0.0, 1-self._usable[1])
ogl.glVertex3f(*self.dims[3])
ogl.glEnd()
ogl.glDisable(ogl.GL_BLEND)
ogl.glDisable(ogl.GL_TEXTURE_2D)
def testMode(self):
"""Draw red/transparent checkerboard. """
w, h = self._winSize[0]*0.25, self._winSize[1]*0.25
Rect = pygame.Rect
pygame.draw.rect(self.surf, (250,0,0), Rect(0,0,w,h), 0)
pygame.draw.rect(self.surf, (250,0,0), Rect(2*w,0,w,h), 0)
pygame.draw.rect(self.surf, (250,0,0), Rect(w,h,w,h), 0)
pygame.draw.rect(self.surf, (250,0,0), Rect(3*w,h,w,h), 0)
pygame.draw.rect(self.surf, (250,0,0), Rect(0,2*h,w,h), 0)
pygame.draw.rect(self.surf, (250,0,0), Rect(2*w,2*h,w,h), 0)
pygame.draw.rect(self.surf, (250,0,0), Rect(w,3*h,w,h), 0)
pygame.draw.rect(self.surf, (250,0,0), Rect(3*w,3*h,w,h), 0)
self.clear = None
class LaminaScreenSurface(LaminaPanelSurface):
"""Surface for imagery to overlay. Autofits to actual display.
@ivar surf: surface
@ivar dims: tuple with corners of quad
"""
def __init__(self, depth=0):
"""Initialize new instance.
@param depth: (0-1) z-value, if you want to draw your own 3D
cursor, set this to a small non-zero value to allow room in
front of this overlay to draw the cursor. (0.1 is a first guess)
"""
self._txtr = None
self._depth = depth
self.setup()
def setup(self):
"""Setup stuff, after pygame is inited. """
self._winSize = pygame.display.get_surface().get_size()
self.refreshPosition()
self.clear()
def refreshPosition(self):
"""Recalc where in modelspace quad needs to be to fill screen."""
depth = self._depth
bottomleft = oglu.gluUnProject(0, 0, depth)
bottomright = oglu.gluUnProject(self._winSize[0], 0, depth)
topleft = oglu.gluUnProject(0, self._winSize[1], depth)
topright = oglu.gluUnProject(self._winSize[0], self._winSize[1], depth)
self.dims = topleft, topright, bottomright, bottomleft
width = topright[0] - topleft[0]
height = topright[1] - bottomright[1]
self._qdims = topleft[0], topleft[1], width, height
class LaminaScreenSurface2(LaminaScreenSurface):
"""Surface that defers initialization to setup method. """
def __init__(self, depth=0):
"""Initialize new instance. """
self._txtr = None
self._depth = depth
def refreshPosition(self):
"""Recalc where in modelspace quad needs to be to fill screen."""
self._dirty = True
self._qdims = None, None
def getPoint(self, pt):
"""Get x,y coords of pt in 3d space."""
pt2 = oglu.gluProject(*pt)
return int(pt2[0]), int(pt2[1]), pt2[2]
def update(self):
pass
def commit(self):
pass
def display(self):
"""Display texture. """
if self._dirty:
depth = self._depth
topleft = oglu.gluUnProject(0,self._winSize[1],depth)
assert topleft, topleft
if topleft[0:2] != self._qdims[0:2]:
bottomleft = oglu.gluUnProject(0,0,depth)
bottomright = oglu.gluUnProject(self._winSize[0],0,depth)
topright = oglu.gluUnProject(self._winSize[0],self._winSize[1],depth)
self.dims = topleft, topright, bottomright, bottomleft
width = topright[0] - topleft[0]
height = topright[1] - bottomright[1]
self._qdims = topleft[0], topleft[1], width, height
LaminaScreenSurface.display(self)
class LaminaScreenSurface3(LaminaScreenSurface):
"""Surface that accepts a 3d point to constructor, and
locates surface parallel to screen through given point.
Defers initialization to setup method, like LSS2.
"""
def __init__(self, point=(0,0,0)):
"""Initialize new instance. """
self._txtr = None
self._point = point
self._depth = None
def refreshPosition(self):
"""Recalc where in modelspace quad needs to be to fill screen."""
self._dirty = True
def getPoint(self, pt):
"""Get x,y coords of pt in 3d space."""
pt2 = oglu.gluProject(*pt)
return pt2[0], pt2[1]
def update(self):
pass
def commit(self):
pass
def display(self):
"""Display texture. """
if self._dirty:
depth = oglu.gluProject(*self._point)[2]
if depth != self._depth:
bottomleft = oglu.gluUnProject(0,0,depth)
bottomright = oglu.gluUnProject(self._winSize[0],0,depth)
topleft = oglu.gluUnProject(0,self._winSize[1],depth)
topright = oglu.gluUnProject(self._winSize[0],self._winSize[1],depth)
self.dims = topleft, topright, bottomright, bottomleft
width = topright[0] - topleft[0]
height = topright[1] - bottomright[1]
self._qdims = topleft[0], topleft[1], width, height
self._depth = depth
LaminaScreenSurface.display(self)
class LaminaPartialScreenSurface(LaminaPanelSurface):
"""Surface for imagery to overlay. Autofits to actual display.
@ivar surf: surface
@ivar dims: tuple with corners of quad
"""
def __init__(self, width=100, height=100, x = 0, y = 0, depth=0.01):
"""
width, height: px
float x: h-center
int x >= 0: left aligned
int x < 0: right aligned
float y: v-center
int y >= 0: bottom aligned
int y < 0: top aligned
"""
self._txtr = None
self._depth = depth
self._whxy = (width, height, x, y)
self.setup()
self.clear()
def setup(self):
"""Setup stuff, after pygame is inited. """
self.regen()
(width, height, x, y) = self._whxy
self._winSize = pygame.display.get_surface().get_size()
(W, H) = self._winSize
if type(x).__name__ == 'float':
left = int(W/2 - width/2 + x)
right = int(W/2 + width/2 + x)
else:
if x > 0:
left = x
right = x + width
else:
left = W - abs(x) - width
right = W - abs(x)
if type(y).__name__ == 'float':
bottom = int(H/2 - height/2 + y)
top = int(H/2 + height/2 + y)
else:
if y > 0:
bottom = y
top = y + height
else:
bottom = H - abs(y) - height
top = H - abs(y)
self.tblr = [top, bottom, left, right]
self.prev_bl = (0,0,0)
self.refreshPosition(True)
def refreshPosition(self, possibly_changed):
"""Recalc where in modelspace quad needs to be to fill screen."""
ogl.glMatrixMode(ogl.GL_MODELVIEW)
ogl.glLoadIdentity()
ogl.glMatrixMode(ogl.GL_PROJECTION)
ogl.glLoadIdentity()
if not possibly_changed:
return
depth = self._depth
(top, bottom, left, right) = self.tblr
bottomleft = oglu.gluUnProject(left, bottom, depth)
if bottomleft == self.prev_bl:
#~ print "not changed"
return
#~ print "changed"
#~ print bottomleft[0] - self.prev_bl[0]
self.prev_bl = bottomleft
bottomright = oglu.gluUnProject(right, bottom, depth)
topleft = oglu.gluUnProject(left, top, depth)
topright = oglu.gluUnProject(right, top, depth)
self.dims = topleft, topright, bottomright, bottomleft
width = topright[0] - topleft[0]
height = topright[1] - bottomright[1]
self._qdims = topleft[0], topleft[1], width, height
def clear(self):
"""Restore the total transparency to the surface. """
(top, bottom, left, right) = self.tblr
width = right - left
height = top - bottom
width2 = pow2(width)
height2 = pow2(height)
raw = pygame.Surface((width2, height2), pygame.SRCALPHA, 32)
self._surfTotal = raw.convert_alpha()
self._usable = 1.0*width/width2, 1.0*height/height2
self.surf = self._surfTotal.subsurface(0,0,width,height)
self.regen()
def pow2(x):
powerOfTwo = 2
while powerOfTwo < x:
powerOfTwo *= 2
return powerOfTwo
|
alexdu/robot-sandbox
|
sim-files/lamina.py
|
Python
|
gpl-3.0
| 16,152
|
# Copyright 2017 Vector Creations Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Attestations ensure that users and groups can't lie about their memberships.
When a user joins a group the HS and GS swap attestations, which allow them
both to independently prove their membership to third parties. These
attestations have a validity period and so need to be periodically renewed.
If a user leaves (or gets kicked out of) a group, either side can still use
their attestation to "prove" their membership, until the attestation expires.
Therefore attestations shouldn't be relied on to prove membership in important
cases, but can be for less important situations, e.g. showing a user's membership
of groups on their profile, showing flairs, etc.
An attestation is a signed blob of json that looks like:
{
"user_id": "@foo:a.example.com",
"group_id": "+bar:b.example.com",
"valid_until_ms": 1507994728530,
"signatures":{"matrix.org":{"ed25519:auto":"..."}}
}
"""
import logging
import random
from typing import TYPE_CHECKING, Optional, Tuple
from signedjson.sign import sign_json
from twisted.internet.defer import Deferred
from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import JsonDict, get_domain_from_id
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
# Default validity duration for new attestations we create
DEFAULT_ATTESTATION_LENGTH_MS = 3 * 24 * 60 * 60 * 1000
# We add some jitter to the validity duration of attestations so that if we
# add lots of users at once we don't need to renew them all at once.
# The jitter is a multiplier picked randomly between the first and second number
DEFAULT_ATTESTATION_JITTER = (0.9, 1.3)
# Start trying to update our attestations when they come this close to expiring
UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000
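# Worked example (added note, not part of the original module): with the
# defaults above an attestation is valid for 3 days * uniform(0.9, 1.3), i.e.
# roughly 2.7 to 3.9 days, and the renewal loop starts trying to re-sign it
# once it is within 1 day (UPDATE_ATTESTATION_TIME_MS) of expiring.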
class GroupAttestationSigning:
"""Creates and verifies group attestations."""
def __init__(self, hs: "HomeServer"):
self.keyring = hs.get_keyring()
self.clock = hs.get_clock()
self.server_name = hs.hostname
self.signing_key = hs.signing_key
async def verify_attestation(
self,
attestation: JsonDict,
group_id: str,
user_id: str,
server_name: Optional[str] = None,
) -> None:
"""Verifies that the given attestation matches the given parameters.
An optional server_name can be supplied to explicitly set which server's
signature is expected. Otherwise assumes that either the group_id or user_id
is local and uses the other's server as the one to check.
"""
if not server_name:
if get_domain_from_id(group_id) == self.server_name:
server_name = get_domain_from_id(user_id)
elif get_domain_from_id(user_id) == self.server_name:
server_name = get_domain_from_id(group_id)
else:
raise Exception("Expected either group_id or user_id to be local")
if user_id != attestation["user_id"]:
raise SynapseError(400, "Attestation has incorrect user_id")
if group_id != attestation["group_id"]:
raise SynapseError(400, "Attestation has incorrect group_id")
valid_until_ms = attestation["valid_until_ms"]
# TODO: We also want to check that *new* attestations that people give
# us to store are valid for at least a little while.
now = self.clock.time_msec()
if valid_until_ms < now:
raise SynapseError(400, "Attestation expired")
assert server_name is not None
await self.keyring.verify_json_for_server(
server_name,
attestation,
now,
)
def create_attestation(self, group_id: str, user_id: str) -> JsonDict:
"""Create an attestation for the group_id and user_id with default
validity length.
"""
validity_period = DEFAULT_ATTESTATION_LENGTH_MS * random.uniform(
*DEFAULT_ATTESTATION_JITTER
)
valid_until_ms = int(self.clock.time_msec() + validity_period)
return sign_json(
{
"group_id": group_id,
"user_id": user_id,
"valid_until_ms": valid_until_ms,
},
self.server_name,
self.signing_key,
)
class GroupAttestionRenewer:
"""Responsible for sending and receiving attestation updates."""
def __init__(self, hs: "HomeServer"):
self.clock = hs.get_clock()
self.store = hs.get_datastores().main
self.assestations = hs.get_groups_attestation_signing()
self.transport_client = hs.get_federation_transport_client()
self.is_mine_id = hs.is_mine_id
self.attestations = hs.get_groups_attestation_signing()
if not hs.config.worker.worker_app:
self._renew_attestations_loop = self.clock.looping_call(
self._start_renew_attestations, 30 * 60 * 1000
)
async def on_renew_attestation(
self, group_id: str, user_id: str, content: JsonDict
) -> JsonDict:
"""When a remote updates an attestation"""
attestation = content["attestation"]
if not self.is_mine_id(group_id) and not self.is_mine_id(user_id):
raise SynapseError(400, "Neither user not group are on this server")
await self.attestations.verify_attestation(
attestation, user_id=user_id, group_id=group_id
)
await self.store.update_remote_attestion(group_id, user_id, attestation)
return {}
def _start_renew_attestations(self) -> "Deferred[None]":
return run_as_background_process("renew_attestations", self._renew_attestations)
async def _renew_attestations(self) -> None:
"""Called periodically to check if we need to update any of our attestations"""
now = self.clock.time_msec()
rows = await self.store.get_attestations_need_renewals(
now + UPDATE_ATTESTATION_TIME_MS
)
async def _renew_attestation(group_user: Tuple[str, str]) -> None:
group_id, user_id = group_user
try:
if not self.is_mine_id(group_id):
destination = get_domain_from_id(group_id)
elif not self.is_mine_id(user_id):
destination = get_domain_from_id(user_id)
else:
logger.warning(
"Incorrectly trying to do attestations for user: %r in %r",
user_id,
group_id,
)
await self.store.remove_attestation_renewal(group_id, user_id)
return
attestation = self.attestations.create_attestation(group_id, user_id)
await self.transport_client.renew_group_attestation(
destination, group_id, user_id, content={"attestation": attestation}
)
await self.store.update_attestation_renewal(
group_id, user_id, attestation
)
except (RequestSendFailed, HttpResponseException) as e:
logger.warning(
"Failed to renew attestation of %r in %r: %s", user_id, group_id, e
)
except Exception:
logger.exception(
"Error renewing attestation of %r in %r", user_id, group_id
)
for row in rows:
await _renew_attestation((row["group_id"], row["user_id"]))
|
matrix-org/synapse
|
synapse/groups/attestations.py
|
Python
|
apache-2.0
| 8,299
|
from abc import abstractmethod
from neupy.utils import preformat_value
from neupy.helpers.logs import Verbose
from .config import ConfigurableABC
__all__ = ('BaseSkeleton',)
class BaseSkeleton(ConfigurableABC, Verbose):
""" Base class for neural network algorithms.
Methods
-------
fit(\*args, \*\*kwargs)
The same as ``train`` method.
predict(input_data)
Predict value.
"""
def get_params(self, deep=False):
options = {}
for property_name, option in self.options.items():
value = getattr(self, property_name)
property_ = option.value
            # Options that have choices contain values that would
            # be invalid after parameter initialization
is_choices_option = hasattr(option.value, 'choices')
if is_choices_option and value in property_.choices.values():
choices = {v: k for k, v in property_.choices.items()}
value = choices[value]
            # Default value is not always a valid type. For this reason we
            # need to ignore all the values that are the same as the
            # default attribute.
if value != property_.default:
options[property_name] = value
return options
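    # Illustrative note (added, not part of the original module): if an option
    # was declared with choices, e.g. {'relu': relu_function}, the instance
    # stores the mapped value (relu_function) and get_params() above inverts
    # the mapping, so the reported parameter is the original key 'relu'.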
def set_params(self, **params):
self.__dict__.update(params)
return self
@abstractmethod
def train(self, input_data, target_data):
pass
def predict(self, input_data):
pass
def fit(self, X, y, *args, **kwargs):
self.train(X, y, *args, **kwargs)
return self
def class_name(self):
return self.__class__.__name__
def _repr_options(self):
options = []
for option_name in self.options:
option_value = getattr(self, option_name)
option_value = preformat_value(option_value)
option_repr = "{}={}".format(option_name, option_value)
options.append(option_repr)
return ', '.join(options)
def __repr__(self):
class_name = self.class_name()
available_options = self._repr_options()
return "{}({})".format(class_name, available_options)
|
stczhc/neupy
|
neupy/core/base.py
|
Python
|
mit
| 2,210
|
from model.base import Distro, Package
from util import tree
import logging
import os
from os import path
import urllib
import error
from deb.version import Version
import config
logger = logging.getLogger('model.debian')
class DebianDistro(Distro):
"""An ordinary Debian derivative, with no OBS integration."""
def __init__(self, name, parent=None):
super(DebianDistro, self).__init__(name, parent)
|
dbnicholson/merge-our-misc
|
model/debian.py
|
Python
|
gpl-3.0
| 413
|
#!/usr/bin/env python
#
# Protein Engineering Analysis Tool DataBase (PEATDB)
# Copyright (C) 2010 Damien Farrell & Jens Erik Nielsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Contact information:
# Email: Jens.Nielsen_at_gmail.com
# Normal mail:
# Jens Nielsen
# SBBS, Conway Institute
# University College Dublin
# Dublin 4, Ireland
#
from Tkinter import *
import Pmw
import os
import numpy
class Ekin_map_annotate:
def map_datatab2structure(self):
"""If the PEATDB record has a structure, then we allow the user to map each datatab
to a specific part of the protein.
One can map a datatab to an atom, a residue, a chain, or define a structural group and map to it"""
if not self.parent:
import tkMessageBox
tkMessageBox.showinfo("No PEAT",
"This option is only available when Ekin is started from PEAT",
parent=self.ekin_win)
return
#
# Do we have a record name
#
if not self.protein:
import tkMessageBox
tkMessageBox.showinfo("No PEAT record",
"This option is only available when Ekin has been started by clicking a PEAT record",
parent=self.ekin_win)
return
#
# Is there a structure?
#
error=None
if not self.parent.data.has_key('DBinstance'):
error=1
else:
DB=self.parent.data['DBinstance'].DB
if not DB[self.protein].has_key('Structure'):
error=1
else:
print 'Trying to get PDB'
self.pdblines,X=self.parent.get_structure(self.protein,'Structure')
if not self.pdblines:
error=1
if error:
import tkMessageBox
tkMessageBox.showinfo("No Structure in PEAT",
"This option is only available when the PEAT record has a structure",
parent=self.ekin_win)
return
#
# Open the mapping window
#
mapper_win=Toplevel()
mapper_win.title('Map datatab to structure. %s - %s' %(self.protein,self.field))
self.set_geometry(self.ekin_win,mapper_win)
#
# Mapping Manager
#
row=0
Label(mapper_win,text='Mapping Manager',bg='lightblue').grid(row=row,column=0,columnspan=3,sticky='news')
row=row+1
Label(mapper_win,textvariable=self.currentdataset.get()).grid(row=row,column=0,columnspan=3,sticky='news')
#
# Headers
#
#row=row+1
#Label(mapper_win,text='Structural group type').grid(row=row,column=0,sticky='news')
#Label(mapper_win,text='Structural element').grid(row=row,column=1,sticky='news')
#Label(mapper_win,text='Datatab property').grid(row=row,column=2,sticky='news')
#
# Structural groupings for this protein
#
#if not DB[self.protein].has_key('structgroups'):
# DB[self.protein]['structgroups']={}
#structgroups=DB[self.protein]['structgroups'].keys()
#
# Load the residue definitions
#
import Protool.mutate
self.M_instance=Protool.mutate.Mutate(onlydefs=1)
self.AAdefs=self.M_instance.aadefs
#
# Struct group types
#
row=row+1
listbox_height=5
self.group_type_box = Pmw.ScrolledListBox(mapper_win,
items=['Residues','Atoms','Titratable groups'],
labelpos='nw',
label_text='Group type',
listbox_height = listbox_height,
usehullsize = 1,
hull_width = 200,
hull_height = 100,
selectioncommand=self.update_elements)
self.group_type_box.grid(row=row,column=0,columnspan=1,sticky='news')
self.group_type_box.configure(listbox_bg='white')
self.group_type_box.configure(listbox_selectmode='single')
self.group_type_box.configure(listbox_exportselection=0)
#
#
# Dropdown list of elements of each structgroup type
#
self.group_elements_box = Pmw.ScrolledListBox(mapper_win,
items=[],
labelpos='nw',
label_text='Group Elements',
listbox_height = listbox_height,
usehullsize = 1,
hull_width = 200,
hull_height = 100)
self.group_elements_box.grid(row=row,column=1,columnspan=1,sticky='news')
self.group_elements_box.configure(listbox_bg='white')
self.group_elements_box.configure(listbox_selectmode='extended')
self.group_elements_box.configure(listbox_exportselection=0)
# Parameters that we can map to structgroups
import Fitter
self.FIT=Fitter.FITTER('1 pKa 2 Chemical shifts',self)
self.dataprops=['Data source']+self.FIT.parameter_names
self.data_prop_box = Pmw.ScrolledListBox(mapper_win,
items=self.dataprops,
labelpos='nw',
label_text='Data properties',
listbox_height = listbox_height,
usehullsize = 1,
hull_width = 200,
hull_height = 100)
self.data_prop_box.grid(row=row,column=2,columnspan=1,sticky='news')
self.data_prop_box.configure(listbox_bg='white')
self.data_prop_box.configure(listbox_selectmode='extended')
self.data_prop_box.configure(listbox_exportselection=0)
#
# List of existing mappings
#
row=row+1
datatab=self.currentdataset.get()
print 'Loading this datatab in mapper',datatab
mappings=self.get_structmappings(datatab)
self.mapping_box = Pmw.ScrolledListBox(mapper_win,
items=mappings,
labelpos='nw',
label_text='Existing mappings',
listbox_height = 6,
usehullsize = 1,
hull_width = 200,
hull_height = 200)
self.mapping_box.grid(row=row,column=0,columnspan=3,sticky='news')
self.mapping_box.configure(listbox_selectmode='single')
self.mapping_box.configure(listbox_bg='white')
#
# Buttons
#
row=row+1
Button(mapper_win,text='Create mapping',bg='lightgreen',borderwidth=2, relief=GROOVE, command=self.create_mapping).grid(row=row,column=0,sticky='news',padx=2,pady=2)
Button(mapper_win,text='Delete mapping',bg='yellow',borderwidth=2, relief=GROOVE, command=self.delete_mapping).grid(row=row,column=1,sticky='news',padx=2,pady=2)
Button(mapper_win,text='Export',bg='#CFECEC',borderwidth=2, relief=GROOVE, command=self.export_dialog).grid(row=row,column=2,sticky='news',padx=2,pady=2)
row=row+1
Button(mapper_win,text='Close',borderwidth=2, relief=GROOVE,command=self.close_mapper_window).grid(row=row,column=1,columnspan=2,sticky='news',padx=2,pady=2)
#
# Structural group manager
#
#row=row+1
#Label(mapper_win,text='Structural Group Manager',bg='lightblue').grid(row=row,column=0,columnspan=3,sticky='news')
#import os, sys
#PEAT_dir=os.path.split(__file__)[0]
#sys.path.append(PEAT_dir)
#import protein_selector
#row=row+1
#SEL=protein_selector.select_residue(mapper_win,self.pdblines)
#SEL.box.grid(row=row,column=0)
##
#row=row+1
#Label(mapper_win,text='Atoms').grid(row=row,column=1)
#row=row+1
#Button(mapper_win,text='Create new structural grouping',command=self.create_new_structgroup).grid(row=row,column=0)
#Button(mapper_win,text='Add to structural grouping',command=self.add_to_structgroup).grid(row=row,column=1)
#Button(mapper_win,text='Close',command=mapper_win.destroy).grid(row=row,column=2,sticky='news')
mapper_win.rowconfigure(2,weight=1)
self.mapper_win=mapper_win
self.mapper_win.transient(master=self.ekin_win)
return
#
# ----
#
def close_mapper_window(self):
"""Close the mapping window and delete references to it"""
self.mapper_win.destroy()
if hasattr(self,"mapper_win"):
delattr(self,"mapper_win")
return
#
# ----
#
def update_elements(self):
"""Insert a new dropdown list for the element"""
#
# Get the group type
#
elements=None
group_type=self.group_type_box.getcurselection()[0]
import Protool
if group_type=='Residues':
P=Protool.structureIO()
P.parsepdb(self.pdblines)
residues=P.residues.keys()
residues.sort()
elements=[]
for res in residues:
elements.append('%s %s' %(res,P.resname(res)))
elif group_type=='Atoms':
P=Protool.structureIO()
P.parsepdb(self.pdblines)
atoms=P.atoms.keys()
for res in P.residues.keys():
resname=P.resname(res)
if self.AAdefs.has_key(resname):
defatoms=self.AAdefs[resname]['atoms']
#print defatoms
for defatom,coord,dummy in defatoms:
atom_name='%s:%s' %(res,defatom)
if not P.atoms.has_key(atom_name):
atoms.append(atom_name)
#print 'Adding',atom_name
atoms.sort()
elements=[]
for at in atoms:
elements.append(at)
elif group_type=='Titratable groups':
P=Protool.structureIO()
P.parsepdb(self.pdblines)
P.get_titratable_groups()
titgrps=P.titratable_groups.keys()
titgrps.sort()
elements=[]
for res in titgrps:
for titgrp in P.titratable_groups[res]:
name='%s %s' %(res,titgrp['name'])
elements.append(name)
else:
            print 'Unknown group type',group_type
#
# Make the new dropdown list
#
if elements:
self.group_elements_box.setlist(elements)
return
#
# -----
#
def create_mapping(self):
"""Create the mapping"""
g_type=self.group_type_box.getcurselection()
if len(g_type)==0:
return
g_type=g_type[0]
g_elements=self.group_elements_box.getcurselection()
props=self.data_prop_box.getcurselection()
#
if not getattr(self,'structmappings',None):
self.structmappings={}
datatab=self.currentdataset.get()
if not self.structmappings.has_key(datatab):
self.structmappings[datatab]={}
#
# Get the dict of current mappings
#
curmappings=self.structmappings[datatab]
map_keys=curmappings.keys()
map_keys.sort()
#
# Get the number of the last mapping
#
last_num=0
if len(map_keys)>0:
last_num=map_keys[-1]
#
# Add the new mapping
#
if props and g_elements and g_type:
self.structmappings[datatab][last_num+1]={'Group type':g_type,'Group elements':g_elements,'Data property':props}
#
# Display the updated list of mappings
#
mappings=self.get_structmappings(datatab)
self.mapping_box.setlist(mappings)
return
#
# ----
#
def get_structmappings(self,datatab):
"""Get a printable list of structural mappings for this datatab"""
if not getattr(self,'structmappings',None):
return []
if self.structmappings.has_key(datatab):
map_keys=self.structmappings[datatab].keys()
map_keys.sort()
mappings=[]
for map_key in map_keys:
thismap=self.structmappings[datatab][map_key]
mappings.append('%2d: %s mapped to type "%s" elements %s' %(map_key,thismap['Data property'],thismap['Group type'],thismap['Group elements']))
else:
mappings=[]
return mappings
#
# -----
#
def delete_mapping(self):
"""Delete a structmapping"""
delete=self.mapping_box.getcurselection()
if len(delete)==0:
print 'length is zero'
return
delete=str(delete[0])
number=int(delete.split(':')[0])
print 'NUMBER',number
datatab=self.currentdataset.get()
print self.structmappings.keys()
if self.structmappings.has_key(datatab):
if self.structmappings[datatab].has_key(number):
del self.structmappings[datatab][number]
mappings=self.get_structmappings(datatab)
self.mapping_box.setlist(mappings)
return
#
# -----
#
def update_mapping_window(self):
"""Update the mapping window when we change datatabs"""
#
# Update list of current mappings
#
datatab=self.currentdataset.get()
mappings=self.get_structmappings(datatab)
self.mapping_box.setlist(mappings)
#
# Update List of parameters
#
dataprops=['Data source']+self.FIT.parameter_names
self.data_prop_box.setlist(dataprops)
return
def get_assigned(self):
"""Get all unique assigned elements from the mapping dict"""
if not getattr(self,'structmappings',None):
return []
assigned=[]
for key in self.structmappings.keys():
for val in self.structmappings[key].keys():
elements=self.structmappings[key][val]['Group elements']
for e in elements:
if not e in assigned:
assigned.append(e)
return assigned
#
# -----
#
def export_dialog(self):
if hasattr(self, 'export_win'):
if self.export_win != None :
self.export_win.deiconify()
return
self.export_win=Toplevel()
self.export_win.title('Export mappings')
self.set_geometry(self.ekin_win,self.export_win)
#self.setgeometry(self.ekin_win,self.export_win)
self.grouptype = StringVar() #group type
grptypes=['Residues','Atoms','Titratable groups','Any']
self.grouptype.set(grptypes[0])
self.assignedto = StringVar() #titratable group assigned
#self.expdataprops=['Data source']+self.FIT.parameter_names
self.expdataprops=['Data source','pK','span','offset']
self.dataprop = StringVar() #required property
self.dataprop.set(self.expdataprops[0])
elements=self.get_assigned()
elements.append('All')
elements.sort()
self.assignedto.set(elements[0])
row=0
help=Label(self.export_win,text='Use the list of currently assigned mappings to select\n'
+'an assigned residue/element from.\n'
+'A file will be created for the chosen group element',
bg='#CFECEC' )
help.grid(row=row,column=0,columnspan=2,sticky='news',padx=2,pady=2)
row=1
#drop down labels for grp element, data property and assignedto
Label(self.export_win,text='Assigned:').grid(row=row,column=0,sticky='news',padx=2,pady=2)
w = OptionMenu(self.export_win, self.assignedto, *elements)
w.grid(row=row,column=1,sticky='news',padx=2,pady=2)
'''row=row+1
Label(self.export_win,text='group type:').grid(row=row,column=0,sticky='news',padx=2,pady=2)
w = OptionMenu(self.export_win, self.grouptype, *grptypes)
w.grid(row=row,column=1,sticky='news',padx=2,pady=2)'''
row=row+1
Label(self.export_win,text='data property:').grid(row=row,column=0,sticky='news',padx=2,pady=2)
print self.dataprops
w = OptionMenu(self.export_win, self.dataprop, *self.expdataprops)
w.grid(row=row,column=1,sticky='news',padx=2,pady=2)
row=row+1
Button(self.export_win,text='Cancel',bg='#CFECEC',borderwidth=2, relief=GROOVE, width=10,
command=self.close_exp_dialog).grid(row=row,column=0,sticky='news',padx=2,pady=2)
Button(self.export_win,text='Go',bg='#CFECEC',borderwidth=2, relief=GROOVE, width=10,
command=self.export_as_csv).grid(row=row,column=1,sticky='news',padx=2,pady=2)
return
def close_exp_dialog(self):
if hasattr(self,'export_win'):
self.export_win.destroy()
self.export_win=None
return
def choose_savedir(self):
"""Get a directory to save to"""
import tkFileDialog, os
if self.defaultsavedir == None:
self.defaultsavedir = os.getcwd()
dirname=tkFileDialog.askdirectory(parent=self.export_win,
initialdir=self.defaultsavedir)
if not dirname:
print 'Returning'
            return None
return dirname
#
# -----
#
def export_as_csv(self):
"""export struct mapping for specific filters as csv"""
#prompt user for save dir
savedir = self.choose_savedir()
if savedir==None:
return
if self.currplatform == 'Windows':
print 'using windows'
import List_Utils
#sub function for tidiness
def getexplist(assignedto):
reslist={}
reskeys=[]
for key in self.structmappings.keys():
for n in self.structmappings[key].keys():
#check if any dataprop list element contains the key eg 'pK' in pK1, pK2 etc..
datapropkey = List_Utils.elements_contain(self.structmappings[key][n]['Data property'], self.dataprop.get())
if datapropkey != None:
#try to extract the value from the ekin dataset
val = self.get_dataprop_value(key, datapropkey)
print 'found ',val,' for ', datapropkey
#print 'val: ', val
#iterate over group elements list
elements=self.structmappings[key][n]['Group elements']
for e in elements:
if assignedto in e:
reslist[key] = ([key,val])
reskeys.append(key)
if len(reslist.keys())==0:
return
#write the list to a csv file, first add heading
import string
#remove whitespace
name=string.join(assignedto.split(), '')
name=name.replace(':', '')
if self.currplatform == 'Windows':
filename = savedir+'/'+name+'.csv'
else:
filename = os.path.join(savedir, name+'.csv')
print filename
writer = open(filename, "wb")
writer.write(assignedto+'\n')
import csv
csvwriter = csv.writer(open(filename, "a"))
keyssorted = self.sort_by_Num(reskeys)
#print reslist
#print keyssorted
p=[];names=[]
#use key sorted mapping to list residues by number
for item in keyssorted:
k=item[1]
csvwriter.writerow(reslist[k])
p.append(reslist[k][1])
names.append(k)
writer.close()
#do a plot and save to same dir as file
try:
import pylab
except:
return
f=pylab.figure(figsize=(10,4))
pylab.rc("font", family='serif')
a=f.add_subplot(111)
ind=numpy.arange(len(names))
a.bar(ind, p , linewidth=0.5)
a.set_xticks(ind)
a.set_ylabel(self.dataprop.get())
a.set_title(name+' assignments')
a.set_xticklabels(names, rotation='vertical', size=5)
f.savefig(savedir+'/'+name+'.png',dpi=300)
return
if self.assignedto.get() == 'All':
for a in self.get_assigned():
getexplist(a)
else:
getexplist(self.assignedto.get())
self.close_exp_dialog()
return
#
# -----
#
def get_dataprop_value(self, key, dataprop):
"""Annoying but necessary helper func to get value of assigned property
from the ekin fit data"""
tabnum = self.currentdataset.get()
if self.fitter_data.has_key(key):
fitdata = self.fitter_data[key]
else:
return None
model = fitdata['model']
#extracts index number from fit model field name
i = self.FIT.get_param_index(dataprop, model)
print tabnum, key
print fitdata, dataprop, i
if i!=None:
val = fitdata[i]
return val
#
# -----
#
def create_new_structgroup(self):
return
#
# ------
#
def add_to_structgroup(self):
return
def sort_by_Num(self, p):
"""Sort text keys by contained numbers - should be put in utils class"""
splitkeys={}
import re
r=re.compile('\D')
for k in p:
splitkeys[k]=int(r.split(k)[1])
items = splitkeys.items()
items = [(v, k) for (k, v) in items]
items.sort()
return items
|
dmnfarrell/peat
|
PEATDB/Ekin/Ekin_map.py
|
Python
|
mit
| 23,383
|
REQUIREMENT_SCHEMA = {
'description': 'A set of requirement definitions',
'type': ['object', 'null'],
    'additionalProperties': {
'required': ['reason', 'key', 'equals'],
'properties': {
'reason': {
'description':
'Human-readable motivation for this requirement.',
'type': 'string',
},
'class': {
'description': 'Dynamic class to load.',
'type': 'string',
},
'key': {
'description': 'The key for lookup.',
'type': 'string',
},
'equals': {
'description': 'The value to compare the lookup with.',
'type': 'string',
},
},
}
}
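# Hypothetical example (not part of the original module): a requirement set
# that the schema above is meant to validate. The property names come from the
# schema; the specific values and the dynamic class path are invented for
# illustration only.
#
# EXAMPLE_REQUIREMENTS = {
#     'on-master-branch': {
#         'reason': 'Releases may only be built from the master branch.',
#         'class': 'piper.vcs.GitVCS',
#         'key': 'branch',
#         'equals': 'master',
#     },
# }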
|
thiderman/piper
|
piper/schema.py
|
Python
|
mit
| 804
|
from django.forms import widgets
from rest_framework import serializers
from coopInfo.models import Person, State, Cooperative
class PersonSerializer(serializers.ModelSerializer):
class Meta:
model = Person
fields = ('id', 'coopId', 'name', 'ethnicity', 'distric', 'title', 'inBoardSince', 'picture', 'numberOfYearsInBoard')
class StateSerializer(serializers.ModelSerializer):
class Meta:
        model = State
fields = ('id', 'name')
class CooperativeSerializer(serializers.ModelSerializer):
class Meta:
model = Cooperative
fields = ('id', 'stateId', 'name', 'acronym', 'streetAddress', 'website', 'mailAddress',
'email', 'phone', 'countiesServed', 'consumers', 'montlyMeeting', 'annualMeeting',
'numberOfEmployees', 'milesOfLines', 'nextElectionTerms', 'servingTime', 'bylaws', 'is990present')
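# Hypothetical usage sketch (not part of the original module): with a saved
# Person instance, PersonSerializer(person).data returns a dict containing the
# fields listed in Meta, and PersonSerializer(data=payload).is_valid() can be
# used to validate incoming data before saving.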
|
antoineclaval/ruralpowerproject
|
ruralpowerproject/coopInfo/serializers.py
|
Python
|
bsd-3-clause
| 876
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from fabric.tasks import Task
from fabric.contrib import console
from fabric import colors, operations, state, api
from fabric.api import local, run, cd, lcd, env, output, hide, show, warn_only
from fabpress import utils
import os, sys, json
def strtobool(v):
try:
v = v.lower()
except: pass
if v in ['y', 'yes', True, 1, '1']:
return True
if v in ['n', 'no', False, 0, '0']:
return False
raise TypeError('Cannot convert value {0} to boolean'.format(v))
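# Illustrative note (added, not part of the original module): strtobool('YES')
# and strtobool(1) return True, strtobool('n') and strtobool('0') return False,
# and any other value, e.g. strtobool('maybe'), raises TypeError.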
class Argument(object):
"""An utility class to describe required and optional args for tasks"""
parser = None
checker = None
helper = ""
name = ""
required = True
def __init__(self, name, required=True, helper=None, parser=None, checker=None):
self.name = name
self.required = required
self.helper = helper
self.parser = parser
self.checker = checker
class ArgumentError(Exception):
pass
class AbstractBaseTask(object):
"""The base class for every task. Will not be detected by fabric as a registered task,
since it does not inherit from `fabric.tasks.Task`"""
name = None
start_message = None
kwargs = {}
args = {}
hide = ['commands']
show = []
parent = None
expected_args = []
called_via_fab = True
_called_via_fab = True
logging = True
def __call__(self, *args, **kwargs):
"""Fabric uses self.run to run a task,
so we can reliably says that when __call__ is called, the task is run directly via a python script"""
self.parent = kwargs.pop('parent', None)
self._called_via_fab = False
self.logging = kwargs.pop('logging', self.logging)
self.start_message = kwargs.pop('start_message', self.start_message)
return self.run(*args, **kwargs)
def get_expected_args(self):
"""Return a list of expected arguments for this task, including parent classes ones
Required argument are placed first in the list"""
parents = self.__class__.__bases__
expected_args = self.expected_args
for parent in parents:
for arg in parent.expected_args:
if arg not in expected_args:
expected_args = [arg] + expected_args
# put optional args at the end
required_args = [arg for arg in expected_args if arg.required == True]
optional_args = [arg for arg in expected_args if arg.required == False]
return required_args + optional_args
def get_description(self):
return """Task description:\n\n {0}\n""".format(self.__class__.__doc__)
def get_task_id(self):
"""Get the task identifier, such as module.task"""
module = self.__module__.split('.')[-1]
return "{0}.{1}".format(module, self.name)
def get_usage(self):
"""Return the documentation of this task as a string"""
description = self.get_description()
args = ""
for arg in self.get_expected_args():
arg_text = "{0}=<{1}>".format(arg.name, arg.helper)
if not arg.required:
arg_text = "[" + arg_text + "]"
args = args + arg_text + ","
# remove coma
args = args[:-1]
command = "\n" + description + "\nTask usage: \n\n fab fp.{0}:{1}\n".format(self.get_task_id(), args)
return command
def log(self, message, color=None, bold=False, indentation=1, force=False):
if self.logging or force:
i = " " * (self.get_parent_level() + indentation)
message = i + message
if color is not None:
print(getattr(colors, color)(message, bold=bold))
else:
print(message)
def success(self, message, bold=False):
if self.parent is None:
self.log(message, None, bold)
else:
self.log(message, None, bold)
def info(self, message, bold=False):
if self.parent is None:
self.log(message, None, bold)
else:
self.log(message, None, bold)
def error(self, message, bold=False):
self.log(message, "red", bold, force=True)
def pre_run(self):
"""Called just before `self.run`"""
if self.logging:
message = self.get_start_message()
if message:
self.log(message.capitalize(), indentation=0)
def check_arg(self, value, expected):
"""Check if a single argument pass validation"""
e = ArgumentError('Value {0} does not pass validation for argument {1}.\n\tAccepted values: {2}'.format(value, expected.name, expected.helper))
try:
parsed_value = expected.parser(value)
except:
raise e
validate = expected.checker(parsed_value)
if not validate:
raise e
return parsed_value
def get_parent_level(self):
if self.parent is None:
return 0
else:
l = 0
parent = self.parent
while 1:
if parent is not None:
l += 1
parent = parent.parent
else:
break
return l
def check_args(self):
"""Trigger arguments checking"""
expected = self.get_expected_args()
required = [arg for arg in expected if arg.required]
if len(self.args) + len(self.kwargs) > len(expected):
raise ArgumentError('Too many arguments for this task')
if len(self.args) + len(self.kwargs) < len(required):
raise ArgumentError('Missing required arguments for this task')
done = []
new_kwargs = {}
# iterate through non-named args
for index, value in enumerate(self.args):
x = expected[index]
parsed_value = self.check_arg(value, x)
new_kwargs[x.name] = parsed_value
done.append(x.name)
# iterate through named args
for arg, value in self.kwargs.items():
if arg in done:
raise ArgumentError('Multiple values passed for argument {0}'.format(arg))
try:
x = [ex_arg for ex_arg in expected if arg == ex_arg.name][0]
except IndexError:
raise ArgumentError('{0} is not a registered argument for this task'.format(arg))
parsed_value = self.check_arg(value, x)
new_kwargs[x.name] = parsed_value
done.append(x.name)
# arguments will be explicitly passed via keyword, so clean args and update kwargs
self.args = []
self.kwargs.update(new_kwargs)
def setup(self, *args, **kwargs):
"""Setup the config of the task, using arguments"""
self.kwargs = kwargs
self.args = list(args)
self.hide = self.kwargs.pop('hide', self.hide)
self.show = self.kwargs.pop('show', self.show)
# task is called via command-line
if self.called_via_fab:
try:
# trigger arg parsing and validation (only if called via command-line)
self.check_args()
except ArgumentError, e:
self.error("\nThe task was called incorrectly:\n\n {0}.\n\nPlease refer to task usage:".format(str(e)))
self.log(self.get_usage())
sys.exit()
if self.called_via_fab:
message = self.get_task_description()
print(message)
def run(self, *args, **kwargs):
"""Called by fabric. Will set up the task and launch it.
Do not override this in your children tasks, use `operation` instead."""
# check if the task is called via Python or via `fab` in terminal
self.called_via_fab = self._called_via_fab
self._called_via_fab = True
try:
# display help if the users ask for it, then exit
assert args[0] == "help"
self.logging = True
self.log(self.get_usage())
return
except:
pass
self.setup(*args, **kwargs)
self.pre_run()
with hide(*self.hide), show(*self.show):
# run the actual task logic
r = self.operation(*self.args, **self.kwargs)
self.post_run()
return r
def post_run(self):
"""Called after the task has been run"""
with show('everything'):
self.trigger_hooks()
# reset some attributes
self.parent = None
self.start_message = None
def trigger_hooks(self):
"""User can register hooks in settings. We trigger them here"""
task = self.get_task_id()
hooks = utils.setting("hooks")
hooks_to_trigger = [hook for key, hook in hooks.items() if key == task]
for hook in hooks_to_trigger:
self.trigger_hook(hook)
def trigger_hook(self, hook):
"""Trigger a single hook"""
# hook is a callable, so call it
if hasattr(hook, '__call__'):
name = ""
try: name = hook.__name__
        except: name = hook.__class__.__name__
log = "Triggering {0} hook: {1}...".format(self.get_task_id(), name)
# if hook is subclass of AbstractBaseTask, just run a subtask
if isinstance(hook, AbstractBaseTask):
self.subtask(hook, start_message=log)
else:
self.log(log)
hook()
return
# hook is an iterable with the callback first, then arguments
if hasattr(hook, '__iter__'):
callback = hook[0]
name = ""
try: name = callback.__name__
except: name = callback.__class__.__name__
log = "Triggering {0} hook: {1}...".format(self.get_task_id(), name)
# if hook is subclass of AbstractBaseTask, just run a subtask
if isinstance(callback, AbstractBaseTask):
self.subtask(callback, start_message=log, *hook[1:])
else:
self.log(log)
callback(*hook[1:])
def get_task_description(self):
"""Return the task effect, and passed arguments as a string"""
message = self.get_description()
if self.args or self.kwargs:
task_arguments = "\nThe task was launched with the following arguments:\n\n"
for key, value in self.kwargs.items():
task_arguments += " - {0} : {1}\n".format(key, value)
message += task_arguments
return message
def get_start_message(self, *args, **kwargs):
if self.start_message is not None:
return self.start_message
return "Running {0}{1}...".format(self.get_task_id(), self.get_suffix())
def get_suffix(self):
return ''
def subtask(self, task, *args, **kwargs):
"""run a task as a subtask"""
return task(*args, parent=self, **kwargs)
class BaseTask(AbstractBaseTask, Task):
"""Subclass of these class will be detected as fabric tasks"""
pass
class ConfirmTask(object):
"""Will ask for user confirmation before doing anything else"""
confirmed = False
confirm_message = "This is an important choice. "
confirm_choice = "Do you want to continue ?"
default = False
expected_args = [
Argument("confirm", False, "yes|y|1", strtobool, lambda v: isinstance(v, bool)),
]
def setup(self, *args, **kwargs):
"""Detect wheter the user called the task with confirm=yes, else display the confirm message"""
super(ConfirmTask, self).setup(*args, **kwargs)
confirm = strtobool(self.kwargs.pop('confirm', False))
if not confirm:
# we do not ask for confirmation for subtasks
if self.parent is None:
question = self.confirm_message + self.confirm_choice
self.confirmed = console.confirm(question, default=self.default)
if not self.confirmed:
sys.exit('Cancelling task...')
class TargetTask(BaseTask):
"""A task that run on a target (local or remote)"""
target = None
expected_args = [
Argument("target", True, "local|remote", lambda v: v, lambda v: v in ['local', 'remote']),
]
def get_suffix(self):
return " [on {0}]".format(self.target)
def setup(self, *args, **kwargs):
"""Add a self.target attribute"""
super(TargetTask, self).setup(*args, **kwargs)
target = kwargs.get('target')
if target is None:
target = args[0]
self.target = target
def trigger_hooks(self):
"""Trigger target specific hooks"""
super(TargetTask, self).trigger_hooks()
task = self.get_task_id()
hooks = utils.setting("hooks", self.target, {})
hooks_to_trigger = [hook for key, hook in hooks.items() if key == task]
for hook in hooks_to_trigger:
self.trigger_hook(hook)
class RunTarget(AbstractBaseTask):
"""Run a unix command on the target"""
name = "run_target"
logging = False
def operation(self, target, command, capture=True):
if utils.is_local(target):
return local(command, capture=capture)
if utils.is_remote(target):
return run(command)
run_target = RunTarget()
def subtask(task, *args, **kwargs):
"""run a task as a subtask"""
return task(*args, parent=True, **kwargs)
class WP(TargetTask):
"""Run a wp-cli command on the target. You don't need to prefix it with 'wp', it will be added automatically"""
name = "wp"
logging = False
expected_args = [
Argument("command", True, "a wp-cli command, without 'wp'", lambda v: v, lambda v: True),
]
def setup(self, *args, **kwargs):
# do not display output if it is a subtask
if self.called_via_fab:
kwargs['show'] = ["stdout"]
self.logging = True
super(WP, self).setup(*args, **kwargs)
def operation(self, target, command):
"""run a wpcli command on local or remote"""
# get wp-cli path on target
wp = utils.setting('wp-cli', target, 'wp')
full_command = "{0} {1}".format(wp, command)
if utils.is_local(target):
with lcd(utils.setting("path", target)):
r = local(full_command, capture=True)
if utils.is_remote(target):
with cd(utils.setting("path", target)):
r = run(full_command)
if self.called_via_fab:
self.log(r)
return r
wp = WP()
class GetFile(TargetTask):
"""Download a file from origin to target"""
name = "get_file"
hide = ['commands', "warnings"]
def operation(self, target, origin_path, target_path):
self.log("Downloading from {0}:{1} to {2}:{3}...".format(utils.reverse(target), origin_path, target, target_path))
if utils.is_local(target):
operations.get(remote_path=origin_path, local_path=target_path)
if utils.is_remote(target):
operations.put(remote_path=target_path, local_path=origin_path)
get_file = GetFile()
class WPCollectData(TargetTask):
"""Return a dict of data (version , languages, plugins, themes) about the targeted wordpress installation"""
name = "collect_data"
hide = ["everything"]
def operation(self, target):
data = {}
# get wordpress version
data['version'] = self.subtask(wp, target, "core version")
# get wordpress locale
json_data = self.subtask(wp, target, "core language list --format=json")
languages = json.loads(json_data)
active_languages = [language['language'] for language in languages if language['status'] == "active"]
data['locales'] = active_languages
# get plugins data
json_data = self.subtask(wp, target, "plugin list --format=json")
plugins = json.loads(json_data)
active_plugins = [plugin for plugin in plugins if plugin['status'] == "active"]
data['plugins'] = active_plugins
# get themes data
json_data = self.subtask(wp, target, "theme list --format=json")
themes = json.loads(json_data)
active_themes = [theme for theme in themes if theme['status'] == "active"]
data['themes'] = active_themes
return data
collect_data = WPCollectData()
|
EliotBerriot/fabpress
|
fabpress/tasks/base.py
|
Python
|
bsd-3-clause
| 16,731
|
#!/usr/bin/env python3
import math
from functools import cmp_to_key, partial
PRECISION = 2**(-10)
def float_eq(a, b):
"""Check if two floats are equal within a certain accuracy."""
return abs(a - b) < PRECISION
def point_eq(a, b):
"""Check if two points are equal within a certain accuracy."""
return float_eq(a[0], b[0]) and float_eq(a[1], b[1])
def point_cmp(a, b):
"""Check if two points are equal, less than or greater than within a
certain accuracy."""
if point_eq(a, b):
return 0
elif float_eq(a[0], b[0]) and a[1] < b[1]:
return -1
elif float_eq(a[0], b[0]) and a[1] > b[1]:
return 1
elif a < b:
return -1
else:
return 1
def clockwise_from(p0, p1, p2):
"""Returns a value > 0 if p1 is clockwise of p2 from p0, < 0 if counter
clockwise and == 0 if collinear"""
x1, y1 = p1[0] - p0[0], p1[1] - p0[1]
x2, y2 = p2[0] - p0[0], p2[1] - p0[1]
ans = x1*y2 - x2*y1
if float_eq(ans, 0):
return 0
elif ans < 0:
return -1
else:
return 1
def clockwise_and_dist(p0, p1, p2):
"""Returns a value > 0 if p1 is clockwise of p2 from p0, < 0 if counter
clockwise, if the points are collinear it will return 1 if p2 is further
from p0 than p1 and -1 if p2 is closer to p0 than p1"""
a = clockwise_from(p0, p1, p2)
if a == 0:
dist1 = LineSegment(p0, p1).length()
dist2 = LineSegment(p0, p2).length()
if float_eq(dist1, dist2):
return 0
elif dist1 < dist2:
return -1
else:
return 1
else:
return a
def line_intersects_segment(line, line_segment):
"""Returns the intersection the Line and LineSegment or None if they do
not intersect.
This function is useful for splitting polygons by a straight line.
"""
linesegform = line_segment.to_line()
if line.is_parallel_to(linesegform):
return None
else:
p = line.intersection(linesegform)
# Is the intersection on the line_segment?
if line_segment.between(p):
return p
else:
return None
class LineSegment:
"""A straight line bounded by two points.
LineSegment represents a straight line that is bounded by two points.
The datastructure should be considered immutable.
"""
def __init__(self, point1, point2):
"""point1 and point2 represent the two bounding points of the segment
"""
self.points = (point1, point2)
def __repr__(self):
return repr(self.points)
def length(self):
x1, y1 = self.points[0]
x2, y2 = self.points[1]
return math.hypot(x1 - x2, y1 - y2)
def midpoint(self):
"""Return midpoint of LineSegment"""
x1, y1 = self.points[0]
x2, y2 = self.points[1]
return ((x1 + x2) / 2, (y1 + y2) / 2)
def point_by_length(self, length):
"""Return a point on the ray points[0] to points[1] such that the
distance from points[0] to the new point is 'length'"""
r = self.length()
d = length
x1, y1 = self.points[0]
x2, y2 = self.points[1]
x3 = (x2 - x1) * d / r + x1
y3 = (y2 - y1) * d / r + y1
return (x3, y3)
def to_line(self):
"""Converts this LineSegment into a Line (by extending both ends)
Returns a Line
"""
p1, p2 = self.points
x1, y1 = p1
x2, y2 = p2
A = y1 - y2
B = x2 - x1
C = -A*x1 - B*y1
assert float_eq(C, -A*x2 - B*y2)
line = Line(A, B, C)
return line
def between(self, point):
"""Return True if point is between the LineSegment.
A point is between a line segment if it is between the x and y values
that bound the segment. The point need not lie on the segment.
To check if a point lies on a LineSegment use to_line and
Line.side_of_line
"""
def btw(x, a, b):
"""Returns true if x is between a and b (inclusive)"""
s = min(a, b)
t = max(a, b)
return (s <= x or float_eq(s, x)) and (x <= t or float_eq(x, t))
x, y = point
x1, y1 = self.points[0]
x2, y2 = self.points[1]
return btw(x, x1, x2) and btw(y, y1, y2)
class Line:
"""A straight line
Represents a straight line as (A, B, C) where Ax + By + C = 0.
The line is not normalised (I haven't found an elegant way to do this)
Should be treated as an immutable data structure. However it is not
internally so, and utilises caching.
"""
def __init__(self, A, B, C):
"""Ax + By + C = 0 and A + B + C = 1 """
self.A = A
self.B = B
self.C = C
def side_of_line(self, point):
"""Returns the number 1, 0, -1 if point is on the positive side, on the
line, on the negative side of the line respectively."""
A, B, C = self.A, self.B, self.C
x, y = point
value = A * x + B * y + C
if float_eq(value, 0):
return 0
elif value > 0:
return 1
elif value < 0:
return -1
def is_parallel_to(self, line2):
"""Checks if this lines is parallel to line2. """
A1, B1, C1 = self.A, self.B, self.C
A2, B2, C2 = line2.A, line2.B, line2.C
if float_eq(A1 * B2, A2 * B1):
return True
else:
return False
def intersection(self, line2):
"""Calculate the intersection of this line with line2"""
A1, B1, C1 = self.A, self.B, self.C
A2, B2, C2 = line2.A, line2.B, line2.C
y = (A2*C1 - A1*C2) / (A1 * B2 - A2 * B1)
x = (B1*C2 - B2*C1) / (A1 * B2 - A2 * B1)
return (x, y)
def perpendicular(self, point):
"""Return a line perpendicular to this one passing through point"""
x, y = point
A = -self.B
B = self.A
C = -A*x - B*y
return Line(A, B, C)
def parallel(self, point):
"""Return a line parallel to this one passing through point"""
x, y = point
A = self.A
B = self.B
C = -A*x - B*y
return Line(A, B, C)
def __repr__(self):
return '(' + ', '.join(str(s) for s in [self.A, self.B, self.C]) + ')'
class Triangle:
"""A class structure for storing and minipulating a triangle.
The trianlge is represented as a 3-tuple of points. Each point is
represented as a 2-tuple of floats, the first element being the
x-coordinate and the second element being the y-coordinate.
Several useful operations can be applied to a triangle such as, rotate,
translate, split across altitude, and rectanglify.
The Triangle (and underlying tuple) should be treated as an immutable
data structure. All methods return a new triangle and do not modify the
existing one."""
def __init__(self, tpl):
"""tpl is a 3-tuple of coordinates"""
self.points = tpl
def __iter__(self):
"""Returns the tuple of points"""
return iter(self.points)
@property
def segments(self):
"""A list of segments representing the sides of the line.
The ith line will be opposite the ith point
"""
return [LineSegment(self.points[1], self.points[2]),
LineSegment(self.points[0], self.points[2]),
LineSegment(self.points[0], self.points[1])
]
def angle(self, i):
"""Return the angle at the ith point"""
segs = self.segments
a = segs[i].length()
b = segs[(i + 1) % 3].length()
c = segs[(i + 2) % 3].length()
thing = (a**2 - b**2 - c**2)/(-2*b*c)
# Get rid of rounding errors for boundary values
if float_eq(thing, -1):
thing = -1
elif float_eq(thing, 1):
thing = 1
return math.acos(thing)
def largest_angle(self):
"""Return the the number of the point at the largest angle"""
cur_max = 0
big_ang = None
for i in range(len(self.points)):
ang = self.angle(i)
if ang > cur_max:
cur_max = ang
big_ang = i
return big_ang
def area(self):
"""Return area of triangle"""
x0, y0 = self.points[0]
x1, y1 = self.points[1]
x2, y2 = self.points[2]
area = abs(0.5 * ((x1 - x0) * (y2 - y0) - (x2 - x0) * (y1 - y0)))
return area
def rotate(self, pivot, rangle):
"""Return a new triangle rotate clockwise (by angle) around pivot.
pivot -- A coordinate pair
rangle -- The angle to rotate by in radians"""
new_points = list()
px, py = pivot
for x, y in self.points:
dx, dy = x - px, y - py
current_angle = math.atan2(dy, dx)
total_angle = current_angle - rangle
r = math.hypot(dx, dy)
nx = r*math.cos(total_angle) + px
ny = r*math.sin(total_angle) + py
new_points.append((nx, ny))
return Triangle(tuple(new_points))
def translate(self, translation):
"""Return a new triangle translated by 'translation'"""
tx, ty = translation
new_points = [(x + tx, y + ty) for x, y in self.points]
return Triangle(tuple(new_points))
def to_rightangle(self):
"""Splits the triangle into two right-angled triangles"""
# We need to cut the triangle across the largest angle (in case it's
# obtuse)
p = self.points
big_point = self.largest_angle()
other_points = [(big_point + 1) % 3, (big_point + 2) % 3]
cut = self.segments[big_point].to_line().perpendicular(p[big_point])
new_point = line_intersects_segment(cut, self.segments[big_point])
t1 = Triangle((p[big_point], new_point, p[other_points[0]]))
t2 = Triangle((p[big_point], new_point, p[other_points[1]]))
return (t1, t2)
def split(self, line):
"""Splits the Triangle into two shapes separated by line.
All the points of the first shape will be on the non-negative side of
line. All the points of the second shape will be on the non-positive
side of the line.
"""
sides = [line.side_of_line(p) for p in self.points]
# The whole triangle is on the same side of the line
if sides[0] == sides[1] == sides[2]:
if sides[0] == 1:
return (Shape([self]), Shape([]))
else:
return (Shape([]), Shape([self]))
# The triangle is cut into two, on one vertex
elif sorted(sides) == [-1, 0, 1]:
inverse = [None for i in range(3)]
for i, s in enumerate(sides):
inverse[s % 3] = self.points[i]
base = LineSegment(inverse[1], inverse[2])
basepoint = line_intersects_segment(line, base)
pos_shape = Triangle((basepoint, inverse[0], inverse[1]))
neg_shape = Triangle((basepoint, inverse[0], inverse[2]))
return (Shape([pos_shape]), Shape([neg_shape]))
# Line is "tangent" to triangle
elif 0 in sides:
if 1 in sides:
return (Shape([self]), Shape([]))
elif -1 in sides:
return (Shape([]), Shape([self]))
# Line intersects two segments
else:
segs = self.segments
intersects = (line_intersects_segment(line, s) for s in segs)
intersects = [i for i in intersects if i is not None]
assert len(intersects) == 2
sided_points = [[], []]
for i, s in enumerate(sides):
if s == 1:
sided_points[1].append(self.points[i])
elif s == -1:
sided_points[0].append(self.points[i])
if len(sided_points[0]) == 1:
t1 = Triangle((sided_points[0][0], intersects[0], intersects[1]))
t2 = Triangle((sided_points[1][0], intersects[0], intersects[1]))
t3 = Triangle((sided_points[1][0], sided_points[1][1], intersects[0]))
return (Shape([t2, t3]), Shape([t1]))
elif len(sided_points[1]) == 1:
t1 = Triangle((sided_points[1][0], intersects[0], intersects[1]))
t2 = Triangle((sided_points[0][0], intersects[0], intersects[1]))
t3 = Triangle((sided_points[0][0], sided_points[0][1], intersects[0]))
return (Shape([t1]), Shape([t2, t3]))
else:
raise Exception("Segments missing")
class Shape:
"""A class structure for representing and minipulating arbitary shapes.
A shape is defines as a list of triangles (see Triangle). Several
operations can be applied to a shape such as rotation, translation and
splitting the shape into two.
This object should be treated as an immutable data structure. All methods
return new shapes and do not modify the existing one."""
def __init__(self, triangle_list):
"""triangle_list is a list of triangles"""
self.triangles = triangle_list
self._convex_hull = None
def split(self, line):
"""Splits the Shape into two shapes separated by line.
All the points of the first shape will be on the non-negative side of
line. All the points of the second shape will be on the non-positive
side of the line.
"""
up = list()
down = list()
for t in self.triangles:
u, d = t.split(line)
up.extend(u.triangles)
down.extend(d.triangles)
return (Shape(up), Shape(down))
def translate(self, translation):
"""Return a new Shape translated by 'translation'"""
return Shape([t.translate(translation) for t in self.triangles])
def rotate(self, pivot, rangle):
"""Return a new Shape rotate clockwise (by angle) around pivot."""
return Shape([t.rotate(pivot, rangle) for t in self.triangles])
def vertices(self):
"""Return unique vertices inside this shape.
NOTE: this method runs in O(V log V)
"""
vertices = list()
for t in self.triangles:
vertices.extend(t.points)
vertices.sort(key=cmp_to_key(point_cmp))
undup = list()
for v in vertices:
if len(undup) == 0 or not point_eq(v, undup[-1]):
undup.append(v)
return undup
def convex_hull(self):
"""Return the convex hull of the shape.
This uses Graham's scan [O(V log V)] and caches the result inside
self._convex_hull .
"""
verts = self.vertices()
if len(verts) == 0:
self._convex_hull = []
elif self._convex_hull is None:
p = min(verts, key=cmp_to_key(point_cmp))
verts.remove(p)
ps = sorted(verts, key=cmp_to_key(partial(clockwise_and_dist, p)))
ps.append(p)
hull = [p, ps[0]]
i = 1
while clockwise_from(hull[0], hull[1], ps[i]) >= 0:
hull.pop()
hull.append(ps[i])
i += 1
hull.append(ps[i])
for l in ps[i:]:
while clockwise_from(hull[-2], hull[-1], l) >= 0:
hull.pop()
hull.append(l)
hull.pop()
self._convex_hull = hull
return self._convex_hull
def height(self):
"""Return the height of the rectangle"""
a, b, c, d = self.convex_hull()
s1 = LineSegment(a, b)
s2 = LineSegment(b, c)
height = s2 if s1.length() < s2.length() else s1
return height
def width(self):
"""Return the width of the rectangle"""
a, b, c, d = self.convex_hull()
s1 = LineSegment(a, b)
s2 = LineSegment(b, c)
width = s1 if s1.length() < s2.length() else s2
return width
def orientate(self):
"""Rotates the shape so that the first segment in the convex hull is
parallel to the x-axis."""
hull = self.convex_hull()
x1, y1 = hull[0]
x2, y2 = hull[1]
xd, yd = x2 - x1, y2 - y1
return self.rotate(hull[0], math.atan2(yd, xd))
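# --- Editor's usage sketch (hedged addition, not part of the original module) ---
# A minimal demonstration of Triangle.split with a Line, assuming the classes
# above behave as documented: split a right triangle with the vertical line
# x = 2 and compare the areas of the two halves. Run this file directly to try it.
if __name__ == '__main__':
    demo_triangle = Triangle(((0.0, 0.0), (4.0, 0.0), (0.0, 4.0)))
    demo_cut = Line(1.0, 0.0, -2.0)  # Ax + By + C = 0 with A=1, B=0, C=-2, i.e. x = 2
    pos_part, neg_part = demo_triangle.split(demo_cut)
    print("triangles on the non-negative side:", len(pos_part.triangles))
    print("triangles on the non-positive side:", len(neg_part.triangles))
    print("areas:",
          sum(t.area() for t in pos_part.triangles),
          sum(t.area() for t in neg_part.triangles))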
|
moyamo/polygon2square
|
geometry.py
|
Python
|
mit
| 16,498
|
from ruffus import *
import pandas as pd
from util.util import file_by_type
import datatables.traveltime
import pipeline.data_bt
import pipeline.data_vs
# BLUETH_YYYYMMDD.traveltime, VSDATA_YYYYMMDD.volume -> YYYYMMDD.merged
@collate([pipeline.data_bt.import_bt, pipeline.data_vs.import_vs],
regex(r"^data/(BLUETH|VSDATA)_(\d{8})\.(traveltime|volume)$"),
r"data/\2.merged")
def merge_data(infiles, mergefile):
assert (len(infiles) == 2), "Expected exactly 2 files (BLUETH_... and VSDATA_...) to merge"
bt_f = file_by_type(infiles, '.traveltime') # 'BLUETH_...'
vs_f = file_by_type(infiles, '.volume') # 'VSDATA_...'
bt = pd.read_csv(bt_f, header=None, names=['t', 'travel time'])
vs = pd.read_csv(vs_f, header=None, names=['t', 'volume'])
data = pd.merge(vs, bt, on='t', how='left')
datatables.traveltime.write(data, mergefile)
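# Editor's note (hedged): with the collate regex above, a pair such as
# data/BLUETH_20150101.traveltime + data/VSDATA_20150101.volume is merged
# into data/20150101.merged; the date shown is purely illustrative.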
|
anjsimmo/simple-ml-pipeline
|
pipeline/data_merged.py
|
Python
|
mit
| 875
|
#!/usr/bin/env python
# Copyright (c) 2009 Six Apart Ltd.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Six Apart Ltd. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
An example Giant Bomb API client, implemented using remoteobjects.
"""
__version__ = '1.0'
__date__ = '24 August 2009'
__author__ = 'Mark Paschal'
from cgi import parse_qs
from datetime import datetime
from optparse import OptionParser
import sys
import time
from urllib import urlencode
from urlparse import urljoin, urlparse, urlunparse
from remoteobjects import RemoteObject, fields
class Bombject(RemoteObject):
content_types = ('application/json', 'text/javascript')
api_key = None
@classmethod
def get(cls, url, **kwargs):
if not urlparse(url)[1]:
url = urljoin('http://api.giantbomb.com/', url)
self = super(Bombject, cls).get(url, **kwargs)
self = self.filter(api_key=cls.api_key, format='json')
return self
def filter(self, **kwargs):
url = self._location
parts = list(urlparse(url))
query = parse_qs(parts[4])
query = dict([(k, v[0]) for k, v in query.iteritems()])
for k, v in kwargs.iteritems():
if v is None and k in query:
del query[k]
else:
query[k] = v
parts[4] = urlencode(query)
url = urlunparse(parts)
return super(Bombject, self).get(url)
class Image(Bombject):
tiny_url = fields.Field()
small_url = fields.Field()
thumb_url = fields.Field()
screen_url = fields.Field()
super_url = fields.Field()
class Game(Bombject):
id = fields.Field()
name = fields.Field()
api_detail_url = fields.Field()
site_detail_url = fields.Field()
summary = fields.Field(api_name='deck')
description = fields.Field()
image = fields.Object(Image)
published = fields.Datetime(dateformat='%Y-%m-%d %H:%M:%S', api_name='date_added')
updated = fields.Datetime(dateformat='%Y-%m-%d %H:%M:%S', api_name='date_last_updated')
characters = fields.Field()
concepts = fields.Field()
developers = fields.Field()
platforms = fields.Field()
publishers = fields.Field()
@classmethod
def get(cls, url, **kwargs):
res = GameResult.get(url)
res = res.filter()
return res.results[0]
class GameResult(Bombject):
status_code = fields.Field()
error = fields.Field()
total = fields.Field(api_name='number_of_total_results')
count = fields.Field(api_name='number_of_page_results')
limit = fields.Field()
offset = fields.Field()
results = fields.List(fields.Object(Game))
def update_from_dict(self, data):
if not isinstance(data['results'], list):
data = dict(data)
data['results'] = [data['results']]
super(GameResult, self).update_from_dict(data)
def main(argv=None):
if argv is None:
argv = sys.argv
parser = OptionParser()
parser.add_option("-k", "--key", dest="key",
help="your Giant Bomb API key")
opts, args = parser.parse_args()
if opts.key is None:
print >>sys.stderr, "Option --key is required"
return 1
query = ' '.join(args)
Bombject.api_key = opts.key
search = GameResult.get('/search/').filter(resources='game')
search = search.filter(query=query)
if len(search.results) == 0:
print "No results for %r" % query
elif len(search.results) == 1:
(game,) = search.results
print "## %s ##" % game.name
print
print game.summary
else:
print "## Search results for %r ##" % query
for game in search.results:
print game.name
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
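# Editor's note (hedged): based on the OptionParser setup above, a typical
# invocation would look something like the line below; the key value is a
# placeholder, and the remaining arguments are joined into the search query.
#
#     python giantbomb.py --key YOUR_API_KEY metal gear solid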
|
alex/remoteobjects
|
examples/giantbomb.py
|
Python
|
bsd-3-clause
| 5,184
|
import codecs
import glob
import os
from setuptools import setup, find_packages
# Prevent spurious errors during `python setup.py test`, a la
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html:
try:
import multiprocessing
except ImportError:
pass
def read(fname):
fpath = os.path.join(os.path.dirname(__file__), fname)
with codecs.open(fpath, 'r', 'utf8') as f:
return f.read().strip()
setup(
name='socorro',
version='master',
description=('Socorro is a server to accept and process Breakpad'
' crash reports.'),
long_description=open('README.md').read(),
author='Mozilla',
author_email='socorro-dev@mozilla.com',
license='MPL',
url='https://github.com/mozilla/socorro',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MPL License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
],
keywords=['socorro', 'breakpad', 'crash', 'reporting', 'minidump',
'stacktrace'],
packages=find_packages(),
install_requires=[], # use pip -r requirements.txt instead
entry_points={
'console_scripts': [
'socorro = socorrolib.app.socorro_app:SocorroWelcomeApp.run'
],
},
test_suite='nose.collector',
zip_safe=False,
data_files=[
('socorro/external/postgresql/raw_sql/procs',
glob.glob('socorro/external/postgresql/raw_sql/procs/*.sql')),
('socorro/external/postgresql/raw_sql/views',
glob.glob('socorro/external/postgresql/raw_sql/views/*.sql')),
('socorro/external/postgresql/raw_sql/types',
glob.glob('socorro/external/postgresql/raw_sql/types/*.sql')),
('socorro', ['socorro_revision.txt', 'breakpad_revision.txt',
'JENKINS_BUILD_NUMBER'])
],
)
|
KaiRo-at/socorro
|
setup.py
|
Python
|
mpl-2.0
| 2,021
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .tektronixDPO70000 import *
class tektronixDPO71604B(tektronixDPO70000):
"Tektronix DPO71604B IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'DPO71604B')
super(tektronixDPO71604B, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 0
self._bandwidth = 16e9
self._init_channels()
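# Editor's note (hedged): python-ivi drivers are normally instantiated with a
# VISA-style resource string; something along these lines should apply here,
# though the address below is purely illustrative:
#
#     import ivi
#     scope = ivi.tektronix.tektronixDPO71604B("TCPIP0::192.168.1.100::INSTR")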
|
alexforencich/python-ivi
|
ivi/tektronix/tektronixDPO71604B.py
|
Python
|
mit
| 1,563
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for tpu_test_wrapper.py."""
import importlib.util # Python 3 only.
import os
from absl.testing import flagsaver
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.tpu import tpu_test_wrapper
class TPUTestWrapperTest(test.TestCase):
@flagsaver.flagsaver()
def test_flags_undefined(self):
tpu_test_wrapper.maybe_define_flags()
self.assertIn('tpu', flags.FLAGS)
self.assertIn('zone', flags.FLAGS)
self.assertIn('project', flags.FLAGS)
self.assertIn('model_dir', flags.FLAGS)
@flagsaver.flagsaver()
def test_flags_already_defined_not_overridden(self):
flags.DEFINE_string('tpu', 'tpuname', 'helpstring')
tpu_test_wrapper.maybe_define_flags()
self.assertIn('tpu', flags.FLAGS)
self.assertIn('zone', flags.FLAGS)
self.assertIn('project', flags.FLAGS)
self.assertIn('model_dir', flags.FLAGS)
self.assertEqual(flags.FLAGS.tpu, 'tpuname')
@flagsaver.flagsaver(bazel_repo_root='tensorflow/python')
def test_parent_path(self):
filepath = '/filesystem/path/tensorflow/python/tpu/example_test.runfiles/tensorflow/python/tpu/example_test' # pylint: disable=line-too-long
self.assertEqual(
tpu_test_wrapper.calculate_parent_python_path(filepath),
'tensorflow.python.tpu')
@flagsaver.flagsaver(bazel_repo_root='tensorflow/python')
def test_parent_path_raises(self):
filepath = '/bad/path'
with self.assertRaisesWithLiteralMatch(
ValueError,
'Filepath "/bad/path" does not contain repo root "tensorflow/python"'):
tpu_test_wrapper.calculate_parent_python_path(filepath)
def test_is_test_class_positive(self):
class A(test.TestCase):
pass
self.assertTrue(tpu_test_wrapper._is_test_class(A))
def test_is_test_class_negative(self):
class A(object):
pass
self.assertFalse(tpu_test_wrapper._is_test_class(A))
@flagsaver.flagsaver(wrapped_tpu_test_module_relative='.tpu_test_wrapper_test'
)
def test_move_test_classes_into_scope(self):
# Test the class importer by having the wrapper module import this test
# into itself.
with test.mock.patch.object(
tpu_test_wrapper, 'calculate_parent_python_path') as mock_parent_path:
mock_parent_path.return_value = (
tpu_test_wrapper.__name__.rpartition('.')[0])
module = tpu_test_wrapper.import_user_module()
tpu_test_wrapper.move_test_classes_into_scope(module)
self.assertEqual(
tpu_test_wrapper.tpu_test_imported_TPUTestWrapperTest.__name__,
self.__class__.__name__)
@flagsaver.flagsaver(test_dir_base='gs://example-bucket/tempfiles')
def test_set_random_test_dir(self):
tpu_test_wrapper.maybe_define_flags()
tpu_test_wrapper.set_random_test_dir()
self.assertStartsWith(flags.FLAGS.model_dir,
'gs://example-bucket/tempfiles')
self.assertGreater(
len(flags.FLAGS.model_dir), len('gs://example-bucket/tempfiles'))
@flagsaver.flagsaver(test_dir_base='gs://example-bucket/tempfiles')
def test_set_random_test_dir_repeatable(self):
tpu_test_wrapper.maybe_define_flags()
tpu_test_wrapper.set_random_test_dir()
first = flags.FLAGS.model_dir
tpu_test_wrapper.set_random_test_dir()
second = flags.FLAGS.model_dir
self.assertNotEqual(first, second)
def test_run_user_main(self):
test_module = _write_and_load_module("""
VARS = 1
if 'unrelated_if' == 'should_be_ignored':
VARS = 2
if __name__ == '__main__':
VARS = 3
if 'extra_if_at_bottom' == 'should_be_ignored':
VARS = 4
""")
self.assertEqual(test_module.VARS, 1)
tpu_test_wrapper.run_user_main(test_module)
self.assertEqual(test_module.VARS, 3)
def test_run_user_main_missing_if(self):
test_module = _write_and_load_module("""
VARS = 1
""")
self.assertEqual(test_module.VARS, 1)
with self.assertRaises(NotImplementedError):
tpu_test_wrapper.run_user_main(test_module)
def test_run_user_main_double_quotes(self):
test_module = _write_and_load_module("""
VARS = 1
if "unrelated_if" == "should_be_ignored":
VARS = 2
if __name__ == "__main__":
VARS = 3
if "extra_if_at_bottom" == "should_be_ignored":
VARS = 4
""")
self.assertEqual(test_module.VARS, 1)
tpu_test_wrapper.run_user_main(test_module)
self.assertEqual(test_module.VARS, 3)
def test_run_user_main_test(self):
test_module = _write_and_load_module("""
from tensorflow.python.platform import test as unique_name
class DummyTest(unique_name.TestCase):
def test_fail(self):
self.fail()
if __name__ == '__main__':
unique_name.main()
""")
# We're actually limited in what we can test here -- we can't call
# test.main() without deleting this current test from locals(), or we'll
# recurse infinitely. We settle for testing that the test imports and calls
# the right test module.
with test.mock.patch.object(test, 'main') as mock_main:
tpu_test_wrapper.run_user_main(test_module)
mock_main.assert_called_once()
def _write_and_load_module(source):
fp = os.path.join(test.get_temp_dir(), 'testmod.py')
with open(fp, 'w') as f:
f.write(source)
spec = importlib.util.spec_from_file_location('testmodule', fp)
test_module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(test_module)
return test_module
if __name__ == '__main__':
test.main()
|
tensorflow/tensorflow
|
tensorflow/python/tpu/tpu_test_wrapper_test.py
|
Python
|
apache-2.0
| 6,127
|
"""Parent class for every Overkiz device."""
from __future__ import annotations
from enum import unique
from typing import cast
from pyoverkiz.enums import OverkizAttribute, OverkizState
from pyoverkiz.models import Device
from homeassistant.backports.enum import StrEnum
from homeassistant.helpers.entity import DeviceInfo, EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import OverkizDataUpdateCoordinator
from .executor import OverkizExecutor
class OverkizEntity(CoordinatorEntity):
"""Representation of an Overkiz device entity."""
coordinator: OverkizDataUpdateCoordinator
def __init__(
self, device_url: str, coordinator: OverkizDataUpdateCoordinator
) -> None:
"""Initialize the device."""
super().__init__(coordinator)
self.device_url = device_url
self.base_device_url, *_ = self.device_url.split("#")
self.executor = OverkizExecutor(device_url, coordinator)
self._attr_assumed_state = not self.device.states
self._attr_available = self.device.available
self._attr_unique_id = self.device.device_url
self._attr_name = self.device.label
self._attr_device_info = self.generate_device_info()
@property
def device(self) -> Device:
"""Return Overkiz device linked to this entity."""
return self.coordinator.data[self.device_url]
def generate_device_info(self) -> DeviceInfo:
"""Return device registry information for this entity."""
# Some devices, such as the Smart Thermostat, have several devices in one physical device,
# with same device url, terminated by '#' and a number.
# In this case, we use the base device url as the device identifier.
if "#" in self.device_url and not self.device_url.endswith("#1"):
# Only return the url of the base device, to inherit device name and model from parent device.
return {
"identifiers": {(DOMAIN, self.executor.base_device_url)},
}
manufacturer = (
self.executor.select_attribute(OverkizAttribute.CORE_MANUFACTURER)
or self.executor.select_state(OverkizState.CORE_MANUFACTURER_NAME)
or self.coordinator.client.server.manufacturer
)
model = (
self.executor.select_state(
OverkizState.CORE_MODEL,
OverkizState.CORE_PRODUCT_MODEL_NAME,
OverkizState.IO_MODEL,
)
or self.device.widget.value
)
return DeviceInfo(
identifiers={(DOMAIN, self.executor.base_device_url)},
name=self.device.label,
manufacturer=str(manufacturer),
model=str(model),
sw_version=cast(
str,
self.executor.select_attribute(OverkizAttribute.CORE_FIRMWARE_REVISION),
),
hw_version=self.device.controllable_name,
suggested_area=self.coordinator.areas[self.device.place_oid],
via_device=(DOMAIN, self.executor.get_gateway_id()),
configuration_url=self.coordinator.client.server.configuration_url,
)
class OverkizDescriptiveEntity(OverkizEntity):
"""Representation of a Overkiz device entity based on a description."""
def __init__(
self,
device_url: str,
coordinator: OverkizDataUpdateCoordinator,
description: EntityDescription,
) -> None:
"""Initialize the device."""
super().__init__(device_url, coordinator)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}-{self.entity_description.key}"
if self.entity_description.name:
self._attr_name = f"{super().name} {self.entity_description.name}"
# Used by state translations for sensor and select entities
@unique
class OverkizDeviceClass(StrEnum):
"""Device class for Overkiz specific devices."""
BATTERY = "overkiz__battery"
DISCRETE_RSSI_LEVEL = "overkiz__discrete_rssi_level"
MEMORIZED_SIMPLE_VOLUME = "overkiz__memorized_simple_volume"
OPEN_CLOSED_PEDESTRIAN = "overkiz__open_closed_pedestrian"
PRIORITY_LOCK_ORIGINATOR = "overkiz__priority_lock_originator"
SENSOR_DEFECT = "overkiz__sensor_defect"
SENSOR_ROOM = "overkiz__sensor_room"
|
rohitranjan1991/home-assistant
|
homeassistant/components/overkiz/entity.py
|
Python
|
mit
| 4,412
|
import math
import time
t1 = time.time()
# f(0) = 1 000
# f(1) = 1 001
# f(2) = 2 010
# f(3) = 1 011
# f(4) = 3 100
# f(5) = 2 101
# f(6) = 3 110
# f(7) = 1 111
# f(8) = 4 1000
def tobin(n):
if n == 0:
return [0]
r = []
while n != 0:
if n%2 == 0:
r.insert(0,0)
else:
r.insert(0,1)
n = n//2
return r
def fbin(lb):
l = len(lb)
if l == 1:
return 1
if lb[0] > 0:
if lb[1] == 0:
return fbin(lb[1:])+fbin([2]+lb[2:])
else:
for i in range(2,l):
if lb[i] == 2:
break
if lb[i] == 0:
return fbin(lb[1:])+fbin([2]+lb[i+1:])
return fbin(lb[1:])
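# Editor's note: f() below returns fn(n) immediately, so the fbin/tobin branch
# after its first return is unreachable (left over from an earlier approach).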
def f(n):
return fn(n)
if n == 0:
return 1
return fbin(tobin(n))
N = 10000000
ns = [0]*N
ns[0] = 1
ns[1] = 1
ns[2] = 2
ns[3] = 1
def fn(n):
global ns
if n < N:
if ns[n] > 0:
return ns[n]
d = 1
while d <=n:
d *= 2
d = d//2
r = 0
nn = n-d
r += fn(nn)
d = d//2
if nn >= d:
nn -= d
d = d//2
while nn >= d and d > 0:
nn -= d
d = d//2
r += fn(nn+d)
elif d > 0:
r += fn(nn+d)
#print(n,r)
if n < N:
ns[n] = r
return r
n = 25
a = f(5**n)
b = f(2*(5**n))
#print(a,b)
print((b-a)*n+a)
print("time:",time.time()-t1)
|
Adamssss/projectEuler
|
pb169.py
|
Python
|
mit
| 1,470
|
import os
import tempita
import re
def instantiate_template(name, **args):
path = os.path.join(os.path.dirname(__file__), name)
with file(path) as f:
template = f.read()
code = tempita.sub(template, **args)
# Strip comments
code, n = re.subn(r'/\*.*?\*/', '', code, flags=re.DOTALL)
# Strip empty lines
lines = [line for line in code.split('\n') if len(line.strip()) > 0]
code = '\n'.join(lines)
# Output processed file for debugging
with file(path[:-len('.in')], 'w') as f:
f.write(code)
return code
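# Editor's note (hedged): instantiate_template expects `name` to end in '.in';
# the rendered, comment-stripped source is also written next to the template
# with that suffix removed (see the debugging write above).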
|
wavemoth/wavemoth
|
wavemoth/cuda/core.py
|
Python
|
gpl-2.0
| 563
|
#!/usr/bin/env python
"""Load all aff4 objects in order to populate the registry.
"""
# pylint: disable=unused-import
from grr.lib.aff4_objects import aff4_grr
from grr.lib.aff4_objects import collects
from grr.lib.aff4_objects import cronjobs
from grr.lib.aff4_objects import filestore
from grr.lib.aff4_objects import security
from grr.lib.aff4_objects import software
from grr.lib.aff4_objects import standard
from grr.lib.aff4_objects import stats
from grr.lib.aff4_objects import stats_store
from grr.lib.aff4_objects import user_managers
from grr.lib.aff4_objects import users
|
pidydx/grr
|
grr/lib/aff4_objects/registry_init.py
|
Python
|
apache-2.0
| 583
|
# noinspection PyPackageRequirements
from Crypto.Cipher import AES
import base64
#AES256
BLOCK_SIZE = 32
PADDING = '{'
DECRYPTION_CHECK = "|PASSMON|"
def aes_encrypt(word, key):
"""Encrypts password"""
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING
encode_aes = lambda c, s: base64.b64encode(c.encrypt(pad(s)))
cipher = AES.new(key)
encoded = encode_aes(cipher, DECRYPTION_CHECK + str(word))
return encoded
def aes_decrypt(encrypted_string, key):
"""Decrypts password"""
try:
decode_aes = lambda c, e: c.decrypt(base64.b64decode(e)).decode('utf-8').rstrip(PADDING)
cipher = AES.new(key)
decoded = decode_aes(cipher, encrypted_string)
if decoded[0:int(len(DECRYPTION_CHECK))] == DECRYPTION_CHECK:
return decoded[len(DECRYPTION_CHECK):len(decoded)]
else:
return "Wrong Cipher"
except:
return "Cipher needs to be 32 bytes long"
|
Plasticoo/Pass_Shelter3
|
src/encryption/aes_encryption.py
|
Python
|
mit
| 936
|