| code (string, lengths 2-1.05M) | repo_name (string, lengths 5-104) | path (string, lengths 4-251) | language (string, 1 class) | license (string, 15 classes) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
from emojibot.utils.response import Response
def test_constructor():
response = Response()
assert isinstance(response, Response)
|
gramhagen/emojibot
|
tests/utils/test_response.py
|
Python
|
mit
| 164
|
from __future__ import absolute_import
# import jinja2
from jinja2.ext import Extension
from .templatetags.promotions_tags import promo_ballance
class PromotionsExtension(Extension):
def __init__(self, environment):
super(PromotionsExtension, self).__init__(environment)
environment.filters["promo_ballance"] = promo_ballance
# Nicer import name
core = PromotionsExtension
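# Usage sketch (illustrative; the Environment setup below is an assumption,
# not part of this file): register the extension by its dotted path, after
# which the "promo_ballance" filter becomes available inside templates.
#
#   from jinja2 import Environment
#   env = Environment(extensions=["tovp.promotions.jinja2tags.core"])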
|
phani00/tovp
|
tovp/promotions/jinja2tags.py
|
Python
|
mit
| 398
|
# Passability types.
#
# @author Oktay Acikalin <oktay.acikalin@gmail.com>
# @copyright Oktay Acikalin
# @license MIT (LICENSE.txt)
# from json import load
from collections import OrderedDict
# from os.path import splitext, dirname, join
from os.path import dirname, join
# locals().update(load(open('%s.json' % splitext(__file__)[0], 'rb'), object_pairs_hook=OrderedDict))
# filename = join(dirname(__file__), filename)
filename = join(dirname(__file__), 'passability.png')
# Replace all tiles in map and just use the definitions below.
sprites = OrderedDict((
('wall', {'none': [[[0, 0, 16, 16], [0, 0], [0, 0], 60]]}),
('platform', {'none': [[[48, 0, 16, 16], [48, 0], [0, 0], 60]]}),
('climb_platform', {'none': [[[16, 0, 16, 16], [16, 0], [0, 0], 60]]}),
('climb', {'none': [[[32, 0, 16, 16], [32, 0], [0, 0], 60]]}),
))
tile_size = [16, 16]
|
theblacklion/joe-danger
|
data/gfx/passability.py
|
Python
|
mit
| 873
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/mission/quest_item/shared_sayama_edosun_q2_needed.iff"
result.attribute_template_id = -1
result.stfName("loot_nboo_n","sayama_edosun_q2_needed")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/mission/quest_item/shared_sayama_edosun_q2_needed.py
|
Python
|
mit
| 481
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Building()
result.template = "object/building/poi/shared_dathomir_freedprisonerscamp_large1.iff"
result.attribute_template_id = -1
result.stfName("poi_n","base_poi_building")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/building/poi/shared_dathomir_freedprisonerscamp_large1.py
|
Python
|
mit
| 465
|
#
# Copyright (c) 2015 Open-RnD Sp. z o.o.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
|
open-rnd/ros3d-www
|
ros3dui/system/__init__.py
|
Python
|
mit
| 1,104
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/furniture/all/shared_frn_all_throwpillow_med_s02.iff"
result.attribute_template_id = 6
result.stfName("frn_n","frn_throwpillow")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/furniture/all/shared_frn_all_throwpillow_med_s02.py
|
Python
|
mit
| 465
|
#! /usr/bin/env python
from openturns import *
from math import *
TESTPREAMBLE()
RandomGenerator.SetSeed(0)
try:
    # Instantiate one distribution object
dim = 1
meanPoint = NumericalPoint(dim, 1.0)
meanPoint[0] = 0.5
sigma = NumericalPoint(dim, 1.0)
sigma[0] = 2.0
R = CorrelationMatrix(dim)
distribution1 = Normal(meanPoint, sigma, R)
    # Instantiate another distribution object
meanPoint[0] = -1.5
sigma[0] = 4.0
distribution2 = Normal(meanPoint, sigma, R)
# Test for sampling
size = 2000
nBars = 20
sample1 = distribution1.getSample(size)
sample2 = distribution2.getSample(size)
# Construct histograms
epsilon = 0.1
min1 = sample1.getMin()[0]
max1 = sample1.getMax()[0] + epsilon
min2 = sample2.getMin()[0]
max2 = sample2.getMax()[0] + epsilon
tmp = NumericalPoint(2)
tmp[0] = (max1 - min1) / nBars
data1 = NumericalSample(nBars, tmp)
tmp[0] = (max2 - min2) / nBars
data2 = NumericalSample(nBars, tmp)
for i in range(size):
index = long(floor((sample1[i, 0] - min1) / (max1 - min1) * nBars))
data1[index, 1] += 1
index = long(floor((sample2[i, 0] - min2) / (max2 - min2) * nBars))
data2[index, 1] += 1
# Create an empty graph
myGraph = Graph("Some barplots", "y", "frequency", True, "topleft")
# Create the first barplot
myBarPlot1 = BarPlot(data1, min1, "blue", "shaded", "dashed", "histogram1")
# Then, draw it
myGraph.add(myBarPlot1)
myGraph.draw("Graph_BarPlot_a_OT", 640, 480)
# Check that the correct files have been generated by computing their
# checksum
# Create the second barplot
myBarPlot2 = BarPlot(data2, min2, "red", "solid", "solid", "histogram2")
# Add it to the graph and draw everything
myGraph.add(myBarPlot2)
myGraph.draw("Graph_BarPlot_b_OT", 640, 480)
except:
import sys
print "t_BarPlot_std.py", sys.exc_type, sys.exc_value
|
sofianehaddad/ot-svn
|
python/test/t_BarPlot_std.py
|
Python
|
mit
| 1,975
|
"""
Constants
"""
TOOL_FREEBAYES = 'freebayes'
TOOL_PINDEL = 'pindel'
TOOL_DELLY = 'delly'
TOOL_LUMPY = 'lumpy'
|
woodymit/millstone
|
genome_designer/pipeline/variant_calling/constants.py
|
Python
|
mit
| 113
|
def count(S, m, n):
table = [[0 for x in range(m)] for x in range(n+1)]
for i in range(m):
table[0][i] = 1
for i in range(1, n+1):
for j in range(m):
x = table[i - S[j]][j] if i-S[j] >= 0 else 0
y = table[i][j-1] if j >= 1 else 0
table[i][j] = x + y
return table[n][m-1]
n,m = [int(a) for a in raw_input().strip().split(' ')]
s = [int(a) for a in raw_input().strip().split(' ')]
print count(s, m, n)
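# Worked example (annotation, not part of the original solution): for coins
# S = [1, 2, 3] (m = 3) and target n = 4 there are four combinations
# (1+1+1+1, 1+1+2, 2+2, 1+3), so count(S, 3, 4) returns 4.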
|
Sunhick/hacker_rank
|
Algorithms/Dynamic Programming/The-Coin-Change-Problem.py
|
Python
|
mit
| 496
|
import _plotly_utils.basevalidators
class AnnotationsValidator(_plotly_utils.basevalidators.CompoundArrayValidator):
def __init__(self, plotly_name="annotations", parent_name="layout.scene", **kwargs):
super(AnnotationsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Annotation"),
data_docs=kwargs.pop(
"data_docs",
"""
align
Sets the horizontal alignment of the `text`
within the box. Has an effect only if `text`
spans two or more lines (i.e. `text` contains
one or more <br> HTML tags) or if an explicit
width is set to override the text width.
arrowcolor
Sets the color of the annotation arrow.
arrowhead
Sets the end annotation arrow head style.
arrowside
Sets the annotation arrow head position.
arrowsize
Sets the size of the end annotation arrow head,
relative to `arrowwidth`. A value of 1
(default) gives a head about 3x as wide as the
line.
arrowwidth
Sets the width (in px) of annotation arrow
line.
ax
Sets the x component of the arrow tail about
the arrow head (in pixels).
ay
Sets the y component of the arrow tail about
the arrow head (in pixels).
bgcolor
Sets the background color of the annotation.
bordercolor
Sets the color of the border enclosing the
annotation `text`.
borderpad
Sets the padding (in px) between the `text` and
the enclosing border.
borderwidth
Sets the width (in px) of the border enclosing
the annotation `text`.
captureevents
Determines whether the annotation text box
captures mouse move and click events, or allows
those events to pass through to data points in
the plot that may be behind the annotation. By
default `captureevents` is False unless
`hovertext` is provided. If you use the event
`plotly_clickannotation` without `hovertext`
you must explicitly enable `captureevents`.
font
Sets the annotation text font.
height
Sets an explicit height for the text box. null
(default) lets the text set the box height.
Taller text will be clipped.
hoverlabel
:class:`plotly.graph_objects.layout.scene.annot
ation.Hoverlabel` instance or dict with
compatible properties
hovertext
Sets text to appear when hovering over this
annotation. If omitted or blank, no hover label
will appear.
name
When used in a template, named items are
created in the output figure in addition to any
items the figure already has in this array. You
can modify these items in the output figure by
making your own item with `templateitemname`
matching this `name` alongside your
modifications (including `visible: false` or
`enabled: false` to hide it). Has no effect
outside of a template.
opacity
Sets the opacity of the annotation (text +
arrow).
showarrow
Determines whether or not the annotation is
drawn with an arrow. If True, `text` is placed
near the arrow's tail. If False, `text` lines
up with the `x` and `y` provided.
standoff
Sets a distance, in pixels, to move the end
arrowhead away from the position it is pointing
at, for example to point at the edge of a
marker independent of zoom. Note that this
shortens the arrow from the `ax` / `ay` vector,
in contrast to `xshift` / `yshift` which moves
everything by this amount.
startarrowhead
Sets the start annotation arrow head style.
startarrowsize
Sets the size of the start annotation arrow
head, relative to `arrowwidth`. A value of 1
(default) gives a head about 3x as wide as the
line.
startstandoff
Sets a distance, in pixels, to move the start
arrowhead away from the position it is pointing
at, for example to point at the edge of a
marker independent of zoom. Note that this
shortens the arrow from the `ax` / `ay` vector,
in contrast to `xshift` / `yshift` which moves
everything by this amount.
templateitemname
Used to refer to a named item in this array in
the template. Named items from the template
will be created even without a matching item in
the input figure, but you can modify one by
making an item with `templateitemname` matching
its `name`, alongside your modifications
(including `visible: false` or `enabled: false`
to hide it). If there is no template or no
matching item, this item will be hidden unless
you explicitly show it with `visible: true`.
text
Sets the text associated with this annotation.
Plotly uses a subset of HTML tags to do things
like newline (<br>), bold (<b></b>), italics
(<i></i>), hyperlinks (<a href='...'></a>).
Tags <em>, <sup>, <sub> <span> are also
supported.
textangle
Sets the angle at which the `text` is drawn
with respect to the horizontal.
valign
Sets the vertical alignment of the `text`
within the box. Has an effect only if an
explicit height is set to override the text
height.
visible
Determines whether or not this annotation is
visible.
width
Sets an explicit width for the text box. null
(default) lets the text set the box width.
Wider text will be clipped. There is no
automatic wrapping; use <br> to start a new
line.
x
Sets the annotation's x position.
xanchor
Sets the text box's horizontal position anchor
This anchor binds the `x` position to the
"left", "center" or "right" of the annotation.
For example, if `x` is set to 1, `xref` to
"paper" and `xanchor` to "right" then the
right-most portion of the annotation lines up
with the right-most edge of the plotting area.
If "auto", the anchor is equivalent to "center"
for data-referenced annotations or if there is
an arrow, whereas for paper-referenced with no
arrow, the anchor picked corresponds to the
closest side.
xshift
Shifts the position of the whole annotation and
arrow to the right (positive) or left
(negative) by this many pixels.
y
Sets the annotation's y position.
yanchor
Sets the text box's vertical position anchor
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the annotation.
For example, if `y` is set to 1, `yref` to
"paper" and `yanchor` to "top" then the top-
most portion of the annotation lines up with
the top-most edge of the plotting area. If
"auto", the anchor is equivalent to "middle"
for data-referenced annotations or if there is
an arrow, whereas for paper-referenced with no
arrow, the anchor picked corresponds to the
closest side.
yshift
Shifts the position of the whole annotation and
arrow up (positive) or down (negative) by this
many pixels.
z
Sets the annotation's z position.
""",
),
**kwargs
)
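# Usage sketch (illustrative; plotly normally instantiates validators
# internally when figure properties are assigned):
#
#   v = AnnotationsValidator()
#   coerced = v.validate_coerce([{"text": "peak", "x": 1, "y": 2, "z": 3}])
#   # coerced is a sequence of layout.scene.Annotation objects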
|
plotly/python-api
|
packages/python/plotly/plotly/validators/layout/scene/_annotations.py
|
Python
|
mit
| 9,045
|
import unittest
import numpy as np
import pyoptima as opt
class SimulatedAnnealingTest(unittest.TestCase):
def test_with_parabola(self):
""" Test with a simple parabolic function with 2 variables """
        def neighbour_func(params):
            # copy first so the caller's dict is not mutated in place
            new_params = dict(params)
            new_params['x0'] += np.random.uniform(-1., 1.)
            new_params['x1'] += np.random.uniform(-1., 1.)
            return new_params
hyper_params = {
'temperature_func': lambda t, i: t/np.log(i+2),
'neighbour_func': neighbour_func,
'initial_temp': 1000000.0
}
params = {}
params["x0"] = np.random.uniform(-10., 10.)
params["x1"] = np.random.uniform(-10., 10.)
s = opt.SimulatedAnnealing(params, hyper_params)
s.optimize(opt.parabola, 100000)
bst_solution = s.get_best_parameters()
self.assertAlmostEqual(bst_solution['x0'], 0, 2)
self.assertAlmostEqual(bst_solution['x1'], 0, 2)
if __name__ == "__main__":
unittest.main()
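# Note (annotation, not part of the original test): the temperature_func above
# implements logarithmic cooling, T_i = T_0 / ln(i + 2), the classical schedule
# under which simulated annealing converges in probability given a
# sufficiently large initial temperature. Run with:
#   python simulated_annealing_test.py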
|
samueljackson92/metaopt
|
python_tests/simulated_annealing_test.py
|
Python
|
mit
| 1,038
|
from django.contrib import admin
# Register your models here.
from .models import Author, Genre, Book, BookInstance, Language
"""
# Minimal registration of Models.
admin.site.register(Book)
admin.site.register(Author)
admin.site.register(BookInstance)
admin.site.register(Genre)
admin.site.register(Language)
"""
admin.site.register(Genre)
admin.site.register(Language)
class BooksInline(admin.TabularInline):
"""
Defines format of inline book insertion (used in AuthorAdmin)
"""
model = Book
@admin.register(Author)
class AuthorAdmin(admin.ModelAdmin):
"""
Administration object for Author models.
Defines:
- fields to be displayed in list view (list_display)
- orders fields in detail view (fields), grouping the date fields horizontally
- adds inline addition of books in author view (inlines)
"""
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
inlines = [BooksInline]
class BooksInstanceInline(admin.TabularInline):
"""
Defines format of inline book instance insertion (used in BookAdmin)
"""
model = BookInstance
class BookAdmin(admin.ModelAdmin):
"""
Administration object for Book models.
Defines:
- fields to be displayed in list view (list_display)
- adds inline addition of book instances in book view (inlines)
"""
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
admin.site.register(Book, BookAdmin)
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
"""
Administration object for BookInstance models.
Defines:
- fields to be displayed in list view (list_display)
- filters that will be displayed in sidebar (list_filter)
- grouping of fields into sections (fieldsets)
"""
list_display = ('book', 'status', 'borrower','due_back', 'id')
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book','imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back','borrower')
}),
)
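# Note (annotation, not part of the original module): the @admin.register(...)
# decorator used above is equivalent to calling admin.site.register(Model,
# ModelAdmin) after the class definition, which is why Book is registered
# explicitly with BookAdmin while Author and BookInstance use the decorator.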
|
DarknessSwitch/django-tutorial
|
catalog/admin.py
|
Python
|
cc0-1.0
| 2,208
|
# *****************************************************************************
# Copyright (c) 2014, 2018 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
# *****************************************************************************
from datetime import datetime
import json
import logging
import threading
import paho.mqtt.client as paho
import pytz
from wiotp.sdk import (
AbstractClient,
ConfigurationException,
ConnectionException,
MissingMessageEncoderException,
InvalidEventException,
)
from wiotp.sdk.device.command import Command
from wiotp.sdk.device.config import DeviceClientConfig
class DeviceClient(AbstractClient):
"""
Extends #wiotp.common.AbstractClient to implement a device client supporting
messaging over MQTT
# Parameters
options (dict): Configuration options for the client
logHandlers (list<logging.Handler>): Log handlers to configure. Defaults to `None`,
which will result in a default log handler being created.
"""
_COMMAND_TOPIC = "iot-2/cmd/+/fmt/+"
def __init__(self, config, logHandlers=None):
self._config = DeviceClientConfig(**config)
AbstractClient.__init__(
self,
domain=self._config.domain,
organization=self._config.orgId,
clientId=self._config.clientId,
username=self._config.username,
password=self._config.password,
port=self._config.port,
transport=self._config.transport,
cleanStart=self._config.cleanStart,
sessionExpiry=self._config.sessionExpiry,
keepAlive=self._config.keepAlive,
caFile=self._config.caFile,
logLevel=self._config.logLevel,
logHandlers=logHandlers,
)
# Add handler for commands if not connected to QuickStart
if not self._config.isQuickstart():
self.client.message_callback_add("iot-2/cmd/+/fmt/+", self._onCommand)
# Initialize user supplied callback
self.commandCallback = None
# Register startup subscription list (only for non-Quickstart)
if not self._config.isQuickstart():
self._subscriptions[self._COMMAND_TOPIC] = 1
def publishEvent(self, eventId, msgFormat, data, qos=0, onPublish=None):
"""
Publish an event to Watson IoT Platform.
# Parameters
eventId (string): Name of this event
msgFormat (string): Format of the data for this event
data (dict): Data for this event
qos (int): MQTT quality of service level to use (`0`, `1`, or `2`)
onPublish(function): A function that will be called when receipt
of the publication is confirmed.
# Callback and QoS
The use of the optional #onPublish function has different implications depending
on the level of qos used to publish the event:
- qos 0: the client has asynchronously begun to send the event
- qos 1 and 2: the client has confirmation of delivery from the platform
"""
topic = "iot-2/evt/{eventId}/fmt/{msgFormat}".format(eventId=eventId, msgFormat=msgFormat)
return self._publishEvent(topic, eventId, msgFormat, data, qos, onPublish)
def _onCommand(self, client, userdata, pahoMessage):
"""
Internal callback for device command messages, parses source device from topic string and
passes the information on to the registered device command callback
"""
try:
command = Command(pahoMessage, self._messageCodecs)
except InvalidEventException as e:
self.logger.critical(str(e))
else:
self.logger.debug("Received command '%s'" % (command.commandId))
if self.commandCallback:
self.commandCallback(command)
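# Usage sketch (illustrative; the identity/auth values below are placeholders,
# not part of this module):
#
#   config = {
#       "identity": {"orgId": "myorg", "typeId": "myType", "deviceId": "dev1"},
#       "auth": {"token": "my-device-token"},
#   }
#   client = DeviceClient(config)
#   client.connect()
#   # with qos=1, onPublish fires once the platform confirms delivery
#   client.publishEvent("status", "json", {"temp": 21.5}, qos=1,
#                       onPublish=lambda: print("delivered"))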
|
ibm-watson-iot/iot-python
|
src/wiotp/sdk/device/client.py
|
Python
|
epl-1.0
| 4,102
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import struct
class BBType(object):
command = 1
command_return = 2
consolemsg = 3
ping = 4
pong = 5
getenv = 6
getenv_return = 7
fs = 8
fs_return = 9
class BBPacket(object):
def __init__(self, p_type=0, p_flags=0, payload="", raw=None):
self.p_type = p_type
self.p_flags = p_flags
if raw is not None:
self.unpack(raw)
else:
self.payload = payload
def __repr__(self):
return "BBPacket(%i, %i)" % (self.p_type, self.p_flags)
def _unpack_payload(self, data):
self.payload = data
def _pack_payload(self):
return self.payload
def unpack(self, data):
self.p_type, self.p_flags = struct.unpack("!HH", data[:4])
self._unpack_payload(data[4:])
def pack(self):
return struct.pack("!HH", self.p_type, self.p_flags) + \
self._pack_payload()
class BBPacketCommand(BBPacket):
def __init__(self, raw=None, cmd=None):
self.cmd = cmd
super(BBPacketCommand, self).__init__(BBType.command, raw=raw)
def __repr__(self):
return "BBPacketCommand(cmd=%r)" % self.cmd
def _unpack_payload(self, payload):
self.cmd = payload
def _pack_payload(self):
return self.cmd
class BBPacketCommandReturn(BBPacket):
def __init__(self, raw=None, exit_code=None):
self.exit_code = exit_code
super(BBPacketCommandReturn, self).__init__(BBType.command_return,
raw=raw)
def __repr__(self):
return "BBPacketCommandReturn(exit_code=%i)" % self.exit_code
def _unpack_payload(self, data):
self.exit_code, = struct.unpack("!L", data[:4])
def _pack_payload(self):
return struct.pack("!L", self.exit_code)
class BBPacketConsoleMsg(BBPacket):
def __init__(self, raw=None, text=None):
self.text = text
super(BBPacketConsoleMsg, self).__init__(BBType.consolemsg, raw=raw)
def __repr__(self):
return "BBPacketConsoleMsg(text=%r)" % self.text
def _unpack_payload(self, payload):
self.text = payload
def _pack_payload(self):
return self.text
class BBPacketPing(BBPacket):
def __init__(self, raw=None):
super(BBPacketPing, self).__init__(BBType.ping, raw=raw)
def __repr__(self):
return "BBPacketPing()"
class BBPacketPong(BBPacket):
def __init__(self, raw=None):
super(BBPacketPong, self).__init__(BBType.pong, raw=raw)
def __repr__(self):
return "BBPacketPong()"
class BBPacketGetenv(BBPacket):
def __init__(self, raw=None, varname=None):
self.varname = varname
super(BBPacketGetenv, self).__init__(BBType.getenv, raw=raw)
def __repr__(self):
return "BBPacketGetenv(varname=%r)" % self.varname
def _unpack_payload(self, payload):
self.varname = payload
def _pack_payload(self):
return self.varname
class BBPacketGetenvReturn(BBPacket):
def __init__(self, raw=None, text=None):
self.text = text
super(BBPacketGetenvReturn, self).__init__(BBType.getenv_return,
raw=raw)
def __repr__(self):
return "BBPacketGetenvReturn(varvalue=%s)" % self.text
def _unpack_payload(self, payload):
self.text = payload
def _pack_payload(self):
return self.text
class BBPacketFS(BBPacket):
def __init__(self, raw=None, payload=None):
super(BBPacketFS, self).__init__(BBType.fs, payload=payload, raw=raw)
def __repr__(self):
return "BBPacketFS(payload=%r)" % self.payload
class BBPacketFSReturn(BBPacket):
def __init__(self, raw=None, payload=None):
super(BBPacketFSReturn, self).__init__(BBType.fs_return, payload=payload, raw=raw)
def __repr__(self):
return "BBPacketFSReturn(payload=%r)" % self.payload
|
raphui/barebox
|
scripts/remote/messages.py
|
Python
|
gpl-2.0
| 4,045
|
# -*- coding: utf-8 -*-
##
## This file is part of INSPIRE.
## Copyright (C) 2015 CERN.
##
## INSPIRE is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## INSPIRE is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with INSPIRE; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
"""BibFormat element - Prints INSPIRE jobs contact name HEPNAMES search
"""
from datetime import datetime
def format_element(bfo, style="", separator=''):
    """Default format for the conference date range in the Conferences format.
    Builds a compact date range from the opening/closing dates in field 111.
    @param style: CSS class of the link
    @param separator: the separator between dates.
    """
out = []
fulladdress = bfo.fields("111__")
sday = ''
smonth = ''
syear = ''
fday = ''
fmonth = ''
fyear = ''
printaddress = ''
for printaddress in fulladdress:
if 'd' in printaddress:
out.append(printaddress['d'])
break
else:
if 'x' in printaddress:
sdate = printaddress['x']
sday = sdate[-2:]
smonth = sdate[5:7]
syear = sdate[:4]
if 'y' in printaddress:
fdate = printaddress['y']
fday = fdate[-2:]
fmonth = fdate[5:7]
fyear = fdate[:4]
try:
smonth = datetime.strptime(smonth, "%m").strftime("%b")
fmonth = datetime.strptime(fmonth, "%m").strftime("%b")
except ValueError:
pass
if printaddress in fulladdress:
if 'd' not in printaddress:
if syear == fyear:
if smonth == fmonth:
# year matches and month matches
out.append(sday+'-'+fday+' '+fmonth+' '+fyear)
else:
# year matches and month doesn't
out.append(sday + ' ' + smonth + ' - ' + fday + ' ' + fmonth + ' ' + fyear)
if not syear == fyear and not smonth == fmonth:
# year doesn't match and don't test month
out.append(sday + ' ' + smonth + ' ' + syear + ' - ' + fday + ' ' + fmonth + ' ' + fyear)
return separator.join(out)
def escape_values(bfo):
"""
Check if output of this element should be escaped.
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
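# Worked example (annotation, not part of the original element): with
# subfields x = '2015-09-01' and y = '2015-09-04' (same month and year), the
# element emits '01-04 Sep 2015'; if only the years match it emits
# '01 Sep - 04 Oct 2015' instead.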
|
ioannistsanaktsidis/inspire-next
|
inspire/base/format_elements/bfe_inspire_conferences_date.py
|
Python
|
gpl-2.0
| 2,924
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('QuickBooking', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='timing',
name='id',
field=models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True),
),
]
|
noorelden/QuickBooking
|
QuickBooking/migrations/0002_auto_20150623_1913.py
|
Python
|
gpl-2.0
| 450
|
#!/usr/bin/env python
import os
from app import create_app, db
from app.models import User, Role
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(app=app, db=db, User=User, Role=Role)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test():
"""Run the unit tests."""
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
with app.app_context():
db.create_all()
manager.run()
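# Usage sketch (annotation, assuming the legacy Flask-Script CLI this module
# is built on):
#
#   python manage.py shell       # interactive shell with app, db, User, Role
#   python manage.py db upgrade  # Flask-Migrate commands under "db"
#   python manage.py test        # the custom command defined above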
|
Sarthak30/User-Registration
|
source/manage.py
|
Python
|
gpl-2.0
| 768
|
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import wx
from math import cos, pi, radians, sin
from Queue import Queue
from sys import maxint
from threading import Thread
from time import clock, sleep
from win32api import EnumDisplayMonitors, GetSystemMetrics, mouse_event as mouse_event2
from win32con import MOUSEEVENTF_ABSOLUTE, MOUSEEVENTF_MOVE
# Local imports
import eg
from eg import HasActiveHandler
from eg.cFunctions import SetMouseCallback
from eg.WinApi.Dynamic import GetCursorPos, mouse_event, POINT, SetCursorPos
from eg.WinApi.Utils import GetMonitorDimensions
ICON = """iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABmJLR0QA/wD/AP+gvaeT
AAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH1QQIDRgEM71mAAAAADV0RVh0Q29tbWVudAAoYy
kgMjAwNCBKYWt1YiBTdGVpbmVyCgpDcmVhdGVkIHdpdGggVGhlIEdJTVCQ2YtvAAACHElEQVQ4y42Q
zUtUURjGf/fcyatz73Wiklwo2R/QplXQ/AURlLYJcrJNQrvQahYFI0wQ7lu0azNtYlAj2rUJRFciUf
kRgUwOM6Y5jePXfNzznhZ+NOpIvpvD+5zn/M7DY3Fo0ul0JzBQLpdvG2M8wHi++6r7Zs+Tet/Yu9Hr
W5tb/Yqjc2m7vB3zfPd7LBbzPd/tK/5Zu5ZKpZZSb1LZ0bGRG7u+F2E3PG0dfp1MJl+2tvq9xeLaJv
AxkUj01aW7UKtV3xvYam525nq6b92znieHEkqpIWwLpRSV7YBoNEoun2VhIUOTY6ODAAmkqJT68PRZ
orf+w1AoFBq63//A2LZthcNhhoeH0VrjNLVgYTHw8DGlUonC6u/IyEj6DnAAoAAq1ar1c3FxX8zlcl
QqlX97Po/XGrEa9MWREuPxOPl8nmw2Szwe538Tql9WVlZoa2tjcHDwgHZiwGqhwGqhgO/7dHZ0MDM7
e7IEG6V1zp05uy/WghrLv5YPaBul9eMBnufuRLXAwsIYQYsgRhCt0SK0n2/nuBKnxBi00YhotA7Qoh
ERRAsiBiOy559qBJjVWmMrmyAQtNboYBcmgojQdMrZ8083Anyan5/D8zxaWpqxlEKLoPVOfNd1iZyO
MDPzDeBHow7efv3yuc9xnGhX10U8z8MAGMPOYchkFlhaygG8bgSoVavVu5MT448mJ8YvA1cadJUBrg
Jrhy/+AqGrAMOnH86mAAAAAElFTkSuQmCC"""
eg.RegisterPlugin(
name = "Mouse",
author = (
"Bitmonster",
"Sem;colon",
),
version = "1.1.1",
description = (
"Actions to control the mouse cursor and emulation of mouse events."
),
kind = "core",
guid = "{6B1751BF-F94E-4260-AB7E-64C0693FD959}",
icon = ICON,
url = "http://www.eventghost.net/forum/viewtopic.php?f=9&t=5481",
)
class Mouse(eg.PluginBase):
def __init__(self):
self.AddEvents()
self.AddAction(LeftButton)
self.AddAction(LeftDoubleClick)
self.AddAction(ToggleLeftButton)
self.AddAction(MiddleButton)
self.AddAction(MoveAbsolute)
self.AddAction(MoveRelative)
self.AddAction(RightButton)
self.AddAction(RightDoubleClick)
self.AddAction(GoDirection)
self.AddAction(MouseWheel)
@eg.LogIt
def __close__(self):
pass
def __start__(self):
self.thread = MouseThread()
self.leftMouseButtonDown = False
self.lastMouseEvent = None
self.mouseButtonWasBlocked = [False, False, False, False, False]
SetMouseCallback(self.MouseCallBack)
@eg.LogIt
def __stop__(self):
SetMouseCallback(None)
self.thread.receiveQueue.put([-1])
def MouseCallBack(self, buttonName, buttonNum, param):
if param:
if self.lastMouseEvent:
self.lastMouseEvent.SetShouldEnd()
shouldBlock = HasActiveHandler("Mouse." + buttonName)
self.mouseButtonWasBlocked[buttonNum] = shouldBlock
self.lastMouseEvent = self.TriggerEnduringEvent(buttonName)
return shouldBlock
else:
if self.lastMouseEvent:
self.lastMouseEvent.SetShouldEnd()
return self.mouseButtonWasBlocked[buttonNum]
return False
class MouseThread(Thread):
currentAngle = 0
newAngle = 0
acceleration = 0
speed = 0
maxTicks = 5
yRemainder = 0
xRemainder = 0
leftButtonDown = False
lastTime = 0
initSpeed = 0.06
maxSpeed = 7.0
useAlternateMethod = False
def __init__(self):
Thread.__init__(self, name="MouseThread")
self.receiveQueue = Queue(2048)
self.start()
@eg.LogItWithReturn
def run(self):
stop = False
point = POINT()
while True:
self.lastTime = clock()
if not self.receiveQueue.empty():
data = self.receiveQueue.get()
if data[0] == -1:
break
elif data[0] == -2:
stop = True
else:
self.newAngle = radians(data[0])
self.initSpeed = data[1]
self.maxSpeed = data[2]
self.acceleration = data[3]
self.useAlternateMethod = data[4]
if stop:
self.acceleration = 0
self.speed = 0
stop = False
continue
if self.acceleration == 0:
sleep(0.05)
continue
ticks = 10
if self.speed == 0:
self.currentAngle = self.newAngle
self.speed = self.initSpeed
else:
diff = self.newAngle - self.currentAngle
if diff > pi:
diff = diff - 2 * pi
elif diff < -1 * pi:
diff = diff + 2 * pi
self.currentAngle = self.currentAngle + (diff / 20)
self.speed = self.speed + (self.speed * self.acceleration * ticks)
if self.speed > self.maxSpeed:
self.speed = self.maxSpeed
elif self.speed <= 0:
self.speed = 0
factor = self.speed * (ticks / 10)
xCurrent = sin(self.currentAngle) * factor + self.xRemainder
yCurrent = -1 * cos(self.currentAngle) * factor + self.yRemainder
x = int(xCurrent)
y = int(yCurrent)
self.xRemainder = xCurrent - x
self.yRemainder = yCurrent - y
try:
if self.useAlternateMethod:
mouse_event2(MOUSEEVENTF_MOVE, x, y)
else:
GetCursorPos(point)
SetCursorPos(point.x + x, point.y + y)
except:
pass
if self.speed == 0:
self.acceleration = 0
waitTicks = 0.01 - (clock() - self.lastTime)
if waitTicks < 0:
waitTicks = 0.0
sleep(waitTicks)
class GoDirection(eg.ActionBase):
name = "Start Movement"
description = "Starts cursor movement in the specified direction."
class text:
label = u"Start cursor movement in direction %.2f\u00B0"
text1 = "Start moving cursor in direction"
text2 = "degrees. (0-360)"
text3 = "Initial mouse speed:"
text4 = "Maximum mouse speed:"
text5 = "Acceleration factor:"
label_AM = "Use alternate method"
def __call__(self, direction=0, initSpeed = 60, maxSpeed = 7000, accelerationFactor = 3, useAlternateMethod=False):
def UpFunc():
self.plugin.thread.receiveQueue.put([-2])
self.plugin.thread.receiveQueue.put([float(direction), float(initSpeed) / 1000, float(maxSpeed) / 1000, float(accelerationFactor) / 1000, useAlternateMethod])
eg.event.AddUpFunc(UpFunc)
def Configure(self, direction=0, initSpeed = 60, maxSpeed = 7000, accelerationFactor = 3, useAlternateMethod=False):
text = self.text
panel = eg.ConfigPanel()
direction = float(direction)
valueCtrl = panel.SpinNumCtrl(float(direction), min=0, max=360)
panel.AddLine(text.text1, valueCtrl, text.text2)
initSpeedLabel = wx.StaticText(panel, -1, text.text3)
initSpeedSpin = eg.SpinIntCtrl(panel, -1, initSpeed, 10, 2000)
maxSpeedLabel = wx.StaticText(panel, -1, text.text4)
maxSpeedSpin = eg.SpinIntCtrl(panel, -1, maxSpeed, 4000, 32000)
accelerationFactorLabel = wx.StaticText(panel, -1, text.text5)
accelerationFactorSpin = eg.SpinIntCtrl(panel, -1, accelerationFactor, 1, 200)
eg.EqualizeWidths((initSpeedLabel, maxSpeedLabel, accelerationFactorLabel))
panel.AddLine(initSpeedLabel, initSpeedSpin)
panel.AddLine(maxSpeedLabel, maxSpeedSpin)
panel.AddLine(accelerationFactorLabel, accelerationFactorSpin)
uAMCB = panel.CheckBox(useAlternateMethod, text.label_AM)
panel.AddLine(uAMCB)
while panel.Affirmed():
panel.SetResult(
valueCtrl.GetValue(),
initSpeedSpin.GetValue(),
maxSpeedSpin.GetValue(),
accelerationFactorSpin.GetValue(),
uAMCB.GetValue(),
)
def GetLabel(self, direction=0, initSpeed = 60, maxSpeed = 7000, accelerationFactor = 3, useAlternateMethod=False):
direction = float(direction)
return self.text.label % direction
class LeftButton(eg.ActionBase):
name = "Left Mouse Click"
description = "Clicks the left mouse button."
def __call__(self):
def UpFunc():
mouse_event(0x0004, 0, 0, 0, 0)
self.plugin.leftMouseButtonDown = False
mouse_event(0x0002, 0, 0, 0, 0)
self.plugin.leftMouseButtonDown = True
eg.event.AddUpFunc(UpFunc)
class LeftDoubleClick(eg.ActionBase):
name = "Left Mouse Double-Click"
description = "Double-clicks the left mouse button."
def __call__(self):
def UpFunc():
mouse_event(0x0004, 0, 0, 0, 0)
self.plugin.leftMouseButtonDown = False
mouse_event(0x0002, 0, 0, 0, 0)
mouse_event(0x0004, 0, 0, 0, 0)
mouse_event(0x0002, 0, 0, 0, 0)
eg.event.AddUpFunc(UpFunc)
class MiddleButton(eg.ActionBase):
name = "Middle Mouse Click"
description = "Clicks the middle mouse button."
def __call__(self):
def UpFunc():
mouse_event(0x0040, 0, 0, 0, 0)
mouse_event(0x0020, 0, 0, 0, 0)
eg.event.AddUpFunc(UpFunc)
class MouseWheel(eg.ActionBase):
name = "Turn Mouse Wheel"
description = "Turns the mouse wheel."
class text:
label = u"Turn mouse wheel %d clicks"
text1 = "Turn mouse wheel by"
text2 = "clicks. (Negative values turn down)"
def __call__(self, direction=0):
mouse_event(0x0800, 0, 0, direction * 120, 0)
def Configure(self, direction=0):
panel = eg.ConfigPanel()
valueCtrl = panel.SpinIntCtrl(direction, min=-100, max=100)
panel.AddLine(self.text.text1, valueCtrl, self.text.text2)
while panel.Affirmed():
panel.SetResult(valueCtrl.GetValue())
def GetLabel(self, direction=0):
return self.text.label % direction
class MoveAbsolute(eg.ActionBase):
name = "Move Absolute"
description = "Moves the cursor to an absolute position."
class text:
display = "Move cursor to"
label_M = "Monitor: %i, "
label_X = "x: %i, "
label_Y = "y: %i"
label_C = "Set position to screen center"
label_AM = "Use alternate method"
center = "center"
text1 = "Set horizontal position X to"
text2 = "pixels"
text3 = "Set vertical position Y to"
note = (
"Note: The coordinates X and Y are related to the monitor "
'(not to the "virtual screen")'
)
def __call__(self, x = None, y = None, displayNumber = None, center = False, useAlternateMethod=False):
point = POINT()
GetCursorPos(point)
X = point.x
Y = point.y
mons = EnumDisplayMonitors(None, None)
mons = [item[2] for item in mons]
for mon in range(len(mons)): # on what monitor (= mon) is the cursor?
m = mons[mon]
if m[0] <= X and X <= m[2] and m[1] <= Y and Y <= m[3]:
break
if displayNumber is None:
displayNumber = mon
monitorDimensions = GetMonitorDimensions()
try:
displayRect = monitorDimensions[displayNumber]
except IndexError:
displayNumber = 0
displayRect = monitorDimensions[displayNumber]
if center:
x = displayRect[2] / 2
y = displayRect[3] / 2
if x is None:
x = X - mons[displayNumber][0]
if y is None:
y = Y - mons[displayNumber][1]
x += displayRect[0]
y += displayRect[1]
if useAlternateMethod:
x = x * 65535 / GetSystemMetrics(0)
y = y * 65535 / GetSystemMetrics(1)
mouse_event2(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_MOVE, x, y)
else:
SetCursorPos(x, y)
def Configure(self, x = None, y = None, displayNumber = None, center = False, useAlternateMethod=False):
panel = eg.ConfigPanel()
text = self.text
uAMCB = panel.CheckBox(useAlternateMethod, text.label_AM)
cCB = panel.CheckBox(center, text.label_C)
xCB = panel.CheckBox(x is not None, text.text1)
yCB = panel.CheckBox(y is not None, text.text3)
displayCB = panel.CheckBox(displayNumber is not None, text.display)
#xCtrl = panel.SpinIntCtrl(x or 0, min = -maxint - 1, max = maxint)
xCtrl = panel.SpinIntCtrl(x or 0, min = 0, max = maxint) # since 1.0.1
xCtrl.Enable(x is not None)
#yCtrl = panel.SpinIntCtrl(y or 0, min = -maxint - 1, max = maxint)
yCtrl = panel.SpinIntCtrl(y or 0, min = 0, max = maxint) # since 1.0.1
yCtrl.Enable(y is not None)
display = -1 if displayNumber is None else displayNumber
displayChoice = eg.DisplayChoice(panel, display)
displayChoice.Enable(displayNumber is not None)
xPixels = wx.StaticText(panel, -1, text.text2)
yPixels = wx.StaticText(panel, -1, text.text2)
monsCtrl = eg.MonitorsCtrl(panel, background = (224, 238, 238))
note = wx.StaticText(panel, -1, text.note)
note.SetForegroundColour(wx.RED)
sizer = wx.GridBagSizer(vgap = 6, hgap = 5)
sizer.Add(cCB, (0, 0), (1, 3), flag = wx.BOTTOM, border = 8)
sizer.Add(xCB, (1, 0), (1, 1))
sizer.Add(xCtrl, (1, 1), (1, 1))
sizer.Add(xPixels, (1, 2), (1, 1))
sizer.Add(yCB, (2, 0), (1, 1))
sizer.Add(yCtrl, (2, 1), (1, 1))
sizer.Add(yPixels, (2, 2), (1, 1))
sizer.Add(note, (3, 0), (1, 3))
sizer.Add(displayCB, (4, 0), (1, 1), flag = wx.TOP, border = 14)
sizer.Add(displayChoice, (4, 1), (1, 2), flag = wx.TOP, border = 13)
sizer.Add(uAMCB, (5, 0), (1, 3))
panel.sizer.Add(sizer, 1, wx.EXPAND)
panel.sizer.Add(monsCtrl, 0, wx.TOP, 8)
def HandleCenterCheckBox(event = None):
val = not cCB.GetValue()
xCB.Enable(val)
xCtrl.Enable(val)
xPixels.Enable(val)
yCB.Enable(val)
yCtrl.Enable(val)
yPixels.Enable(val)
if not val:
xCB.SetValue(False)
yCB.SetValue(False)
xCtrl.SetValue(0)
yCtrl.SetValue(0)
if event:
event.Skip()
cCB.Bind(wx.EVT_CHECKBOX, HandleCenterCheckBox)
HandleCenterCheckBox()
def HandleXCheckBox(event):
xCtrl.Enable(event.IsChecked())
event.Skip()
xCB.Bind(wx.EVT_CHECKBOX, HandleXCheckBox)
def HandleYCheckBox(event):
yCtrl.Enable(event.IsChecked())
event.Skip()
yCB.Bind(wx.EVT_CHECKBOX, HandleYCheckBox)
def HandleDisplayCB(event):
flag = event.IsChecked()
displayChoice.Enable(flag)
if flag:
display = 0 if displayNumber is None else displayNumber
else:
display = -1
displayChoice.SetValue(display)
event.Skip()
displayCB.Bind(wx.EVT_CHECKBOX, HandleDisplayCB)
while panel.Affirmed():
if xCtrl.IsEnabled():
x = xCtrl.GetValue()
else:
x = None
if yCtrl.IsEnabled():
y = yCtrl.GetValue()
else:
y = None
if displayChoice.IsEnabled():
displayNumber = displayChoice.GetValue()
else:
displayNumber = None
panel.SetResult(x, y, displayNumber, cCB.GetValue(), uAMCB.GetValue())
def GetLabel(self, x, y, displayNumber, center, useAlternateMethod=False):
if center:
res = self.text.display + " " + self.text.center
if displayNumber is not None:
res += ": %s" % (self.text.label_M % (displayNumber + 1))
return res
else:
return self.text.display + ": %s%s%s" % (
self.text.label_M % (displayNumber + 1) if displayNumber is not None else "",
self.text.label_X % x if x is not None else "",
self.text.label_Y % y if y is not None else "",
)
class MoveRelative(eg.ActionBase):
name = "Move Relative"
description = "Moves the cursor to a relative position."
class text:
label = "Change cursor position by x:%s, y:%s"
text1 = "Change horizontal position X by"
text2 = "pixels"
text3 = "Change vertical position Y by"
label_AM = "Use alternate method"
def __call__(self, x, y, useAlternateMethod=False):
if x is None:
x = 0
if y is None:
y = 0
if useAlternateMethod:
mouse_event2(MOUSEEVENTF_MOVE, x, y)
else:
point = POINT()
GetCursorPos(point)
SetCursorPos(point.x + x, point.y + y)
def Configure(self, x=0, y=0, useAlternateMethod=False):
panel = eg.ConfigPanel()
text = self.text
uAMCB = panel.CheckBox(useAlternateMethod, text.label_AM)
xCB = panel.CheckBox(x is not None, text.text1)
def HandleXCheckBox(event):
xCtrl.Enable(event.IsChecked())
event.Skip()
xCB.Bind(wx.EVT_CHECKBOX, HandleXCheckBox)
xCtrl = panel.SpinIntCtrl(x or 0, min=-maxint - 1, max=maxint)
xCtrl.Enable(x is not None)
yCB = panel.CheckBox(y is not None, text.text3)
def HandleYCheckBox(event):
yCtrl.Enable(event.IsChecked())
event.Skip()
yCB.Bind(wx.EVT_CHECKBOX, HandleYCheckBox)
yCtrl = panel.SpinIntCtrl(y or 0, min=-maxint - 1, max=maxint)
yCtrl.Enable(y is not None)
panel.AddLine(xCB, xCtrl, text.text2)
panel.AddLine(yCB, yCtrl, text.text2)
panel.AddLine(uAMCB)
while panel.Affirmed():
if xCtrl.IsEnabled():
x = xCtrl.GetValue()
else:
x = None
if yCtrl.IsEnabled():
y = yCtrl.GetValue()
else:
y = None
panel.SetResult(x, y, uAMCB.GetValue())
def GetLabel(self, x, y, useAlternateMethod=False):
return self.text.label % (str(x), str(y))
class RightButton(eg.ActionBase):
name = "Right Mouse Click"
description = "Clicks the right mouse button."
def __call__(self):
def UpFunc():
mouse_event(0x0010, 0, 0, 0, 0)
mouse_event(0x0008, 0, 0, 0, 0)
eg.event.AddUpFunc(UpFunc)
class RightDoubleClick(eg.ActionBase):
name = "Right Mouse Double-Click"
description = "Double-clicks the right mouse button."
def __call__(self):
def UpFunc():
mouse_event(0x0010, 0, 0, 0, 0)
mouse_event(0x0008, 0, 0, 0, 0)
mouse_event(0x0010, 0, 0, 0, 0)
mouse_event(0x0008, 0, 0, 0, 0)
eg.event.AddUpFunc(UpFunc)
class ToggleLeftButton(eg.ActionBase):
class text:
name = "Left Mouse Toggle"
description = "Changes the status of the left mouse button."
radioBoxLabel = "Option"
radioBoxOptions = [
"Toggle left mouse button",
"Set left mouse button \"Up\"",
"Set left mouse button \"Down\""
]
def __call__(self, data=0):
if self.plugin.leftMouseButtonDown and data == 0 or data == 1:
mouse_event(0x0004, 0, 0, 0, 0)
self.plugin.leftMouseButtonDown = False
else:
mouse_event(0x0002, 0, 0, 0, 0)
self.plugin.leftMouseButtonDown = True
def GetLabel(self, data=0):
return self.plugin.label + ': ' + self.text.radioBoxOptions[data]
def Configure(self, data=0):
panel = eg.ConfigPanel()
radioBox = wx.RadioBox(
panel,
label=self.text.radioBoxLabel,
choices=self.text.radioBoxOptions,
style=wx.RA_SPECIFY_ROWS
)
radioBox.SetSelection(data)
panel.sizer.Add(radioBox, 0, wx.EXPAND)
while panel.Affirmed():
panel.SetResult(radioBox.GetSelection())
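# Reference (annotation, not part of the original plugin): the numeric
# mouse_event flags used above are the Win32 constants
# MOUSEEVENTF_LEFTDOWN (0x0002), MOUSEEVENTF_LEFTUP (0x0004),
# MOUSEEVENTF_RIGHTDOWN (0x0008), MOUSEEVENTF_RIGHTUP (0x0010),
# MOUSEEVENTF_MIDDLEDOWN (0x0020), MOUSEEVENTF_MIDDLEUP (0x0040) and
# MOUSEEVENTF_WHEEL (0x0800); wheel movement is given in multiples of
# WHEEL_DELTA (120), matching the `direction * 120` in MouseWheel.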
|
tfroehlich82/EventGhost
|
plugins/Mouse/__init__.py
|
Python
|
gpl-2.0
| 21,487
|
# -*- coding: utf-8 -*-
'''
script.matchcenter - Football information for Kodi
A program addon that can be mapped to a key on your remote to display football information.
Livescores, Event details, Line-ups, League tables, next and previous matches by team. Follow what
others are saying about the match in twitter.
Copyright (C) 2016 enen92
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import xbmcgui
import xbmc
import datetime
import json
import mainmenu
import os
from resources.lib.utilities import tweet
from resources.lib.utilities.addonfileio import FileIO
from resources.lib.utilities import ssutils
from resources.lib.utilities.common_addon import *
class TwitterDialog(xbmcgui.WindowXMLDialog):
def __init__( self, *args, **kwargs ):
self.isRunning = True
self.hash = kwargs["hash"]
self.standalone = kwargs["standalone"]
self.teamObjs = {}
def onInit(self):
xbmc.log(msg="[Match Center] Twitter cycle started", level=xbmc.LOGDEBUG)
self.getControl(32540).setImage(os.path.join(addon_path,"resources","img","goal.png"))
xbmc.executebuiltin("SetProperty(loading-script-matchcenter-twitter,1,home)")
self.getTweets()
xbmc.executebuiltin("ClearProperty(loading-script-matchcenter-twitter,Home)")
i=0
while self.isRunning:
if (float(i*200)/(twitter_update_time*60*1000)).is_integer() and ((i*200)/(3*60*1000)) != 0:
self.getTweets()
xbmc.sleep(200)
i += 1
xbmc.log(msg="[Match Center] Twitter cycle stopped", level=xbmc.LOGDEBUG)
def getTweets(self):
self.getControl(32500).setLabel("#"+self.hash)
self.getControl(32503).setImage(os.path.join(addon_path,"resources","img","twitter_sm.png"))
tweetitems = []
tweets = tweet.get_hashtag_tweets(self.hash)
if tweets:
for _tweet in tweets:
td = ssutils.get_timedelta_string(datetime.datetime.utcnow() - _tweet["date"])
item = xbmcgui.ListItem(_tweet["text"].replace("\n",""))
item.setProperty("profilepic",_tweet["profilepic"])
item.setProperty("author","[B]" +"@" + _tweet["author"] + "[/B]")
item.setProperty("timedelta", td)
tweetitems.append(item)
self.getControl(32501).reset()
self.getControl(32501).addItems(tweetitems)
if tweetitems:
self.setFocusId(32501)
return
def reset(self):
if os.path.exists(tweet_file):
os.remove(tweet_file)
xbmcgui.Dialog().ok(translate(32000), translate(32045))
return
def stopRunning(self):
self.isRunning = False
self.close()
if not self.standalone:
mainmenu.start()
def onAction(self,action):
if action.getId() == 92 or action.getId() == 10:
self.stopRunning()
def onClick(self,controlId):
if controlId == 32501:
teamid = self.getControl(controlId).getSelectedItem().getProperty("teamid")
matchhistory.start(teamid)
elif controlId == 32514:
self.reset()
def start(twitterhash=None, standalone=False):
if not twitterhash:
userInput = True
if os.path.exists(tweet_file):
twitter_data = json.loads(FileIO.fileread(tweet_file))
twitterhash = twitter_data["hash"]
twitter_mediafile = twitter_data["file"]
if twitter_mediafile == xbmc.getInfoLabel('Player.Filenameandpath'):
userInput = False
else:
userInput = False
if userInput:
dialog = xbmcgui.Dialog()
twitterhash = dialog.input(translate(32046), type=xbmcgui.INPUT_ALPHANUM)
if len(twitterhash) != 0:
twitterhash = twitterhash.replace("#","")
else:
xbmcgui.Dialog().ok(translate(32000), translate(32047))
mainmenu.start()
if twitterhash:
#Save twitter hashtag
if twitter_history_enabled == 'true':
tweet.add_hashtag_to_twitter_history(twitterhash)
if xbmc.getCondVisibility("Player.HasMedia") and save_hashes_during_playback == 'true':
tweet.savecurrenthash(twitterhash)
main = TwitterDialog('script-matchcenter-Twitter.xml', addon_path, getskinfolder(), '', hash=twitterhash, standalone=standalone)
main.doModal()
del main
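# Note (annotation, not part of the original script): the refresh condition in
# onInit fires whenever the elapsed time i*200 ms is an exact multiple of
# twitter_update_time minutes, and its second (integer-division) term
# suppresses refreshes during roughly the first three minutes.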
|
enen92/script.matchcenter
|
resources/lib/tweets.py
|
Python
|
gpl-2.0
| 4,506
|
"""Subclass of GitView, which is generated by wxFormBuilder."""
import wx
from beatle import tran, model, localpath
from beatle.model import git
from beatle.lib import wxx
from beatle.lib.decorators import classproperty
from beatle.lib.handlers import Identifiers
from beatle.app import resources as rc
from beatle.app.ui.tools import clone_mnu
from beatle.app.ui.dlg import WorkingDialog
from beatle.app.ui.view import BaseView
from beatle.ctx import localcontext as context
from beatle.activity.git.ui import ui
from beatle.activity.git.ui import dlg as ui_dlg
# Implementing GitView
class GitView(BaseView, ui.GitView):
"""View pane for git repository"""
perspective = ''
clipboard_handler = False
#command ids
_editDelete = Identifiers.register("ID_DELETE")
_openRepo = Identifiers.register("ID_OPEN_REPO")
_newRepo = Identifiers.register("ID_NEW_REPO")
_addItem = Identifiers.register("ID_ADD_ITEM")
_stageItem = Identifiers.register("ID_STAGE_ITEM")
_commitItem = Identifiers.register("ID_COMMIT_ITEM")
_refreshRepo = Identifiers.register("ID_REFRES_REPO")
_addRemote = Identifiers.register("ID_ADD_REMOTE")
_pushRemote = Identifiers.register("ID_PUSH_REMOTE")
def __init__(self, parent, root=None):
"""Initialize pane"""
super(GitView, self).__init__(parent)
self.frame = parent
self.selected = None
self.imglist = rc.GetBitmapImageList()
self.m_tree.SetImageList(self.imglist)
self.m_tree.AddRoot('Repositories')
self._create_menus()
self._bind_events()
self._set_accelerators()
# when we create a models view, we need to update elements
if root is None:
# reload full stack
for wrk in context.app.workspaces:
self.insert(wrk)
else:
self.insert(root)
def insert(self, element):
"""Nested insert elements in tree"""
if self.DoRenderAddElement(element):
for cls in element._child:
for k in element[cls]:
self.insert(k)
def GetViewStatus(self):
"""Get working info about the view"""
return str(self.m_tree.GetExpansionState())
def SetViewStatus(self, status):
"""Set working info about the view"""
try:
l = eval(status)
self.m_tree.SetExpansionState(l)
return True
except:
return False
@classproperty
def name(cls):
"""returns the name of this view"""
return 'Git'
@classmethod
def bitmap_index(cls):
"""return the bitmap index"""
return rc.GetBitmapIndex('git')
@classmethod
def bitmap(cls):
"""return the bitmap"""
return rc.GetBitmap('git')
def _create_menus(self):
"""Create a custom menu entry in main menu"""
self._menu = wxx.Menu(
[self._newRepo, u"New repository", u"create new git repository", wx.ITEM_NORMAL, wx.ArtProvider.GetBitmap(wx.ART_NEW, wx.ART_MENU)],
[self._openRepo, u"Open repository", u"open existing repository", wx.ITEM_NORMAL, wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN, wx.ART_MENU)],
[],
[self._addRemote, u"Add remote", u"add remote repository", wx.ITEM_NORMAL, wx.Bitmap(localpath("app/res/git_remote.xpm"), wx.BITMAP_TYPE_ANY)],
[self._pushRemote, u"Push remote", u"push remote repository", wx.ITEM_NORMAL, wx.Bitmap(localpath("app/res/git_push.xpm"), wx.BITMAP_TYPE_ANY)],
[],
[self._addItem, u"Add to repository", u"add item to repository control", wx.ITEM_NORMAL],
[self._stageItem, u"Stage item", u"stage item to repository control", wx.ITEM_NORMAL],
[self._commitItem, u"Commit repository", u"commit changes to repository", wx.ITEM_NORMAL],
[self._refreshRepo, u"Refresh", u"update repository status", wx.ITEM_NORMAL, rc.GetBitmap('reload')],
)
self.RegisterMenu('Git', self._menu)
def _bind_events(self):
"""Binds events"""
# Tree events
self.m_tree.Bind(wx.EVT_TREE_SEL_CHANGED, self.OnTreeSelChanged)
self.m_tree.Bind(wx.EVT_RIGHT_DOWN, self.OnTreeMenu)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateDelete, id=self._editDelete)
self.BindSpecial(wx.EVT_MENU, self.OnDelete, id=self._editDelete)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateNewRepository, id=self._newRepo)
self.BindSpecial(wx.EVT_MENU, self.OnNewRepository, id=self._newRepo)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateOpenExistingRepository, id=self._openRepo)
self.BindSpecial(wx.EVT_MENU, self.OnOpenExistingRepository, id=self._openRepo)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateAddRemote, id=self._addRemote)
self.BindSpecial(wx.EVT_MENU, self.OnAddRemote, id=self._addRemote)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdatePushRemote, id=self._pushRemote)
self.BindSpecial(wx.EVT_MENU, self.OnPushRemote, id=self._pushRemote)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateAddItemToRepository, id=self._addItem)
self.BindSpecial(wx.EVT_MENU, self.OnAddItemToRepository, id=self._addItem)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateStageItem, id=self._stageItem)
self.BindSpecial(wx.EVT_MENU, self.OnStageItem, id=self._stageItem)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateCommitRepository, id=self._commitItem)
self.BindSpecial(wx.EVT_MENU, self.OnCommitRepository, id=self._commitItem)
self.BindSpecial(wx.EVT_UPDATE_UI, self.OnUpdateRefreshStatus, id=self._refreshRepo)
self.BindSpecial(wx.EVT_MENU, self.OnRefreshStatus, id=self._refreshRepo)
super(GitView, self)._bind_events()
def _set_accelerators(self):
"""Set the accelerator table"""
# ctrl_alt = wx.ACCEL_CTRL + wx.ACCEL_ALT
# ctrl_shf = wx.ACCEL_CTRL + wx.ACCEL_SHIFT
aTable = wx.AcceleratorTable([
wx.AcceleratorEntry(wx.ACCEL_NORMAL, wx.WXK_LEFT, BaseView._leftKeyId)
])
self.SetAcceleratorTable(aTable)
def DoRenderAddElement(self, obj):
"""Inserts element in tree"""
# model tree
treeOrder = [model.Workspace, git.GitRepo, git.GitRemotes,
model.git.GitRemote, git.GitDir, git.GitFile]
if type(obj) not in treeOrder:
return False
ti = treeOrder.index(type(obj))
#check parent
if self.m_tree.HoldsObject(obj.parent):
p = obj.parent
else:
p = self.m_tree.GetRootItem()
if obj.parent is not None:
# find some major friend item with the same class
tribal = obj.parent[type(obj)]
index = tribal.index(obj)
pre = None
while index > 0 and pre is None:
index = index - 1
candidate = tribal[index]
if not self.m_tree.HoldsObject(candidate):
continue
pre = candidate
if pre is not None:
self.m_tree.InsertItem(p, pre, obj.label,
obj.bitmap_index, obj.bitmap_index, obj)
if type(obj) is model.cc.Constructor:
self.m_tree.SetItemBold(obj, bold=obj.IsPreferred())
elif type(obj) in [model.py.Module, model.py.Package]:
self.m_tree.SetItemBold(obj, bold=obj._entry)
return True
itemCount = 0
citem, cookie = self.m_tree.GetFirstChild(p)
if type(citem) is wx.TreeItemId:
citem = self.m_tree.__fer__(citem)
if type(citem) in treeOrder:
if ti <= treeOrder.index(type(citem)):
self.m_tree.PrependItem(p, obj.label,
obj.bitmap_index, obj.bitmap_index, obj)
return True
while type(citem) is not wx.TreeItemId or citem.IsOk():
itemCount = itemCount + 1
citem, cookie = self.m_tree.GetNextChild(p, cookie)
if type(citem) not in treeOrder:
continue
if ti <= treeOrder.index(type(citem)):
self.m_tree.InsertItemBefore(p, itemCount,
obj.label,
obj.bitmap_index, obj.bitmap_index, data=obj)
return True
        # OK, do append
self.m_tree.AppendItem(p, obj.label,
obj.bitmap_index, obj.bitmap_index, obj)
return True
def DoRenderRemoveElement(self, obj):
"""Do remove element in tree"""
if self.m_tree.HoldsObject(obj):
self.m_tree.Delete(obj)
def UpdateElement(self, obj):
"""Update the tree label for a object"""
if not self.m_tree.HoldsObject(obj):
return
self.m_tree.SetItemText(obj, obj.label)
self.m_tree.SetItemImage(obj,
obj.bitmap_index, wx.TreeItemIcon_Normal)
if hasattr(obj, 'bitmap_open_index'):
self.m_tree.SetItemImage(obj,
obj.bitmap_open_index, wx.TreeItemIcon_Expanded)
self.m_tree.SetItemImage(obj,
obj.bitmap_open_index, wx.TreeItemIcon_SelectedExpanded)
self.m_tree.SetItemImage(obj,
obj.bitmap_index, wx.TreeItemIcon_Selected)
def OnTreeSelChanged(self, event):
"""Handle select changed"""
self.selected = self.m_tree.GetSelection()
def OnUpdateNewRepository(self, event):
"""Update new repository command"""
event.Enable(bool(self.selected and type(self.selected) is model.Workspace))
@tran.TransactionalMethod('new git repo {0}')
@wxx.CreationDialog(ui_dlg.GitRepoDialog, git.GitRepo)
def OnNewRepository(self, event):
"""New repository command"""
return (context.frame, self.selected.inner_repository_container)
def OnUpdateOpenExistingRepository(self, event):
"""Update open existing repository"""
event.Enable(bool(self.selected and type(self.selected) is model.Workspace))
@tran.TransactionalMethod('add git repo {0}')
@wxx.CreationDialog(ui_dlg.OpenGitRepoDialog, git.GitRepo)
def OnOpenExistingRepository(self, event):
"""Handle add folder command"""
return (context.frame, self.selected.inner_repository_container)
def OnUpdateAddRemote(self, event):
"""Update add remote repo"""
event.Enable(bool(self.selected and type(self.selected) is not model.Workspace
and self.selected.repo))
@tran.TransactionalMethod('add remote repo {0}')
@wxx.CreationDialog(ui_dlg.GitRemoteDialog, git.GitRemote)
def OnAddRemote(self, event):
"""Handle add remote repo"""
rl = self.selected.repo(model.git.GitRemotes)
return (context.frame, rl[0])
def OnUpdatePushRemote(self, event):
"""Update add remote repo"""
event.Enable(bool(self.selected and type(self.selected) is git.GitRemote))
def OnPushRemote(self, event):
"""Handle add remote repo"""
_repo = self.selected.repo._repo
remote = _repo.remote(self.selected.name)
dialog = ui_dlg.ProgressDialog(self.frame)
dialog.Show()
wx.YieldIfNeeded()
try:
remote.push(progress=dialog.link())
dialog.update('--', 0, message='DONE')
except:
dialog.update('--', 100, message='FAILED')
pass
dialog.Finished()
return
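# NOTE: everything below this return statement is unreachable; it looks
# like two earlier push implementations that were kept for reference.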
import git
g = git.cmd.Git(self.selected.repo._uri)
g.push('--set-upstream', self.selected.name, 'master')
return
_repo = self.selected.repo._repo
ref = _repo.remote(self.selected.name)
try:
dialog = ui_dlg.ProgressDialog(self, self.frame)
dialog.Show()
if self.selected.password:
ref.push(refspec='refs/heads/master:refs/heads/master', password=self.selected.password,
progress=dialog)
else:
ref.push(refspec='refs/heads/master:refs/heads/master', progress=dialog)
except Exception, e:
wx.MessageBox("Push to repository {name} failed: {error}.".format(
name=self.selected.name, error=e), "Error",
wx.OK | wx.CENTER | wx.ICON_ERROR, self)
dialog.Finished()
def TreeLeftKey(self, event):
"""If the selected node is expanded, simply collapse it.
If not, navigate through parent"""
if not self.selected:
return
if self.m_tree.IsExpanded(self.selected):
self.m_tree.Collapse(self.selected)
else:
parent = self.selected.parent
if self.m_tree.HoldsObject(parent):
self.m_tree.SelectItem(parent)
def OnTreeMenu(self, event):
"""Handles context tree popup menu"""
(item, where) = self.m_tree.HitTest(event.GetPosition())
if item is None:
return
self.m_tree.SelectItem(item)
if self.selected is None:
return
menu = clone_mnu(self._menu, enabled=True, notitle=True)
if menu is None:
return
self.PopupMenu(menu, event.GetPosition())
menu.Destroy()
def OnDelete(self, event):
"""delete element"""
obj = self.selected
aliasMap = {
model.git.GitRepo: "git repository {0}",
model.git.GitFile: "git file {0}",
model.git.GitDir: "git dir {0}",
model.git.GitRemote: "git remote {0}"}
tran.TransactionStack.DoBeginTransaction("delete " +
aliasMap[type(obj)].format(obj._name))
obj.Delete()
tran.TransactionStack.DoCommit()
def OnUpdateDelete(self, event):
"""Update delete element"""
obj = self.selected
event.Enable(obj is not None and type(obj) is not model.Workspace
and obj.can_delete)
def OnUpdateCommitRepository(self, event):
"""Update commit to repo"""
obj = self.selected
event.Enable(bool(obj and type(obj) is model.git.GitRepo
and obj.stage_size > 0))
def OnCommitRepository(self, event):
"""Process a commit"""
dialog = ui_dlg.CommitGitDialog(context.frame)
if dialog.ShowModal() == wx.ID_OK:
self.selected.commit(dialog._message)
def OnUpdateRefreshStatus(self, event):
"""Update refresh status"""
obj = self.selected
event.Enable(obj is not None and type(obj) is not model.Workspace)
@tran.TransactionalMethod('refresh git status')
def OnRefreshStatus(self, event):
"""Refresh repository status"""
working = WorkingDialog(context.frame)
working.Show(True)
wx.YieldIfNeeded()
self.selected.update_status()
working.Close()
return True
def OnUpdateAddItemToRepository(self, event):
"""Update the operation of adding elements to repo"""
obj = self.selected
if not obj:
event.Enable(False)
return
if type(obj) is model.git.GitFile:
if obj._status == 'file':
event.Enable(True)
else:
event.Enable(False)
return
# ok, look for a matching child
fn = lambda x: (x._status == 'file')
if obj(model.git.GitFile, filter=fn):
event.Enable(True)
else:
event.Enable(False)
def OnAddItemToRepository(self, event):
"""Add element to track"""
obj = self.selected
repo = obj.repo
if type(obj) is model.git.GitFile:
repo.add(obj)
return
fn = lambda x: (x._status == 'file')
for item in obj(model.git.GitFile, filter=fn):
repo.add(item)
def OnUpdateStageItem(self, event):
"""Update the operation of adding elements to repo"""
obj = self.selected
if not obj:
event.Enable(False)
return
# to stage it is enough that the selected element
# or one of its children is a GitFile with modified status
if type(obj) is model.git.GitFile:
if obj._status == 'git_file_modified':
event.Enable(True)
else:
event.Enable(False)
return
# ok, look for a matching child
fn = lambda x: (x._status == 'git_file_modified')
if obj(model.git.GitFile, filter=fn):
event.Enable(True)
else:
event.Enable(False)
def OnStageItem(self, event):
"""Add element to track"""
obj = self.selected
repo = obj.repo
if type(obj) is model.git.GitFile:
repo.add(obj)
return
fn = lambda x: (x._status == 'git_file_modified')
for item in obj(model.git.GitFile, filter=fn):
repo.add(item)
def OnUpdateEditProperties(self, event):
""""""
event.Enable(False)
def OnEditProperties(self, event):
""""""
pass
|
melviso/phycpp
|
beatle/activity/git/ui/view/GitView.py
|
Python
|
gpl-2.0
| 17,147
|
#!/usr/bin/python3
import collections
Card = collections.namedtuple('Card', ['rank', 'suit'])
class FrenchDeck:
ranks = [str(n) for n in range(2, 11)] + list('JQKA')
suits = 'spades diamonds clubs hearts'.split()
def __init__(self):
self._cards = [Card(rank, suit) for suit in self.suits
for rank in self.ranks]
def __len__(self):
return len(self._cards)
def __getitem__(self, position):
return self._cards[position]
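# Illustrative usage (not part of the original file): implementing
# __len__ and __getitem__ makes the deck behave like a standard sequence:
#   deck = FrenchDeck()
#   len(deck)   # -> 52 (13 ranks x 4 suits)
#   deck[0]     # -> Card(rank='2', suit='spades')
#   from random import choice; choice(deck)  # random card also works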
|
zhuango/python
|
pythonLearning/oo/FrenchDeck.py
|
Python
|
gpl-2.0
| 487
|
##########################################################
# view the person ZODB database in PyForm's FormGui;
# FileDB maps indexing to db root, close does commit;
# caveat 1: FormGui doesn't yet allow mixed class types;
# caveat 2: FormGui has no way to call class methods;
# caveat 3: Persistent subclasses don't allow __class__
# to be set: must have defaults for all __init__ args;
# Person here works only if always defined in __main__;
##########################################################
import sys
filename = 'data/people-simple.fs'
from zodbtools import FileDB
from PP3E.Dbase.TableBrowser.formgui import FormGui
from PP3E.Dbase.TableBrowser.formtable import Table, InstanceRecord
class Person: pass
initrecs = {'bob': dict(name='bob', job='devel', pay=30),
'sue': dict(name='sue', job='music', pay=40)}
dbtable = Table(FileDB(filename), InstanceRecord(Person))
if len(sys.argv) > 1:
for key in dbtable.keys():
del dbtable[key] # "viewzodb.py -" inits db
dbtable.storeItems(initrecs) # "viewzodb.py" browses db
FormGui(dbtable).mainloop()
dbtable.printItems()
dbtable.close()
|
simontakite/sysadmin
|
pythonscripts/programmingpython/Dbase/Zodb-2.x/viewzodb.py
|
Python
|
gpl-2.0
| 1,196
|
# Copyright (c) 2017 Charles University, Faculty of Arts,
# Institute of the Czech National Corpus
# Copyright (c) 2017 Tomas Machalek <tomas.machalek@gmail.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# dated June, 1991.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
class IssueReportingAction(object):
def to_dict(self):
return self.__dict__
class DynamicReportingAction(IssueReportingAction):
def __init__(self):
self.type = 'dynamic'
class StaticReportingAction(IssueReportingAction):
def __init__(self, url, args, label, blank_window):
self.url = url
self.args = args
self.label = label
self.blank_window = blank_window
self.type = 'static'
class AbstractIssueReporting(object):
def export_report_action(self, plugin_api):
raise NotImplementedError()
def submit(self, plugin_api, args):
raise NotImplementedError()
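# Illustrative example (assumption, not part of the original module):
#   action = StaticReportingAction('https://tracker.example.org',
#                                  {'lang': 'en'}, 'Report an issue', True)
#   action.to_dict()  # -> {'url': ..., 'args': ..., 'label': ...,
#                     #     'blank_window': True, 'type': 'static'}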
|
tomachalek/kontext
|
lib/plugins/abstract/issue_reporting.py
|
Python
|
gpl-2.0
| 1,501
|
from django.core.exceptions import PermissionDenied
from core.models import Author, Editor
def copy_author_to_submission(user, book):
author = Author(
first_name=user.first_name,
middle_name=user.profile.middle_name,
last_name=user.last_name,
salutation=user.profile.salutation,
institution=user.profile.institution,
department=user.profile.department,
country=user.profile.country,
author_email=user.email,
biography=user.profile.biography,
orcid=user.profile.orcid,
twitter=user.profile.twitter,
linkedin=user.profile.linkedin,
facebook=user.profile.facebook,
)
author.save()
book.author.add(author)
return author
def copy_editor_to_submission(user, book):
editor = Editor(
first_name=user.first_name,
middle_name=user.profile.middle_name,
last_name=user.last_name,
salutation=user.profile.salutation,
institution=user.profile.institution,
department=user.profile.department,
country=user.profile.country,
author_email=user.email,
biography=user.profile.biography,
orcid=user.profile.orcid,
twitter=user.profile.twitter,
linkedin=user.profile.linkedin,
facebook=user.profile.facebook,
)
editor.save()
book.editor.add(editor)
return editor
def check_stage(book, check):
if book.submission_stage >= check:
pass
elif book.submission_date:
raise PermissionDenied()
else:
raise PermissionDenied()
def handle_book_labels(post, book, kind):
for _file in book.files.all():
if _file.kind == kind and post.get("%s" % _file.id, None):
_file.label = post.get("%s" % _file.id)
_file.save()
def handle_copyedit_author_labels(post, copyedit, kind):
for _file in copyedit.author_files.all():
if _file.kind == kind and post.get("%s" % _file.id, None):
_file.label = post.get("%s" % _file.id)
_file.save()
|
ubiquitypress/rua
|
src/submission/logic.py
|
Python
|
gpl-2.0
| 2,061
|
from enigma import iPlayableService, iRdsDecoder
from Screens.Screen import Screen
from Components.ActionMap import NumberActionMap
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Pixmap import Pixmap
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Tools.Directories import resolveFilename, SCOPE_ACTIVE_SKIN
from Tools.LoadPixmap import LoadPixmap
class RdsInfoDisplaySummary(Screen):
def __init__(self, session, parent):
Screen.__init__(self, session, parent=parent)
self['message'] = StaticText('')
self.parent.onText.append(self.onText)
def onText(self, message):
self['message'].text = message
if message and len(message):
self.show()
else:
self.hide()
class RdsInfoDisplay(Screen):
ALLOW_SUSPEND = True
def __init__(self, session):
Screen.__init__(self, session)
self.__event_tracker = ServiceEventTracker(screen=self, eventmap={iPlayableService.evEnd: self.__serviceStopped,
iPlayableService.evUpdatedRadioText: self.RadioTextChanged,
iPlayableService.evUpdatedRtpText: self.RtpTextChanged,
iPlayableService.evUpdatedRassInteractivePicMask: self.RassInteractivePicMaskChanged})
self['RadioText'] = Label()
self['RtpText'] = Label()
self['RassLogo'] = Pixmap()
self.onLayoutFinish.append(self.hideWidgets)
self.rassInteractivePossible = False
self.onRassInteractivePossibilityChanged = []
self.onText = []
def createSummary(self):
return RdsInfoDisplaySummary
def hideWidgets(self):
for x in (self['RadioText'], self['RtpText'], self['RassLogo']):
x.hide()
for x in self.onText:
x('')
def RadioTextChanged(self):
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
rdsText = decoder and decoder.getText(iRdsDecoder.RadioText)
if rdsText and len(rdsText):
self['RadioText'].setText(rdsText)
self['RadioText'].show()
else:
self['RadioText'].hide()
for x in self.onText:
x(rdsText)
def RtpTextChanged(self):
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
rtpText = decoder and decoder.getText(iRdsDecoder.RtpText)
if rtpText and len(rtpText):
self['RtpText'].setText(rtpText)
self['RtpText'].show()
else:
self['RtpText'].hide()
for x in self.onText:
x(rtpText)
def RassInteractivePicMaskChanged(self):
if not self.rassInteractivePossible:
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
mask = decoder and decoder.getRassInteractiveMask()
if mask[0] & 1:
self['RassLogo'].show()
self.rassInteractivePossible = True
for x in self.onRassInteractivePossibilityChanged:
x(True)
def __serviceStopped(self):
self.hideWidgets()
if self.rassInteractivePossible:
self.rassInteractivePossible = False
for x in self.onRassInteractivePossibilityChanged:
x(False)
class RassInteractive(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self['actions'] = NumberActionMap(['NumberActions', 'RassInteractiveActions'], {'exit': self.close,
'0': lambda x: self.numPressed(0),
'1': lambda x: self.numPressed(1),
'2': lambda x: self.numPressed(2),
'3': lambda x: self.numPressed(3),
'4': lambda x: self.numPressed(4),
'5': lambda x: self.numPressed(5),
'6': lambda x: self.numPressed(6),
'7': lambda x: self.numPressed(7),
'8': lambda x: self.numPressed(8),
'9': lambda x: self.numPressed(9),
'nextPage': self.nextPage,
'prevPage': self.prevPage,
'nextSubPage': self.nextSubPage,
'prevSubPage': self.prevSubPage})
self.__event_tracker = ServiceEventTracker(screen=self, eventmap={iPlayableService.evUpdatedRassInteractivePicMask: self.recvRassInteractivePicMaskChanged})
self['subpages_1'] = Pixmap()
self['subpages_2'] = Pixmap()
self['subpages_3'] = Pixmap()
self['subpages_4'] = Pixmap()
self['subpages_5'] = Pixmap()
self['subpages_6'] = Pixmap()
self['subpages_7'] = Pixmap()
self['subpages_8'] = Pixmap()
self['subpages_9'] = Pixmap()
self['Marker'] = Label('>')
self.subpage = {1: self['subpages_1'],
2: self['subpages_2'],
3: self['subpages_3'],
4: self['subpages_4'],
5: self['subpages_5'],
6: self['subpages_6'],
7: self['subpages_7'],
8: self['subpages_8'],
9: self['subpages_9']}
self.subpage_png = {1: LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/rass_page1.png')),
2: LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/rass_page2.png')),
3: LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/rass_page3.png')),
4: LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, 'icons/rass_page4.png'))}
self.current_page = 0
self.current_subpage = 0
self.showRassPage(0, 0)
self.onLayoutFinish.append(self.updateSubPagePixmaps)
def updateSubPagePixmaps(self):
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
if not decoder:
print 'NO RDS DECODER in updateSubPagePixmaps'
else:
mask = decoder.getRassInteractiveMask()
page = 1
while page < 10:
subpage_cnt = self.countAvailSubpages(page, mask)
subpage = self.subpage[page]
if subpage_cnt > 0:
if subpage.instance:
png = self.subpage_png[subpage_cnt]
if png:
subpage.instance.setPixmap(png)
subpage.show()
else:
print 'rass png missing'
else:
subpage.hide()
page += 1
def recvRassInteractivePicMaskChanged(self):
self.updateSubPagePixmaps()
def showRassPage(self, page, subpage):
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
if not decoder:
print 'NO RDS DECODER in showRassPage'
else:
decoder.showRassInteractivePic(page, subpage)
page_diff = page - self.current_page
self.current_page = page
if page_diff:
current_pos = self['Marker'].getPosition()
y = current_pos[1]
y += page_diff * 25
self['Marker'].setPosition(current_pos[0], y)
def getMaskForPage(self, page, masks = None):
if not masks:
service = self.session.nav.getCurrentService()
decoder = service and service.rdsDecoder()
if not decoder:
print 'NO RDS DECODER in getMaskForPage'
masks = decoder.getRassInteractiveMask()
if masks:
mask = masks[page * 4 / 8]
if page % 2:
mask >>= 4
else:
mask &= 15
return mask
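# Worked example (illustrative): for page 3 we read masks[3 * 4 / 8] ==
# masks[1] (integer division in Python 2); since 3 is odd the high nibble
# is selected (mask >>= 4), otherwise the low nibble (mask &= 15).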
def countAvailSubpages(self, page, masks):
mask = self.getMaskForPage(page, masks)
cnt = 0
while mask:
if mask & 1:
cnt += 1
mask >>= 1
return cnt
def nextPage(self):
mask = 0
page = self.current_page
while mask == 0:
page += 1
if page > 9:
page = 0
mask = self.getMaskForPage(page)
self.numPressed(page)
def prevPage(self):
mask = 0
page = self.current_page
while mask == 0:
if page > 0:
page -= 1
else:
page = 9
mask = self.getMaskForPage(page)
self.numPressed(page)
def nextSubPage(self):
self.numPressed(self.current_page)
def prevSubPage(self):
num = self.current_page
mask = self.getMaskForPage(num)
cur_bit = 1 << self.current_subpage
tmp = cur_bit
while True:
if tmp == 1:
tmp = 8
else:
tmp >>= 1
if tmp == cur_bit:
return
if mask & tmp:
subpage = 0
while tmp > 1:
subpage += 1
tmp >>= 1
self.current_subpage = subpage
self.showRassPage(num, subpage)
return
def numPressed(self, num):
mask = self.getMaskForPage(num)
if self.current_page == num:
self.skip = 0
cur_bit = 1 << self.current_subpage
tmp = cur_bit
else:
self.skip = 1
cur_bit = 16
tmp = 1
while True:
if not self.skip:
if tmp == 8 and cur_bit < 16:
tmp = 1
else:
tmp <<= 1
else:
self.skip = 0
if tmp == cur_bit:
return
if mask & tmp:
subpage = 0
while tmp > 1:
subpage += 1
tmp >>= 1
self.current_subpage = subpage
self.showRassPage(num, subpage)
return
|
kingvuplus/boom
|
lib/python/Screens/RdsDisplay.py
|
Python
|
gpl-2.0
| 10,176
|
#!/usr/bin/python
# *
# * Copyright (C) 2012-2013 Garrett Brown
# * Copyright (C) 2010 j48antialias
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with XBMC; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# *
# * Based on code by tknorris:
# * https://offshoregit.com/tknorris/tknorris-release-repo/raw/master/addons_xml_generator2.py
# * Based on code by j48antialias:
# * https://anarchintosh-projects.googlecode.com/files/addons_xml_generator.py
# *
# * Changes since v2:
# * - (assumed) zips reside in folder "download"
# * - md5 checksum creation added for zips
# * - Skip moving files and zip creation if zip file for the same version already exists
# * - alphabetical sorting
""" addons.xml generator """
import os
import sys
import time
import re
import xml.etree.ElementTree as ET
try:
import shutil, zipfile
except Exception as e:
print('An error occurred importing module!\n%s\n' % e)
# Compatibility with 3.0, 3.1 and 3.2 not supporting u"" literals
print(sys.version)
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
class Generator:
"""
Generates a new addons.xml file from each addons addon.xml file
and a new addons.xml.md5 hash file. Must be run from the root of
the checked-out repo. Only handles single depth folder structure.
"""
def __init__(self):
# generate files
self._generate_addons_file()
self._generate_md5_file()
# notify user
print("Finished updating addons xml and md5 files\n")
def _generate_addons_file(self):
# addon list
addons = sorted(os.listdir("."))
# final addons text
addons_xml = u("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n<addons>\n")
# loop thru and add each addons addon.xml file
for addon in addons:
try:
# skip any file or .svn folder or .git folder
if (not os.path.isdir(addon) or addon == ".svn" or addon == ".git" or addon == ".github" or addon == "download"): continue
# create path
_path = os.path.join(addon, "addon.xml")
# split lines for stripping
xml_lines = open(_path, "r").read().splitlines()
# new addon
addon_xml = ""
# loop thru cleaning each line
for line in xml_lines:
# skip encoding format line
if (line.find("<?xml") >= 0): continue
# add line
if sys.version < '3':
addon_xml += unicode(line.rstrip() + "\n", "UTF-8")
else:
addon_xml += line.rstrip() + "\n"
# we succeeded so add to our final addons.xml text
addons_xml += addon_xml.rstrip() + "\n\n"
except Exception as e:
# missing or poorly formatted addon.xml
print("Excluding %s for %s" % (_path, e))
# clean and add closing tag
addons_xml = addons_xml.strip() + u("\n</addons>\n")
# save file
self._save_file(addons_xml.encode("UTF-8"), file="addons.xml")
def _generate_md5_file(self):
# create a new md5 hash
try:
import md5
m = md5.new(open("addons.xml", "r").read()).hexdigest()
except ImportError:
import hashlib
m = hashlib.md5(open("addons.xml", "r", encoding="UTF-8").read().encode("UTF-8")).hexdigest()
# save file
try:
self._save_file(m.encode("UTF-8"), file="addons.xml.md5")
except Exception as e:
# oops
print("An error occurred creating addons.xml.md5 file!\n%s" % e)
def _save_file(self, data, file):
try:
# write data to the file (use b for Python 3)
open(file, "wb").write(data)
except Exception as e:
# oops
print("An error occurred saving %s file!\n%s" % (file, e))
def zipfolder(foldername, target_dir, zips_dir, addon_dir):
zipobj = zipfile.ZipFile(zips_dir + foldername, 'w', zipfile.ZIP_DEFLATED)
rootlen = len(target_dir) + 1
for base, dirs, files in os.walk(target_dir):
for f in files:
fn = os.path.join(base, f)
zipobj.write(fn, os.path.join(addon_dir, fn[rootlen:]))
zipobj.close()
if (__name__ == "__main__"):
# start
Generator()
# rezip files and move
try:
print('Starting zip file creation...')
rootdir = sys.path[0]
zipsdir = rootdir + os.sep + 'download'
filesinrootdir = sorted(os.listdir(rootdir))
for x in filesinrootdir:
if re.search("^(context|plugin|script|service|skin|repository|docker)" , x) and not re.search('.zip', x):
zipfilename = x + '.zip'
zipfilenamefirstpart = zipfilename[:-4]
zipfilenamelastpart = zipfilename[len(zipfilename) - 4:]
zipsfolder = os.path.normpath(os.path.join('download', x)) + os.sep
foldertozip = rootdir + os.sep + x
filesinfoldertozip = sorted(os.listdir(foldertozip))
# #check if download folder exists
if not os.path.exists(zipsfolder):
os.makedirs(zipsfolder)
print('Directory doesn\'t exist, creating: ' + zipsfolder)
# #get addon version number
if "addon.xml" in filesinfoldertozip:
tree = ET.parse(os.path.join(rootdir, x, "addon.xml"))
root = tree.getroot()
for elem in root.iter('addon'):
print('%s %s version: %s' % (x, elem.tag, elem.attrib['version']))
version = '-' + elem.attrib['version']
# # #check for existing zips
if not os.path.exists(zipsfolder + x + version + '.zip'):
# #check if and move addon, changelog, fanart and icon to zipdir
for y in filesinfoldertozip:
# print('processing file: ' + os.path.join(rootdir,x,y))
if re.search("addon|changelog|icon|fanart", y):
shutil.copyfile(os.path.join(rootdir, x, y), os.path.join(zipsfolder, y))
print('Copying %s to %s' % (y, zipsfolder))
# #check for and zip the folders
print('Zipping %s and moving to %s\n' % (x, zipsfolder))
try:
zipfolder(zipfilenamefirstpart + version + zipfilenamelastpart, foldertozip, zipsfolder, x)
print('zipped with zipfolder')
# # #create md5 checksum for zips
import hashlib
try:
m = hashlib.md5(open("%s" % (zipsfolder + x + version + '.zip'), "rb").read()).hexdigest()
open("%s" % (zipsfolder + x + version + '.zip.md5'), "wb").write(m.encode("UTF-8"))
print("zip.md5 file created\n")
except Exception as e:
print("An error occurred creating zip.md5 file!\n%s" % e)
except:
if os.path.exists(zipsfolder + x + version + '.zip'):
os.remove(zipsfolder + x + version + '.zip')
print('trying shutil')
try:
shutil.move(shutil.make_archive(foldertozip + version, 'zip', rootdir, x), zipsfolder)
print('zipped with shutil\n')
except Exception as e:
print('Cannot create zip file\nshutil %s\n' % e)
else:
print('Zip file for %s version %s already exists, skipping moving files and zip creation.\n' % (x, version))
except Exception as e:
print('Cannot create or move the needed files\n%s' % e)
print('Done')
|
dknlght/dkodi
|
src/addons_xml_generator3.py
|
Python
|
gpl-2.0
| 8,998
|
"""Definitions for output formats."""
import collections
from enum import Enum, unique
__copyright__ = 'Copyright 2021, 3Liz'
__license__ = 'GPL version 3'
__email__ = 'info@3liz.org'
format_output = collections.namedtuple('format', ['label', 'driver_name', 'extension'])
@unique
class Format(Enum):
""" Name of output formats."""
GeoJSON = format_output('GeoJSON', 'GeoJSON', 'geojson')
"""GeoJSON"""
GeoPackage = format_output('GeoPackage', 'GPKG', 'gpkg')
"""GeoPackage"""
Shapefile = format_output('ESRI Shapefile', 'ESRI Shapefile', 'shp')
"""Shapefile"""
Kml = format_output('Kml', 'KML', 'kml')
"""Kml"""
|
3liz/QgisQuickOSMPlugin
|
QuickOSM/definitions/format.py
|
Python
|
gpl-2.0
| 657
|
import wx
from Gauge import Gauge
class DependencyPanel(wx.Panel):
def __init__(self, parent, text, gaugeColor, textColor, env = 0, mineral = 0, energy = 0, nothing = 0):
wx.Panel.__init__(self, parent, -1)
gaugeBorders = (5, 5, 1, 7)
self.env = Gauge(self, gaugeColor, textColor, gaugeBorders, env)
self.mineral = Gauge(self, gaugeColor, textColor, gaugeBorders, mineral)
self.energy = Gauge(self, gaugeColor, textColor, gaugeBorders, energy)
self.nothing = Gauge(self, gaugeColor, textColor, gaugeBorders, nothing)
vertBox = wx.BoxSizer(wx.VERTICAL)
panelCaption = wx.StaticText(self, -1, text)
font = panelCaption.GetFont()
font.SetWeight(wx.FONTWEIGHT_BOLD)
panelCaption.SetFont(font)
vertBox.Add(panelCaption, 0, wx.BOTTOM, 5)
gbs = wx.GridBagSizer(4, 4)
gbs.SetFlexibleDirection(wx.HORIZONTAL)
gbs.SetCols(2)
gbs.SetRows(4)
gbs.AddGrowableCol(1)
gbs.Add(wx.StaticText(self, -1, "Environment"), (0, 0), flag = wx.WEST, border = 4)
gbs.Add(self.env, (0, 1), flag = wx.EXPAND)
gbs.Add(wx.StaticText(self, -1, "Mineral"), (1, 0), flag = wx.WEST, border = 4)
gbs.Add(self.mineral, (1, 1), flag = wx.EXPAND)
gbs.Add(wx.StaticText(self, -1, "Energy"), (2, 0), flag = wx.WEST, border = 4)
gbs.Add(self.energy, (2, 1), flag = wx.EXPAND)
gbs.Add(wx.StaticText(self, -1, "Nothing"), (3, 0), flag = wx.WEST, border = 4)
gbs.Add(self.nothing, (3, 1), flag = wx.EXPAND)
vertBox.Add(gbs, 1, wx.EXPAND | wx.ALL)
self.SetSizerAndFit(vertBox)
def SetEnv(self, percent):
self.env.percent = percent
self.Refresh()
def SetMineral(self, percent):
self.mineral.percent = percent
self.Refresh()
def SetEnergy(self, percent):
self.energy.percent = percent
self.Refresh()
def SetNothing(self, percent):
self.nothing.percent = percent
self.Refresh()
def Clear(self):
self.nothing.percent = 0
self.mineral.percent = 0
self.env.percent = 0
self.energy.percent = 0
self.Refresh()
|
OuterDeepSpace/OuterDeepSpace
|
techviewer/DependencyPanel.py
|
Python
|
gpl-2.0
| 1,962
|
# This file is part of Copernicus
# http://www.copernicus-computing.org/
#
# Copyright (C) 2011, Sander Pronk, Iman Pouya, Erik Lindahl, and others.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re
import tempfile
import mimetools
import cgi
import logging
import shutil
import filecmp
import os
import cpc.util.log
'''
Created on Mar 7, 2011
@author: iman
'''
from cpc.network.server_request import ServerRequest
import urlparse
log=logging.getLogger(__name__)
#handles parsing of the HTTP methods
class HttpMethodParser(object):
'''
classdocs
'''
def __init__(self):
pass
'''
input dict headers,string path
'''
@staticmethod
def parseGET(headers,path):
#separate the request params from the path
splittedPath = path.split('?')
msg = splittedPath[1]
parsedDict = urlparse.parse_qs(msg) #Note values here are stored in lists, this is so one can handle many inputs with same name, for now we don't want that as our multipart parsing does not support it
params = dict()
for k,v in parsedDict.iteritems():
params[k] = v[0]
request = ServerRequest(headers,None,params)
return request
'''
Input: dict headers, file message
'''
@staticmethod
def parsePUT(headers,message):
pass
'''
Input: dict headers, file message
'''
@staticmethod
def parsePOST(headers,message):
if ServerRequest.isMultiPart(headers['content-type']):
request = HttpMethodParser.handleMultipart(headers,message)
else:
request = HttpMethodParser.handleSinglePart(headers, message)
#after this is done the application XML parser should be adapted to handle non xml style commands
#next step is to make the parsing more general to work with browser, NOTE done in web branch
return request
#handles singlepart POST messages
@staticmethod
def handleSinglePart(headers,message):
contentLength = long(headers['content-length'])
if headers['content-type'] == 'application/x-www-form-urlencoded' or headers['content-type'] == 'application/x-www-form-urlencoded; charset=UTF-8': #TODO generalize
msg = message.read(contentLength)
parsedDict = urlparse.parse_qs(msg) #Note values here are stored in lists, this is so one can handle many inputs with same name, for now we don't want that as our multipart parsing does not support it
params = dict()
for k,v in parsedDict.iteritems():
params[k] = v[0]
log.log(cpc.util.log.TRACE,'msg is %s'%params)
request = ServerRequest(headers,None,params)
return request
@staticmethod
def handleMultipart(mainHeaders,msgStream):
files = dict()
params = dict()
BOUNDARY = "--"+HttpMethodParser.extractBoundary(mainHeaders)
stopBoundary = BOUNDARY+"--"
terminateBoundary = ''
msgStream.readline() #has an empty line at start that we want to get rid of
while terminateBoundary != stopBoundary:
headers = mimetools.Message(msgStream)
terminateBoundary = ''
log.log(cpc.util.log.TRACE,'multipart headers are %s'%headers.headers)
if(ServerRequest.isFile(headers['Content-Disposition'])):
file = tempfile.TemporaryFile(mode="w+b")
name = ServerRequest.getFieldName(headers['Content-Disposition'])
notused,contentDispositionParams = cgi.parse_header(headers['Content-Disposition'])
name = contentDispositionParams['name']
#if we have a content length we just read it and store the data
contentLength = headers.getheader('Content-Length')
if(contentLength): # If a content length is sent we parse the nice way
bytes = int(contentLength)
if(ServerRequest.isFile(headers['Content-Disposition'])):
file.write(msgStream.read(bytes))
else:
line = msgStream.read(bytes)
log.log(cpc.util.log.TRACE,"line is "+line)
params[name] = line
msgStream.readline() # we will have a trailing CRLF that we just want to get rid of
if(ServerRequest.isFile(headers['Content-Disposition'])):
readBytes = 0
while(True):
line = msgStream.readline()
if re.search(BOUNDARY,line):
#time to wrap it up
if(line[-2:] == '\r\n'):
line = line[:-2]
elif(line[-1:] == '\n'):
line = line[:-1]
terminateBoundary = line
file.seek(0)
skipBytes = 2
realFile = tempfile.TemporaryFile(mode="w+b")
realFile.write(file.read(readBytes-skipBytes))
file.close()
realFile.seek(0)
#For testing during dev only!!
#runTest(realFile)
files[name]= realFile
break
else:
readBytes +=len(line)
file.write(line)
else:
while(True):
line = msgStream.readline()
if(line[-2:] == '\r\n'):
line = line[:-2]
elif(line[-1:] == '\n'):
line = line[:-1]
if re.search(BOUNDARY,line):
terminateBoundary = line
break
else:
if name in params:
params[name]+= line
else:
params[name] = line
return ServerRequest(mainHeaders,None,params,files)
@staticmethod
# extracts the boundary sent from the header
def extractBoundary(headers):
regexp = 'boundary=(.*)'
if 'Content-Type' in headers:
contentType = headers['Content-Type']
else:
contentType = headers['content-type']
match = re.search(regexp,contentType)
if match == None:
raise Exception('Could not find a multipart message boundary')
else:
return match.group(1)
#tests the file against a reference file
# this test can be run if one sees problems with the file transfer in multipart POST
# send a file, and specify the path in the referenceFilename variable
# will test that the received file that was parsed has same content and size as reference file
def runTest(self,realFile):
# For TESTING PURPOSES
#referenceFilename = "/Users/iman/Desktop/snowleopard_10a432_userdvd.dmg"
referenceFilename = "/Users/iman/Documents/workspace/copernicus/examples/single.tar.gz"
resultFilename = "/Users/iman/Desktop/cpctemp/resfile"
cpfile = open(resultFilename,"w+b")
shutil.copyfileobj(realFile,cpfile)
cpfile.close()
realFile.seek(0)
fileEquals = filecmp.cmp(referenceFilename,resultFilename)
print "IMAN file match is %s"%fileEquals
print "original file size is %d and transferred size is %d"%(os.path.getsize(referenceFilename),os.path.getsize(resultFilename))
realFile.seek(0)
|
gromacs/copernicus
|
cpc/network/http/http_method_parser.py
|
Python
|
gpl-2.0
| 9,130
|
import sys, os, re, string
from autotest_lib.client.bin import utils, fsinfo, fsdev_mgr, partition
from autotest_lib.client.common_lib import error
fd_mgr = fsdev_mgr.FsdevManager()
# For unmounting / formatting file systems we may have to use a device name
# that is different from the real device name that we have to use to set I/O
# scheduler tunables.
_DISKPART_FILE = '/proc/partitions'
##############################################################################
#
# The 'disk_list' array returned by get_disk_list() has an entry for each
# disk drive we find on the box. Each of these entries is a map with the
# following 3 string values:
#
# 'device' disk device name (i.e. the part after /dev/)
# 'mountpt' disk mount path
# 'tunable' disk name for setting scheduler tunables (/sys/block/sd??)
#
# The last value is an integer that indicates the current mount status
# of the drive:
#
# 'mounted' 0 = not currently mounted
# 1 = mounted r/w on the expected path
# -1 = mounted readonly or at an unexpected path
#
# When the 'std_mounts_only' argument is True we don't include drives
# mounted on 'unusual' mount points in the result.
#
##############################################################################
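# Illustrative shape of one entry (values are made up):
#
# {'device': 'sdc3', 'mountpt': '/mnt/sdc3', 'tunable': 'sdc',
# 'fs_type': 'ext2', 'fs_opts': '(rw)', 'fs_mkfs': '?', 'mounted': 1}
#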
def get_disk_list(std_mounts_only=True):
# Get hold of the currently mounted file systems
mounts = utils.system_output('mount').splitlines()
# Grab all the interesting disk partition names from /proc/partitions,
# and build up the table of drives present in the system.
hd_list = []
hd_regexp = re.compile("([hs]d[a-z]+3)$")
partfile = open(_DISKPART_FILE)
for partline in partfile:
parts = partline.strip().split()
if len(parts) != 4 or partline.startswith('major'):
continue
# Get hold of the partition name
partname = parts[3]
# The partition name better end with a digit
if not partname[-1:].isdigit():
continue
# Process any site-specific filters on the partition name
if not fd_mgr.use_partition(partname):
continue
# We need to know the IDE/SATA/... device name for setting tunables
tunepath = fd_mgr.map_drive_name(partname)
# Check whether the device is mounted (and how)
mstat = 0
fstype = ''
fsopts = ''
fsmkfs = '?'
# Prepare the full device path for matching
chkdev = '/dev/' + partname
# If the partition is mounted, we'll record the mount point
mountpt = None
for mln in mounts:
splt = mln.split()
# Typical 'mount' output line looks like this (indices
# for the split() result shown below):
#
# <device> on <mount_point> type <fstp> <options>
# 0 1 2 3 4 5
if splt[0] == chkdev:
# Make sure the mount point looks reasonable
mountpt = fd_mgr.check_mount_point(partname, splt[2])
if not mountpt:
mstat = -1
break
# Grab the file system type and mount options
fstype = splt[4]
fsopts = splt[5]
# Check for something other than a r/w mount
if fsopts[:3] != '(rw':
mstat = -1
break
# The drive is mounted at the 'normal' mount point
mstat = 1
# Does the caller only want to allow 'standard' mount points?
if std_mounts_only and mstat < 0:
continue
# Was this partition mounted at all?
if not mountpt:
# Ask the client where we should mount this partition
mountpt = fd_mgr.check_mount_point(partname, None)
if not mountpt:
# Client doesn't know where to mount partition - ignore it
continue
# Looks like we have a valid disk drive, add it to the list
hd_list.append({ 'device' : partname,
'mountpt': mountpt,
'tunable': tunepath,
'fs_type': fstype,
'fs_opts': fsopts,
'fs_mkfs': fsmkfs,
'mounted': mstat })
return hd_list
def mkfs_all_disks(job, disk_list, fs_type, fs_makeopt, fs_mnt_opt):
"""
Prepare all the drives in 'disk_list' for testing. For each disk this means
unmounting any mount points that use the disk, running mkfs with 'fs_type'
as the file system type and 'fs_makeopt' as the 'mkfs' options, and finally
remounting the freshly formatted drive using the flags in 'fs_mnt_opt'.
"""
for disk in disk_list:
# For now, ext4 isn't quite ready for prime time
if fs_type == "ext4":
fs_type = "ext4dev"
# Grab the device and mount paths for the drive
dev_path = os.path.join('/dev', disk["device"])
mnt_path = disk['mountpt']
# Create a file system instance
try:
fs = job.filesystem(device=dev_path, mountpoint=mnt_path)
except:
raise Exception("Could not create a filesystem on '%s'" % dev_path)
# Make sure the volume is unmounted
if disk["mounted"]:
try:
fs.unmount(mnt_path)
except Exception, info:
raise Exception("umount failed: exception = %s, args = %s" %
(sys.exc_info()[0], info.args))
except:
raise Exception("Could not unmount device ", dev_path)
# Is the drive already formatted with the right file system?
skip_mkfs = match_fs(disk, dev_path, fs_type, fs_makeopt)
# Next step is to create a fresh file system (if we need to)
try:
if not skip_mkfs:
fs.mkfs(fstype = fs_type, args = fs_makeopt)
except:
raise Exception("Could not 'mkfs " + "-t " + fs_type + " " +
fs_makeopt + " " + dev_path + "'")
# Mount the drive with the appropriate FS options
try:
opts = ""
if fs_mnt_opt != "":
opts += " -o " + fs_mnt_opt
fs.mount(mountpoint = mnt_path, fstype = fs_type, args = opts)
except NameError, info:
raise Exception("mount name error: %s" % info)
except Exception, info:
raise Exception("mount failed: exception = %s, args = %s" %
(type(info), info.args))
# If we skipped mkfs we need to wipe the partition clean
if skip_mkfs:
fs.wipe()
# Record the new file system type and options in the disk list
disk["mounted"] = True
disk["fs_type"] = fs_type
disk["fs_mkfs"] = fs_makeopt
disk["fs_opts"] = fs_mnt_opt
# Try to wipe the file system slate clean
utils.drop_caches()
# XXX(gps): Remove this code once refactoring is complete to get rid of these
# nasty test description strings.
def _legacy_str_to_test_flags(fs_desc_string):
"""Convert a legacy FS_LIST string into a partition.FsOptions instance."""
match = re.search('(.*?)/(.*?)/(.*?)/(.*)$', fs_desc_string.strip())
if not match:
raise ValueError('unrecognized FS list entry %r' % fs_desc_string)
flags_obj = partition.FsOptions(fstype=match.group(1).strip(),
mkfs_flags=match.group(2).strip(),
mount_options=match.group(3).strip(),
fs_tag=match.group(4).strip())
return flags_obj
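# Example (taken from restore_disks() below): the legacy string
# 'ext2 / -q -i20480 -m1 / / restore_ext2' parses to
# FsOptions(fstype='ext2', mkfs_flags='-q -i20480 -m1',
# mount_options='', fs_tag='restore_ext2').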
def prepare_disks(job, fs_desc, disk1_only=False, disk_list=None):
"""
Prepare drive(s) to contain the file system type / options given in the
description line 'fs_desc'. When 'disk_list' is not None, we prepare all
the drives in that list; otherwise we pick the first available data drive
(which is usually hdc3) and prepare just that one drive.
Args:
fs_desc: A partition.FsOptions instance describing the test -OR- a
legacy string describing the same in '/' separated format:
'fstype / mkfs opts / mount opts / short name'.
disk1_only: Boolean, defaults to False. If True, only test the first
disk.
disk_list: A list of disks to prepare. If None is given we default to
asking get_disk_list().
Returns:
(mount path of the first disk, short name of the test, list of disks)
OR (None, '', None) if no fs_desc was given.
"""
# Special case - do nothing if caller passes no description.
if not fs_desc:
return (None, '', None)
if not isinstance(fs_desc, partition.FsOptions):
fs_desc = _legacy_str_to_test_flags(fs_desc)
# If no disk list was given, we'll get it ourselves
if not disk_list:
disk_list = get_disk_list()
# Make sure we have the appropriate 'mkfs' binary for the file system
mkfs_bin = 'mkfs.' + fs_desc.filesystem
if fs_desc.filesystem == 'ext4':
mkfs_bin = 'mkfs.ext4dev'
try:
utils.system('which ' + mkfs_bin)
except Exception:
try:
mkfs_bin = os.path.join(job.toolsdir, mkfs_bin)
utils.system('cp -ufp %s /sbin' % mkfs_bin)
except Exception:
raise error.TestError('No mkfs binary available for ' +
fs_desc.filesystem)
# For 'ext4' we need to add '-E test_fs' to the mkfs options
if fs_desc.filesystem == 'ext4':
fs_desc.mkfs_flags += ' -E test_fs'
# If the caller only needs one drive, grab the first one only
if disk1_only:
disk_list = disk_list[0:1]
# We have all the info we need to format the drives
mkfs_all_disks(job, disk_list, fs_desc.filesystem,
fs_desc.mkfs_flags, fs_desc.mount_options)
# Return(mount path of the first disk, test tag value, disk_list)
return (disk_list[0]['mountpt'], fs_desc.fs_tag, disk_list)
def restore_disks(job, restore=False, disk_list=None):
"""
Restore ext2 on the drives in 'disk_list' if 'restore' is True; when
disk_list is None, we do nothing.
"""
if restore and disk_list is not None:
prepare_disks(job, 'ext2 / -q -i20480 -m1 / / restore_ext2',
disk1_only=False,
disk_list=disk_list)
def wipe_disks(job, disk_list):
"""
Wipe all of the drives in 'disk_list' using the 'wipe' functionality
in the filesystem class.
"""
for disk in disk_list:
partition.wipe_filesystem(job, disk['mountpt'])
def match_fs(disk, dev_path, fs_type, fs_makeopt):
"""
Matches the user provided fs_type and fs_makeopt with the current disk.
"""
if disk["fs_type"] != fs_type:
return False
elif disk["fs_mkfs"] == fs_makeopt:
# No need to mkfs the volume, we only need to remount it
return True
elif fsinfo.match_mkfs_option(fs_type, dev_path, fs_makeopt):
if disk["fs_mkfs"] != '?':
raise Exception("mkfs option strings differ but auto-detection"
" code thinks they're identical")
else:
return True
else:
return False
##############################################################################
# The following variables/methods are used to invoke fsdev in 'library' mode
FSDEV_JOB = None
FSDEV_FS_DESC = None
FSDEV_RESTORE = None
FSDEV_PREP_CNT = 0
FSDEV_DISK1_ONLY = None
FSDEV_DISKLIST = None
def use_fsdev_lib(fs_desc, disk1_only, reinit_disks):
"""
Called from the control file to indicate that fsdev is to be used.
"""
global FSDEV_FS_DESC
global FSDEV_RESTORE
global FSDEV_DISK1_ONLY
global FSDEV_PREP_CNT
# This is a bit tacky - we simply save the arguments in global variables
FSDEV_FS_DESC = fs_desc
FSDEV_DISK1_ONLY = disk1_only
FSDEV_RESTORE = reinit_disks
# We need to keep track how many times 'prepare' is called
FSDEV_PREP_CNT = 0
def prepare_fsdev(job):
"""
Called from the test file to get the necessary drive(s) ready; return
a pair of values: the absolute path to the first drive's mount point
plus the complete disk list (which is useful for tests that need to
use more than one drive).
"""
global FSDEV_JOB
global FSDEV_DISKLIST
global FSDEV_PREP_CNT
if not FSDEV_FS_DESC:
return (None, None)
# Avoid preparing the same thing more than once
FSDEV_PREP_CNT += 1
if FSDEV_PREP_CNT > 1:
return (FSDEV_DISKLIST[0]['mountpt'],FSDEV_DISKLIST)
FSDEV_JOB = job
(path,toss,disks) = prepare_disks(job, fs_desc = FSDEV_FS_DESC,
disk1_only = FSDEV_DISK1_ONLY,
disk_list = None)
FSDEV_DISKLIST = disks
return (path,disks)
def finish_fsdev(force_cleanup=False):
"""
This method can be called from the test file to optionally restore
all the drives used by the test to a standard ext2 format. Note that
if use_fsdev_lib() was invoked with 'reinit_disks' not set to True,
this method does nothing. Note also that only fsdev "server-side"
dynamic control files should ever set force_cleanup to True.
"""
if FSDEV_PREP_CNT == 1 or force_cleanup:
restore_disks(job = FSDEV_JOB,
restore = FSDEV_RESTORE,
disk_list = FSDEV_DISKLIST)
##############################################################################
class fsdev_disks:
"""
Disk drive handling class used for file system development
"""
def __init__(self, job):
self.job = job
# Some clients need to access the 'fsdev manager' instance directly
def get_fsdev_mgr(self):
return fd_mgr
def config_sched_tunables(self, desc_file):
# Parse the file that describes the scheduler tunables and their paths
self.tune_loc = eval(open(desc_file).read())
# Figure out what kernel we're running on
kver = utils.system_output('uname -r')
kver = re.match("([0-9]+\.[0-9]+\.[0-9]+).*", kver)
kver = kver.group(1)
# Make sure we know how to handle the kernel we're running on
tune_files = self.tune_loc[kver]
if tune_files is None:
raise Exception("Scheduler tunables not available for kernel " +
kver)
# Save the kernel version for later
self.kernel_ver = kver
# For now we always use 'anticipatory'
tune_paths = tune_files["anticipatory"]
# Create a dictionary out of the tunables array
self.tune_loc = {}
for tx in range(len(tune_paths)):
# Grab the next tunable path from the array
tpath = tune_paths[tx]
# Strip any leading directory names
tuner = tpath
while 1:
slash = tuner.find("/")
if slash < 0:
break
tuner = tuner[slash+1:]
# Add mapping to the dictionary
self.tune_loc[tuner] = tpath
def load_sched_tunable_values(self, val_file):
# Prepare the array of tunable values
self.tune_list = []
# Read the config parameters and find the values that match our kernel
for cfgline in open(val_file):
cfgline = cfgline.strip()
if len(cfgline) == 0:
continue
if cfgline.startswith("#"):
continue
if cfgline.startswith("tune[") == 0:
raise Exception("Config entry not recognized: " + cfgline)
endKV = cfgline.find("]:")
if endKV < 0:
raise Exception("Config entry missing closing bracket: "
+ cfgline)
if cfgline[5:endKV] != self.kernel_ver[0:endKV-5]:
continue
tune_parm = cfgline[endKV+2:].strip()
equal = tune_parm.find("=")
if equal < 1 or equal == len(tune_parm) - 1:
raise Exception("Config entry doesn't have 'parameter=value' :"
+ cfgline)
tune_name = tune_parm[:equal]
tune_val = tune_parm[equal+1:]
# See if we have a matching entry in the path dictionary
try:
tune_path = self.tune_loc[tune_name]
except:
raise Exception("Unknown config entry: " + cfgline)
self.tune_list.append((tune_name, tune_path, tune_val))
def set_sched_tunables(self, disks):
"""
Given a list of disks in the format returned by get_disk_list() above,
set the I/O scheduler values on all the disks to the values loaded
earlier by load_sched_tunables().
"""
for dx in range(len(disks)):
disk = disks[dx]['tunable']
# Set the scheduler first before setting any other tunables
self.set_tunable(disk, "scheduler",
self.tune_loc["scheduler"],
"anticipatory")
# Now set all the tunable parameters we've been given
for tune_desc in self.tune_list:
self.set_tunable(disk, tune_desc[0],
tune_desc[1],
tune_desc[2])
def set_tunable(self, disk, name, path, val):
"""
Given a disk name, a path to a tunable value under _TUNE_PATH and the
new value for the parameter, set the value and verify that the value
has been successfully set.
"""
fpath = partition.get_iosched_path(disk, path)
# Things might go wrong so we'll catch exceptions
try:
step = "open tunable path"
tunef = open(fpath, 'w', buffering=-1)
step = "write new tunable value"
tunef.write(val)
step = "close the tunable path"
tunef.close()
step = "read back new tunable value"
nval = open(fpath, 'r', buffering=-1).read().strip()
# For 'scheduler' we need to fish out the bracketed value
if name == "scheduler":
nval = re.match(".*\[(.*)\].*", nval).group(1)
except IOError, info:
# Special case: for some reason 'max_sectors_kb' often doesn't work
# with large values; try '128' if we haven't tried it already.
if name == "max_sectors_kb" and info.errno == 22 and val != '128':
self.set_tunable(disk, name, path, '128')
return
# Something went wrong, probably a 'config' problem of some kind
raise Exception("Unable to set tunable value '" + name +
"' at step '" + step + "': " + str(info))
except Exception:
# We should only ever see 'IOError' above, but just in case ...
raise Exception("Unable to set tunable value for " + name)
# Make sure the new value is what we expected
if nval != val:
raise Exception("Unable to correctly set tunable value for "
+ name +": desired " + val + ", but found " + nval)
return
|
ceph/autotest
|
client/bin/fsdev_disks.py
|
Python
|
gpl-2.0
| 19,722
|
# shipBonusSupercarrierC2ShieldResists
#
# Used by:
# Ship: Wyvern
type = "passive"
def handler(fit, src, context):
fit.ship.boostItemAttr("shieldThermalDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"),
skill="Caldari Carrier")
fit.ship.boostItemAttr("shieldEmDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"),
skill="Caldari Carrier")
fit.ship.boostItemAttr("shieldKineticDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"),
skill="Caldari Carrier")
fit.ship.boostItemAttr("shieldExplosiveDamageResonance", src.getModifiedItemAttr("shipBonusSupercarrierC2"),
skill="Caldari Carrier")
|
Ebag333/Pyfa
|
eos/effects/shipbonussupercarrierc2shieldresists.py
|
Python
|
gpl-3.0
| 767
|
# -*- coding: utf-8 -*-
from django.views.generic import FormView
class ChangePassword(FormView):
pass
change_password = ChangePassword.as_view()
|
tokyo-jesus/wampum
|
user/private_views/_change_password.py
|
Python
|
gpl-3.0
| 152
|
../../../../../share/pyshared/papyon/sip/call_manager.py
|
Alberto-Beralix/Beralix
|
i386-squashfs-root/usr/lib/python2.7/dist-packages/papyon/sip/call_manager.py
|
Python
|
gpl-3.0
| 56
|
from django.conf.urls import url, include, patterns
from . import views
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = patterns('',
url(r'^$', views.index, name='index'),
)
|
i32baher/practicas-iss
|
tubeworld/main/urls.py
|
Python
|
gpl-3.0
| 289
|
#!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from __future__ import absolute_import
from __future__ import unicode_literals
from gnuradio import gr, blocks
from . import fec_swig as fec
from .threaded_encoder import threaded_encoder
from .capillary_threaded_encoder import capillary_threaded_encoder
from .bitflip import read_bitlist
class extended_encoder(gr.hier_block2):
def __init__(self, encoder_obj_list, threading, puncpat=None):
gr.hier_block2.__init__(self, "extended_encoder",
gr.io_signature(1, 1, gr.sizeof_char),
gr.io_signature(1, 1, gr.sizeof_char))
self.blocks=[]
self.puncpat=puncpat
if(type(encoder_obj_list) == list):
if(type(encoder_obj_list[0]) == list):
gr.log.info("fec.extended_encoder: Parallelism must be 1.")
raise AttributeError
else:
# If it has parallelism of 0, force it into a list of 1
encoder_obj_list = [encoder_obj_list,]
if fec.get_encoder_input_conversion(encoder_obj_list[0]) == "pack":
self.blocks.append(blocks.pack_k_bits_bb(8))
if threading == 'capillary':
self.blocks.append(capillary_threaded_encoder(encoder_obj_list,
gr.sizeof_char,
gr.sizeof_char))
elif threading == 'ordinary':
self.blocks.append(threaded_encoder(encoder_obj_list,
gr.sizeof_char,
gr.sizeof_char))
else:
self.blocks.append(fec.encoder(encoder_obj_list[0],
gr.sizeof_char,
gr.sizeof_char))
if fec.get_encoder_output_conversion(encoder_obj_list[0]) == "packed_bits":
self.blocks.append(blocks.packed_to_unpacked_bb(1, gr.GR_MSB_FIRST))
if self.puncpat != '11':
self.blocks.append(fec.puncture_bb(len(puncpat), read_bitlist(puncpat), 0))
# Connect the input to the encoder and the output to the
# puncture if used or the encoder if not.
self.connect((self, 0), (self.blocks[0], 0));
self.connect((self.blocks[-1], 0), (self, 0));
# If using the puncture block, add it into the flowgraph after
# the encoder.
for i in range(len(self.blocks) - 1):
self.connect((self.blocks[i], 0), (self.blocks[i+1], 0));
|
TheWylieStCoyote/gnuradio
|
gr-fec/python/fec/extended_encoder.py
|
Python
|
gpl-3.0
| 2,704
|
# This file is part of jacqq.py
# Copyright (C) 2015 Saman Jirjies - sjirjies(at)asu(dot)edu.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import csv
import argparse
# This script generates a null data set where all outputs are 0 when passed through Jacquez's Q.
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Generate a lattice of pentagon case-control points",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('x_size', type=int, help="Number of clusters to form in the x direction.")
parser.add_argument('y_size', type=int, help="Number of clusters to form in the y direction.")
parser.add_argument('histories_data', help="Location to write individuals' residential history.")
parser.add_argument('details_data', help="Location to write individuals' status data set.")
parser.add_argument('focus_data', help="Location to write focus data set")
args = parser.parse_args()
lattice_size_y = args.x_size
lattice_size_x = args.y_size
case_locations = []
for xi in range(0, lattice_size_x):
for yi in range(0, lattice_size_y):
case_locations.append((2+(10*xi), 2+(10*yi)))
focus_locations = []
for xi in range(0, lattice_size_x - 1):
for yi in range(0, lattice_size_y - 1):
focus_locations.append((7+(10*xi), 7+(10*yi)))
# Generate details data
csv_file = open(args.details_data, 'w')
try:
writer = csv.writer(csv_file)
writer.writerow(('ID', 'is_case'))
for case_index, case_point in enumerate(case_locations):
writer.writerow(('case_'+str(case_index+1), 1))
for control_name in ('A', 'B', 'C', 'D', 'E'):
writer.writerow(('control_'+str(case_index+1)+control_name, 0))
finally:
csv_file.close()
# Generate time series data
csv_file = open(args.histories_data, 'w')
try:
writer = csv.writer(csv_file)
writer.writerow(('ID', 'start_date', 'end_date', 'x', 'y'))
start_date = '20150101'
end_date = '20150102'
for id_index, case_point in enumerate(case_locations):
writer.writerow(('case_'+str(id_index+1), start_date, end_date, case_point[0], case_point[1]))
writer.writerow(('control_'+str(id_index+1)+'A', start_date, end_date, case_point[0], case_point[1]-2))
writer.writerow(('control_'+str(id_index+1)+'B', start_date, end_date, case_point[0]+2, case_point[1]))
writer.writerow(('control_'+str(id_index+1)+'C', start_date, end_date, case_point[0]+1, case_point[1]+1))
writer.writerow(('control_'+str(id_index+1)+'D', start_date, end_date, case_point[0]-1, case_point[1]+1))
writer.writerow(('control_'+str(id_index+1)+'E', start_date, end_date, case_point[0]-2, case_point[1]))
finally:
csv_file.close()
print("Finished generating null dataset")
# Generate focus data
csv_file = open(args.focus_data, 'w')
try:
writer = csv.writer(csv_file)
writer.writerow(('ID', 'start_date', 'end_date', 'x', 'y'))
start_date = '20150101'
end_date = '20150102'
for index, location in enumerate(focus_locations):
writer.writerow(('focus_' + str(index+1), start_date, end_date, location[0], location[1]))
finally:
csv_file.close()
|
sjirjies/pyJacqQ
|
tests/generate_null_dataset.py
|
Python
|
gpl-3.0
| 4,011
|
# -*- coding: UTF-8 -*-
import re
def mgo_text_split(query_text):
''' split text to support mongodb $text match on a phrase '''
sep = r'[`\-=~!@#$%^&*()_+\[\]{};\'\\:"|<,./<>?]'
word_lst = re.split(sep, query_text)
text_query = ' '.join('\"{}\"'.format(w) for w in word_lst)
return text_query
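# Traced example (input is illustrative): the separator class contains '-' but
# not spaces, so mgo_text_split('foo-bar baz') returns '"foo" "bar baz"'.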
# Search logic
def querylogic(keyword_list):
    query = {}
    if len(keyword_list) > 1 or len(keyword_list[0].split(':')) > 1:
        for _ in keyword_list:
            if _.find(':') > -1:
                q_key, q_value = _.split(':', 1)
                if q_key == 'port':
                    query['port'] = int(q_value)
                elif q_key == 'banner':
                    zh_pattern = re.compile(u'[\u4e00-\u9fa5]+')
                    contents = q_value
                    match = zh_pattern.search(contents)
                    # If the value contains no Chinese characters, use the full-text index
                    if match:
                        query['banner'] = {"$regex": q_value, '$options': 'i'}
                    else:
                        text_query = mgo_text_split(q_value)
                        query['$text'] = {'$search': text_query, '$caseSensitive': True}
                elif q_key == 'ip':
                    query['ip'] = {"$regex": q_value}
                elif q_key == 'server':
                    query['server'] = q_value.lower()
                elif q_key == 'title':
                    query['webinfo.title'] = {"$regex": q_value, '$options': 'i'}
                elif q_key == 'tag':
                    query['webinfo.tag'] = q_value.lower()
                elif q_key == 'hostname':
                    query['hostname'] = {"$regex": q_value, '$options': 'i'}
                elif q_key == 'all':
                    filter_lst = []
                    for i in ('ip', 'banner', 'port', 'time', 'webinfo.tag', 'webinfo.title', 'server', 'hostname'):
                        filter_lst.append({i: {"$regex": q_value, '$options': 'i'}})
                    query['$or'] = filter_lst
                else:
                    query[q_key] = q_value
    else:
        filter_lst = []
        for i in ('ip', 'banner', 'port', 'time', 'webinfo.tag', 'webinfo.title', 'server', 'hostname'):
            filter_lst.append({i: {"$regex": keyword_list[0], '$options': 'i'}})
        query['$or'] = filter_lst
    return query
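# Minimal usage sketch (query strings are hypothetical):
#   querylogic(['port:80'])        -> {'port': 80}
#   querylogic(['server:Apache'])  -> {'server': 'apache'}
#   querylogic(['8.8.8.8'])        -> {'$or': [<regex match on each indexed field>]}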
|
ysrc/xunfeng
|
views/lib/QueryLogic.py
|
Python
|
gpl-3.0
| 2,319
|
#!/usr/bin/env python3
# List types:
# 1. The list:
# A list is a sequence of arbitrary length containing arbitrary objects.
# A list is defined using square brackets.
liste = [0, "foo"]  # type: list
print(liste)
# list() can, for example, turn a string into a list:
String = "ABCDEFGHIJ"
print(list(String))
# An element of a list is accessed via its index. The index is the
# position at which the element is stored.
# Important: index counting starts at 0,
# so the index of the first element is 0.
# List:  [0, "foo"]
# Index:  0    1
element = liste[0]
print(element)
# The len() function returns the length of many kinds of objects.
# For a list, the length is the number of elements.
l = len(liste)  # type: int
print(l)
# The append() method adds an arbitrary element
# to a list:
liste.append("bar")
liste += ["bar"]  # does the same thing
print(liste)
# Instead of appending an object at the end of a list, it can also be
# inserted at an index. The object is inserted before that index.
liste.insert(0, "test")
print(liste)
# The pop() method removes the object at the given index from the list.
# If no index is given, pop() removes the last element.
liste.pop()
print(liste)
# alternatively, delete by index
del liste[1]
# An element can be removed not only by index but also by value;
# however, only the first occurrence of the value is removed.
# An error is raised if the value is not present in the
# list.
liste.remove('bar')  # type: None
print(liste)
# find an element by its value
print(liste.index('foo'))
# To determine how often a value occurs in a list, the count() method
# can be used.
liste3 = list("aabbbcccc")
print(liste3.count("a"))
print(liste3.count("d"))
# "in" tells you whether an element is contained in a list
print("a" in liste3)
# A list can be sorted with sort():
liste2 = [9, 6, 3, 2, 7]
liste2.sort()  # type: None
print(liste2)
# The individual characters of a string can also be accessed via an
# index, just like a list.
String = "ABCDEFGHIJKLMNOPQRSTUVW"
print(String)
print(String[4])
# Reversing
print(String[::-1])
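# The reversal above is a special case of slicing. In general,
# sequence[start:stop:step] returns a new sequence (stop is exclusive):
print(String[0:5])   # ABCDE
print(String[::2])   # every second character: ACEGIKMOQSUW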
|
pythonfoo/pythonfooLite
|
Level_03/listen.py
|
Python
|
gpl-3.0
| 2,306
|
from collections import namedtuple
from math import pi
import pygame
# Size and Weight Constants
TOTAL_MASS = 20 # Made up units
TOTAL_HEIGHT = 350 # Pygame pixels
STARTING_SPEED = 0, 0 # pixels/sec?
BASE_STRENGTH = 1500000
# Mass Fractions #
mass_fractions = {
"head": 0.0826,
"torso": 0.551,
"upper_arm": 0.0325,
"forearm": 0.0187 + 0.0065, # Including hand
"thigh": 0.105,
"calf": 0.0475,
"foot": 0.0143
}
# Segment Masses
masses = {}
for segment in mass_fractions:
masses[segment] = mass_fractions[segment] * TOTAL_MASS
# Height Fractions #
height_fractions = {
"head": 0.2, # Larger for cartoon, anatomically correct is 0.1075
"torso": 0.3,
"upper_arm": 0.172,
"forearm": 0.157 + 0.057, # Including hand
"thigh": 0.25, # standard is .232
"calf": 0.23, # standard is .247
"foot": 0.1 # Counts foot length, not height
}
# Segment Lengths
lengths = {}
for segment in height_fractions:
lengths[segment] = height_fractions[segment] * TOTAL_HEIGHT
# Joint Constraints #
joint_ranges = {
"neck": (3 * pi / 4, 5 * pi / 4),
"elbow": (0, 3 * pi / 4),
"shoulder": (-pi / 2, pi),
"hip": (-pi / 8, pi / 2),
"knee": (-3 * pi / 4, 0),
"ankle": (0, 2 * pi / 3)
}
joint_strengths = {
"neck": .15 * BASE_STRENGTH,
"elbow": .3 * BASE_STRENGTH,
"shoulder": .5 * BASE_STRENGTH,
"hip": .8 * BASE_STRENGTH,
"knee": .8 * BASE_STRENGTH,
"ankle": .4 * BASE_STRENGTH
}
# Collision Types #
collision_types = {
"upper": 1,
"lower": 2,
"ground": 3
}
body_collision_types = {
"torso": collision_types["upper"],
"head": collision_types["upper"],
"upper_arm": collision_types["upper"],
"forearm": collision_types["upper"],
"thigh": collision_types["upper"],
"calf": collision_types["lower"],
"foot": collision_types["lower"]
}
# Images
images = {
"torso": pygame.image.load("images/torso.bmp"),
"head": pygame.image.load("images/head.bmp"),
"upper_arm": pygame.image.load("images/upper_arm.bmp"),
"forearm": pygame.image.load("images/forearm.bmp"),
"thigh": pygame.image.load("images/thigh.bmp"),
"calf": pygame.image.load("images/leg.bmp"),
"foot": pygame.image.load("images/foot.bmp")
}
SegmentInfo = namedtuple('SegmentInfo', 'mass length start_speed collision_type image')
segments = {}
# todo I don't like this loop, it assumes that all other dictionaries have the same keys
for key in mass_fractions:
segments[key] = SegmentInfo(masses[key], lengths[key], STARTING_SPEED, body_collision_types[key], images[key])
JointInfo = namedtuple('JointInfo', 'range max_torque')
joints = {}
for key in joint_ranges:
joints[key] = JointInfo(joint_ranges[key], joint_strengths[key])
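# Illustrative access pattern (a sketch; the numbers follow from the constants above):
#   segments['torso'].mass    -> 0.551 * 20 == 11.02
#   segments['head'].length   -> 0.2 * 350 == 70.0
#   joints['knee'].max_torque -> 0.8 * BASE_STRENGTH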
|
TheoKanning/Jerry-Learns
|
jerry/body_config.py
|
Python
|
gpl-3.0
| 2,759
|
# -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from resources.lib.libraries import client
def resolve(url):
try:
url = url.replace('/embed-', '/')
url = re.compile('//.+?/([\w]+)').findall(url)[0]
url = 'http://realvid.net/embed-%s.html' % url
result = client.request(url)
url = re.compile('file *: *"(http.+?)"').findall(result)[-1]
return url
    except Exception:
        return None
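# Hypothetical call (the embed id is illustrative):
#   resolve('http://realvid.net/embed-abc123.html')  # -> direct video URL, or None on failure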
|
hexpl0it/plugin.video.genesi-ita
|
resources/lib/resolvers/realvid.py
|
Python
|
gpl-3.0
| 1,167
|
from ImageScripter import *
from elan import *
Viewer.Start()
Viewer.CloseAndClean()
Configurator.Start()
Configurator.inputoutput.Click()
addNewComDev(ComType = "Standard Connection",HardwareType = "Serial Port",Comport = '0')
addNewDevice(Configurator.genericserialdevices,"Generic Serial Device")
Configurator.system.Click()
Configurator.inputoutput.Click()
Configurator.serialport.Click()
Configurator.ComboBox.SelectAllWithIndex('1')
Configurator.apply.Click()
Configurator.changedsettings.Wait(seconds = 10)
Configurator.system.Click()
######################################Part 2
Configurator.inputoutput.Click()
for i in range(3):
Configurator.atatat2.RightClickTypeThenPress('aa','enter')
Add.PushButton.Click('OK')
Configurator.Edit.SetText(0,str(i))
Configurator.Edit.SetText(2,str(i))
Configurator.ComboBox.SelectAllWithIndex('1')
Configurator.apply.Click()
####################
Configurator.serialone.RightClickType('t')
Configurator.system.Click()
Configurator.RestartHard()
Configurator.Start()
Configurator.inputoutput.RealClick()
############################################TEST CODE
sleep(3)
Configurator.atatat2.RightClickType('d')
HlConfig.PushButton.Click('Yes')
Configurator.atatat2.WaitVanish()
Configurator.system.Click()
Configurator.Reset()
|
kenshay/ImageScript
|
ProgramData/SystemFiles/Python/Lib/site-packages/elan/Backup Pools/Repeat_One_Test/1____Serial_Device_Settings_Change_Check.py
|
Python
|
gpl-3.0
| 1,335
|
# -*- encoding: utf-8 -*-
from abjad import *
def test_pitchtools_PitchArrayCell_pitches_01():
array = pitchtools.PitchArray([[1, 2, 1], [2, 1, 1]])
array[0].cells[0].pitches.append(NamedPitch(0))
array[0].cells[1].pitches.append(NamedPitch(2))
'''
[c'] [d' ] []
[ ] [] []
'''
assert array[0].cells[0].pitches == [NamedPitch(0)]
assert array[0].cells[1].pitches == [NamedPitch(2)]
assert array[0].cells[2].pitches == []
assert array[1].cells[0].pitches == []
assert array[1].cells[1].pitches == []
assert array[1].cells[2].pitches == []
def test_pitchtools_PitchArrayCell_pitches_02():
cell = pitchtools.PitchArrayCell([NamedPitch(0)])
assert cell.pitches == [NamedPitch(0)]
|
mscuthbert/abjad
|
abjad/tools/pitchtools/test/test_pitchtools_PitchArrayCell_pitches.py
|
Python
|
gpl-3.0
| 756
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from trackma import utils
try:
LONG_DESCRIPTION = open("README.rst").read()
except IOError:
LONG_DESCRIPTION = __doc__
NAME = "Trackma"
REQUIREMENTS = []
EXTRA_REQUIREMENTS = {
'curses': ['urwid'],
'GTK': ['pygobject'],
'Qt': [],
}
setup(
name=NAME,
version=utils.VERSION,
packages=find_packages(),
install_requires=REQUIREMENTS,
extras_require=EXTRA_REQUIREMENTS,
package_data={'trackma': ['data/*']},
author='z411',
author_email='z411@krutt.org',
description='Open multi-site list manager',
long_description=LONG_DESCRIPTION,
url='https://github.com/z411/trackma',
keywords='list manager, curses, gtk, qt, myanimelist, hummingbird, vndb',
license="GPL-3",
entry_points={
'console_scripts': [
'trackma = trackma.ui.cli:main',
'trackma-curses = trackma.ui.curses:main [curses]',
],
'gui_scripts': [
'trackma-gtk = trackma.ui.gtkui:main [GTK]',
'trackma-qt = trackma.ui.qtui:main [Qt]',
'trackma-qt4 = trackma.ui.qt4ui:main [Qt]',
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: End Users/Desktop',
'Topic :: Internet',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Operating System :: POSIX',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
]
)
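# With extras_require above, installing an extra pulls in that UI's
# dependencies, e.g. (illustrative): pip install .[curses]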
|
MadeOfMagicAndWires/trackma
|
setup.py
|
Python
|
gpl-3.0
| 1,589
|
# PyDia SQL.py : SQL dump.
# Copy it to /usr/share/dia/python
import dia
# import sys
# import os
import re
import datetime
class SQLRenderer:
def __init__(self):
self.f = None
def begin_render(self, data, filename):
self.f = open(filename, "w")
# name = os.path.split(filename)[1]
self.f.write('''BEGIN TRANSACTION;\n''')
for layer in data.layers:
self.WriteTables(layer)
def WriteTables(self, layer):
tables = {}
appdata = 'appdata'
priority = {'fields': 0, 'foreign_keys': 100}
# value for id
z = ["INSERT INTO zf VALUES ('id', 'No', 'INTEGER', '1');"]
z.append("INSERT INTO z VALUES('diadate', '%s');" % datetime.date.today().isoformat())
zsql = "INSERT INTO z VALUES('%s', '%s');"
zfsql = "INSERT INTO zf VALUES ('%s', '%s', '%s', '%s');"
ztsql = "INSERT INTO zt VALUES ('%s', '%s', '%s', '%s');"
for o in layer.objects:
if o.type.name == 'Database - Table':
if "name" in o.properties.keys():
table = o.properties["name"].value
elif "text" in o.properties.keys():
table = o.properties["text"].value.text
else:
continue
                if len(table) == 0 or " " in table:
continue
if table not in tables.keys():
tables[table] = ''
if table == appdata:
attrs = o.properties['attributes'].value
for attr in attrs:
z.append(zsql % (attr[0], attr[1]))
continue
# zt.append(comment)
# first line is label
# second line is label plural
# third line is rpr
clst = o.properties['comment'].value.split('\n')
if len(clst) >= 3:
z.append(ztsql % (table, clst[0], clst[1], clst[2]))
                attributes = o.properties['attributes'].value
                for i in range(0, len(attributes)):
                    a = attributes[i]
if a[0] == 'id':
tables[table] = '%0.3d\tid INTEGER PRIMARY KEY\n' %\
(priority['fields'] + i)
continue
if len(a[0]) > 4:
if a[0][-3:] == '_id':
nnul = ''
if a[4] == 0:
nnul = ' NOT NULL'
tables[table] += '%0.3d\t%s INTEGER%s REFERENCES %s(id)\n' % (priority['fields'] + i, a[0], nnul, a[0][:-3])
continue
tipo = ''
if re.match('.*enum\(.*', a[1], re.I):
tipo = a[1]
else:
tipo = a[1].upper()
if tipo == '':
tipo = 'TEXT'
tables[table] += '%0.3d\t%s %s' % (priority['fields'] + i, a[0], tipo)
if a[3] == 1:
tables[table] += ' PRIMARY KEY'
if a[4] == 0:
if a[3] != 1:
tables[table] += ' NOT NULL'
notnull = 1
else:
tables[table] += ''
notnull = 0
if a[5] == 1:
if a[3] != 1:
tables[table] += ' UNIQUE'
# Create insert for table zflbl
if (len(a[2]) > 0):
z.append(zfsql % (a[0], a[2], tipo, notnull))
tables[table] += '\n'
elif o.type.name == 'Database - Reference':
continue
for k in sorted(tables.keys()):
# self.f.write('\n-- %s --\nDROP TABLE IF EXISTS `%s`;\n' % (k,k) )
if k != appdata:
self.f.write('CREATE TABLE IF NOT EXISTS %s (\n' % k)
sentences = sorted(tables[k].split('\n'))
sentences = [str(s[3:]) for s in sentences if len(s) > 4]
sentences = ",\n".join(sentences)
self.f.write('%s\n' % sentences)
self.f.write(');\n')
self.f.write('CREATE TABLE IF NOT EXISTS z (key TEXT PRIMARY KEY, val TEXT NOT NULL);\n')
self.f.write('CREATE TABLE IF NOT EXISTS zt (tbl TEXT PRIMARY KEY, tlbl TEXT NOT NULL UNIQUE, tlblp TEXT NOT NULL UNIQUE, rpr TEXT NOT NULL);\n')
self.f.write('CREATE TABLE IF NOT EXISTS zf (fld TEXT PRIMARY KEY, flbl TEXT NOT NULL UNIQUE, typos TEXT NOT NULL, nonull INTEGER NOT NULL DEFAULT 1);\n')
self.f.write('\n'.join(sorted(z)))
self.f.write('\n')
def end_render(self):
self.f.write('COMMIT;\n')
self.f.close()
# reference
dia.register_export("PyDia SQL generator", "sql", SQLRenderer())
|
tedlaz/pyted
|
pymiles/pyMiles2.old/pymiles/sqlite/diasql.py
|
Python
|
gpl-3.0
| 5,065
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals, absolute_import
from flask import render_template, request
from operator import attrgetter
from indico.core import signals
from indico.util.signals import named_objects_from_signal
from indico.util.struct.iterables import group_list
from indico.util.string import return_ascii, format_repr
from indico.web.flask.util import url_for
class HeaderMenuEntry(object):
"""Defines a header menu entry.
:param url: the url the menu item points to
:param caption: the caption of the menu item
:param parent: when used, all menu entries with the same parent
are shown in a dropdown with the parent name as its
caption
"""
def __init__(self, url, caption, parent=None):
self.url = url
self.caption = caption
self.parent = parent
@return_ascii
def __repr__(self):
return '<HeaderMenuEntry({}, {}, {})>'.format(self.caption, self.parent, self.url)
@classmethod
    def group(cls, entries):
        """Return the given entries grouped by their parent."""
        return sorted(group_list(entries, key=attrgetter('parent'), sort_by=attrgetter('caption')).items())
class MenuItem(object):
"""Defines a generic menu item
:param title: the title of the item
:param endpoint: shortcut to define a menu item that points to the
specified endpoint and is considered active only
on that endpoints. Cannot be combined with `url` or
`endpoints`.
:param url: url of the menu item
:param endpoints: set of endpoints on which this menu item is considered
active. Can also be a string if only one endpoint is
used.
"""
def __init__(self, title, endpoint=None, url=None, endpoints=None):
self.title = title
self.url = url
if endpoint is not None:
assert url is None and endpoints is None
self.url = url_for(endpoint)
self.endpoints = {endpoint}
elif endpoints is None:
self.endpoints = set()
elif isinstance(endpoints, basestring):
self.endpoints = {endpoints}
else:
self.endpoints = set(endpoints)
@return_ascii
def __repr__(self):
return '<MenuItem({}, {})>'.format(self.title, self.url)
@property
def active(self):
return request.endpoint in self.endpoints
class SideMenuSection(object):
"""Defines a side menu section (item set).
:param name: the unique name of the section
:param title: the title of the section (displayed)
:param weight: the "weight" (higher means it shows up first)
:param active: whether the section should be shown expanded by default
:param icon: icon that will be displayed next to the section title.
"""
is_section = True
def __init__(self, name, title, weight=-1, active=False, icon=None):
self.name = name
self.title = title
self._active = active
self._items = set()
        self.icon = ('icon-' + icon) if icon else None
self.weight = weight
self._sorted_items = None
def add_item(self, item):
self._sorted_items = None
self._items.add(item)
@property
def items(self):
if self._sorted_items is None:
self._sorted_items = sorted(self._items, key=lambda x: (-x.weight, x.title))
return self._sorted_items
@property
def active(self):
return self._active or any(item.active for item in self._items)
@return_ascii
def __repr__(self):
return format_repr(self, 'name', 'title', active=False)
class SideMenuItem(object):
"""Defines a side menu item.
:param name: the unique name (within the menu) of the item
:param title: the title of the menu item (displayed)
:param url: the URL that the link will point to
:param weight: the "weight" (higher means it shows up first)
:param active: whether the item will be shown as active by default
:param disabled: if `True`, the item will be displayed as disabled
:param section: section the item will be put in
:param icon: icon that will be displayed next to the item
"""
is_section = False
def __init__(self, name, title, url, weight=-1, active=False, disabled=False, section=None, icon=None):
self.name = name
self.title = title
self.url = url
self.active = active
self.disabled = disabled
self.section = section
self.weight = weight
self.icon = ('icon-' + icon) if icon else None
@return_ascii
def __repr__(self):
return format_repr(self, 'name', 'title', 'url', active=False, disabled=False)
def build_menu_structure(menu_id, active_item=None, **kwargs):
"""
Build a menu (list of entries) with sections/items.
Information is provided by specific signals and filtered
by menu id.
This can be used as a very thin framework for menu
handling across the app.
:param menu_id: menu_id used to filter out signal calls
:param active_item: ID of currently active menu item
:param kwargs: extra arguments passed to the signals
:returns: properly sorted list (taking weights into account)
"""
top_level = set()
sections = {}
for id_, section in named_objects_from_signal(signals.menu.sections.send(menu_id, **kwargs)).iteritems():
sections[id_] = section
top_level.add(section)
for id_, item in named_objects_from_signal(signals.menu.items.send(menu_id, **kwargs)).iteritems():
if id_ == active_item:
item.active = True
if item.section is None:
top_level.add(item)
else:
sections[item.section].add_item(item)
return sorted(top_level, key=lambda x: (-x.weight, x.title))
def render_sidemenu(menu_id, active_item=None, old_style=False, **kwargs):
"""Render a sidemenu with sections/items.
:param menu_id: The identifier of the menu.
:param active_item: The name of the currently-active menu item.
:param old_style: Whether the menu should be rendered using the
"old" menu style.
:param kwargs: Additional arguments passed to the menu signals.
"""
items = build_menu_structure(menu_id, active_item=active_item, **kwargs)
return render_template('side_menu.html', items=items, old_style=old_style, menu_id=menu_id)
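# A minimal usage sketch (the menu id and item name are hypothetical, not from this file):
#   items = build_menu_structure('event-management', active_item='settings')
#   html = render_sidemenu('event-management', active_item='settings')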
|
XeCycle/indico
|
indico/web/menu.py
|
Python
|
gpl-3.0
| 7,214
|
# shipBonusMissileLauncherHeavyROFATC1
#
# Used by:
# Ship: Vangel
type = "passive"
def handler(fit, ship, context):
fit.modules.filteredItemBoost(lambda mod: mod.item.group.name == "Missile Launcher Heavy",
"speed", ship.getModifiedItemAttr("shipBonusATC1"))
|
bsmr-eve/Pyfa
|
eos/effects/shipbonusmissilelauncherheavyrofatc1.py
|
Python
|
gpl-3.0
| 300
|
# -*- coding: utf-8 -*-
from test_settings import Settings
class TestCase(Settings):
def test_sidebar(self):
        # Apply the settings.
        self.do_settings()
        # Click on "General".
        self.driver.find_element_by_css_selector(
            'li.ng-binding:nth-child(3) > a:nth-child(1) > span:nth-child(2)').click()
        # Click on "Student Contact Information".
        self.driver.find_element_by_css_selector('ul.in:nth-child(2) > li:nth-child(2) > a:nth-child(1)').click()
        self.do_login()
        # Click on "General".
        self.driver.find_element_by_css_selector(
            'li.ng-binding:nth-child(3) > a:nth-child(1) > span:nth-child(2)').click()
        # Click on "Student Contact Information".
        self.driver.find_element_by_css_selector('ul.in:nth-child(2) > li:nth-child(2) > a:nth-child(1)').click()
        # Send a value to the city of residence field.
        self.driver.find_element_by_css_selector('#ikamet_il').send_keys('Bilecik')
        # Send a value to the district of residence field.
        self.driver.find_element_by_css_selector('#ikamet_ilce').send_keys('Merkez')
        # Send a value to the residence address field.
        self.driver.find_element_by_css_selector('#ikamet_adresi').send_keys('balim sokak')
        # Send a value to the postal code field.
        self.driver.find_element_by_css_selector('#posta_kodu').send_keys('11000')
        # Send a value to the phone number field.
        self.driver.find_element_by_css_selector('#tel_no').send_keys('0534626286816')
        # Click "Save".
self.driver.find_element_by_css_selector('button.btn-danger:nth-child(1)').click()
|
zetaops/ulakbus
|
selenium_tests/test_ogrenci_iletisim_bilgileri.py
|
Python
|
gpl-3.0
| 1,614
|
# -*- coding: utf-8 -*-
import os
import shutil
from tempfile import mkdtemp
import wx
from outwiker.core.attachment import Attachment
from outwiker.core.tree import WikiDocument
from outwiker.pages.text.textpage import TextPageFactory
from outwiker.core.application import Application
from outwiker.core.attachwatcher import AttachWatcher
from test.utils import removeDir
from test.basetestcases import BaseWxTestCase
class AttachWatcherTest(BaseWxTestCase):
def setUp(self):
super().setUp()
self._eventCount = 0
self._period_ms = 50
self._application = Application
self._application.onAttachListChanged += self._onAttachListChanged
        # Path to the directory with sample files
self._sample_path = '../test/samplefiles'
# Path to wiki
self.path = mkdtemp(prefix='OutWiker AttachWatcherTest Тесты')
self.wikiroot = WikiDocument.create(self.path)
self.page_01 = TextPageFactory().create(self.wikiroot,
"Страница 1",
[])
self.page_02 = TextPageFactory().create(self.wikiroot,
"Страница 2",
[])
def tearDown(self):
super().tearDown()
self._application.onAttachListChanged -= self._onAttachListChanged
removeDir(self.path)
def _onAttachListChanged(self, page, params):
self._eventCount += 1
def _attach_files(self, page, files_list):
'''
Copy files to attachments without explicit
onAttachListChanged event calling.
'''
files_full = [os.path.join(self._sample_path, fname)
for fname
in files_list]
attach = Attachment(page)
attach_path = attach.getAttachPath(True)
for fname in files_full:
shutil.copy(fname, attach_path)
def test_empty_01(self):
'''
Wiki is not added to Application
'''
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_02(self):
'''
Wiki added to Application _before_ AttachWatcher initializing.
No selected pages.
'''
self._application.wikiroot = self.wikiroot
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_03(self):
'''
Wiki added to Application _after_ AttachWatcher initializing.
No selected pages.
'''
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.wikiroot = self.wikiroot
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_04(self):
'''
Wiki added to Application _before_ AttachWatcher initializing.
Selected page.
'''
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_05(self):
'''
Wiki added to Application _after_ AttachWatcher initializing.
Selected page.
'''
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_06(self):
'''
Change the selected page
'''
self._application.wikiroot = self.wikiroot
self._application.selectedPage = None
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.selectedPage = self.page_01
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_07(self):
'''
Change the selected page
'''
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.selectedPage = None
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_08(self):
'''
Change the selected page
'''
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.selectedPage = self.page_02
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_09_close_wiki(self):
'''
Close current notes tree
'''
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.wikiroot = None
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_empty_10_create_empty_attach_dir(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
Attachment(self._application.selectedPage).getAttachPath(True)
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_files_not_change_01(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_files_not_change_02(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = None
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.selectedPage = self.page_01
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_files_not_change_03(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = None
self._attach_files(self.page_01, ['add.png'])
self._attach_files(self.page_02, ['add.png', 'dir.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
# Switch pages
self._application.selectedPage = self.page_01
self._application.selectedPage = self.page_02
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_files_not_change_04(self):
self._attach_files(self.page_01, ['add.png'])
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
# Close the wiki
self._application.wikiroot = None
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_add_files_01(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._attach_files(self.page_01, ['add.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_add_files_02(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._attach_files(self.page_01, ['add.png', 'dir.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_add_files_03(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._attach_files(self.page_01, ['add.png'])
wx.MilliSleep(500)
self.myYield()
self._attach_files(self.page_01, ['dir.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 2)
def test_attach_touch_read(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
attach = Attachment(self.page_01)
with open(attach.getFullPath('add.png')):
pass
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_attach_touch_write(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
attach = Attachment(self.page_01)
with open(attach.getFullPath('add.png'), 'w'):
pass
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_attach_rename(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
attach = Attachment(self.page_01)
src_fname = attach.getFullPath('add.png')
dest_fname = attach.getFullPath('newname.png')
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
os.rename(src_fname, dest_fname)
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_attach_delete(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
attach = Attachment(self.page_01)
attach.removeAttach(['add.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_switch_and_add_file(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
self._attach_files(self.page_02, ['add.png', 'dir.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
# Switch pages
self._application.selectedPage = self.page_02
self._attach_files(self.page_02, ['accept.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_close_wiki(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.wikiroot = None
self._attach_files(self.page_01, ['dir.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_unselect_page(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.selectedPage = None
self._attach_files(self.page_01, ['dir.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_select_again(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.selectedPage = None
self._attach_files(self.page_01, ['dir.png'])
self._application.selectedPage = self.page_01
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_select_again_and_add(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self._application.selectedPage = None
self._attach_files(self.page_01, ['dir.png'])
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['accept.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_rename_page_01(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self.page_01.title = 'Новый заголовок'
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_rename_page_02(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self.page_01.title = 'Новый заголовок'
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_rename_page_03(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self.page_01.title = 'Новый заголовок'
self._attach_files(self.page_01, ['dir.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_delete_page_01(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self.page_01.remove()
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_delete_page_02(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self.page_01.remove()
self._application.selectedPage = self.page_02
self._attach_files(self.page_02, ['add.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_move_to_page_01(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self.page_01.moveTo(self.page_02)
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_move_to_page_02(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
self.page_01.moveTo(self.page_02)
self._attach_files(self.page_01, ['add.png'])
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_remove_empty_attach_dir(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
attach = Attachment(self.page_01)
attach_dir = attach.getAttachPath(True)
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
shutil.rmtree(attach_dir)
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_remove_attach_dir(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
attach = Attachment(self.page_01)
attach_dir = attach.getAttachPath(True)
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, self._period_ms)
watcher.initialize()
shutil.rmtree(attach_dir)
wx.MilliSleep(500)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
def test_race_01(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
self._attach_files(self.page_01, ['add.png'])
watcher = AttachWatcher(self._application, 500)
watcher.initialize()
self._application.selectedPage = self.page_02
wx.MilliSleep(1000)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 0)
def test_race_02(self):
self._application.wikiroot = self.wikiroot
self._application.selectedPage = self.page_01
watcher = AttachWatcher(self._application, 500)
watcher.initialize()
self._attach_files(self.page_01, ['add.png'])
self._application.selectedPage = self.page_02
wx.MilliSleep(1000)
self.myYield()
watcher.clear()
self.assertEqual(self._eventCount, 1)
|
unreal666/outwiker
|
src/test/core/test_attachwatcher.py
|
Python
|
gpl-3.0
| 20,012
|
#====================== BEGIN GPL LICENSE BLOCK ======================
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#======================= END GPL LICENSE BLOCK ========================
# <pep8 compliant>
import bpy
from ....utils import MetarigError
from ....utils import create_widget, copy_bone
from ....utils import strip_org
from .limb_utils import *
from ..super_widgets import create_hand_widget
from rna_prop_ui import rna_idprop_ui_prop_get
def create_arm( cls, bones ):
org_bones = cls.org_bones
bpy.ops.object.mode_set(mode='EDIT')
eb = cls.obj.data.edit_bones
ctrl = get_bone_name( org_bones[2], 'ctrl', 'ik' )
# Create IK arm control
ctrl = copy_bone( cls.obj, org_bones[2], ctrl )
# clear parent (so that rigify will parent to root)
eb[ ctrl ].parent = None
eb[ ctrl ].use_connect = False
# Parent
eb[ bones['ik']['mch_target'] ].parent = eb[ ctrl ]
eb[ bones['ik']['mch_target'] ].use_connect = False
# Set up constraints
# Constrain mch target bone to the ik control and mch stretch
make_constraint( cls, bones['ik']['mch_target'], {
'constraint' : 'COPY_LOCATION',
'subtarget' : bones['ik']['mch_str'],
'head_tail' : 1.0
})
# Constrain mch ik stretch bone to the ik control
make_constraint( cls, bones['ik']['mch_str'], {
'constraint' : 'DAMPED_TRACK',
'subtarget' : ctrl,
})
make_constraint( cls, bones['ik']['mch_str'], {
'constraint' : 'STRETCH_TO',
'subtarget' : ctrl,
})
make_constraint( cls, bones['ik']['mch_str'], {
'constraint' : 'LIMIT_SCALE',
'use_min_y' : True,
'use_max_y' : True,
'max_y' : 1.05,
'owner_space' : 'LOCAL'
})
pb = cls.obj.pose.bones
# Modify rotation mode for ik and tweak controls
pb[bones['ik']['ctrl']['limb']].rotation_mode = 'ZXY'
for b in bones['tweak']['ctrl']:
pb[b].rotation_mode = 'ZXY'
# Create ik/fk switch property
pb_parent = pb[ bones['parent'] ]
    pb_parent['IK_Stretch'] = 1.0
    prop = rna_idprop_ui_prop_get( pb_parent, 'IK_Stretch', create=True )
prop["min"] = 0.0
prop["max"] = 1.0
prop["soft_min"] = 0.0
prop["soft_max"] = 1.0
prop["description"] = 'IK Stretch'
# Add driver to limit scale constraint influence
b = bones['ik']['mch_str']
drv = pb[b].constraints[-1].driver_add("influence").driver
drv.type = 'SUM'
var = drv.variables.new()
var.name = prop.name
var.type = "SINGLE_PROP"
var.targets[0].id = cls.obj
var.targets[0].data_path = \
pb_parent.path_from_id() + '['+ '"' + prop.name + '"' + ']'
drv_modifier = cls.obj.animation_data.drivers[-1].modifiers[0]
drv_modifier.mode = 'POLYNOMIAL'
drv_modifier.poly_order = 1
drv_modifier.coefficients[0] = 1.0
drv_modifier.coefficients[1] = -1.0
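    # Reading of the driver above (a note, not new behaviour): a first-order
    # POLYNOMIAL modifier with coefficients (1.0, -1.0) maps the IK stretch
    # property to influence = 1 - value, so full stretch (1.0) disables the
    # LIMIT_SCALE constraint entirely.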
# Create hand widget
create_hand_widget(cls.obj, ctrl, bone_transform_name=None)
bones['ik']['ctrl']['terminal'] = [ ctrl ]
return bones
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/Python/bin/2.78/scripts/addons/rigify/rigs/pitchipoy/limbs/arm.py
|
Python
|
gpl-3.0
| 3,863
|
# coding: utf8
# Copyright 2014-2020 CERN. This software is distributed under the
# terms of the GNU General Public Licence version 3 (GPL Version 3),
# copied verbatim in the file LICENCE.md.
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization or
# submit itself to any jurisdiction.
# Project website: http://blond.web.cern.ch/
'''
**Scaling of longitudinal beam and machine parameters, with user interface.**
:Authors: **Konstantinos Iliakis**, **Helga Timko**
'''
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.Qt import QButtonGroup, QHBoxLayout, QGroupBox
from scipy import integrate
from scipy.constants import m_p, e, c
import numpy as np
# Machine-dependent parameters [SI-units] -------------------------------------
set_ups = {'PSB': '0',
'CPS': '1',
'SPS, Q20': '2', 'SPS, Q22': '3', 'SPS, Q26': '4',
'LHC, -2016': '5', 'LHC, 2017-': '6'}
gamma_ts = {'0': 4.0767,
'1': np.sqrt(37.2),
'2': 18., '3': 20., '4': 22.83,
'5': 55.759505, '6': 53.8}
harmonics = {'0': 1,
'1': 21,
'2': 4620, '3': 4620, '4': 4620,
'5': 35640, '6': 35640}
circumferences = {'0': 2*np.pi*25,
'1': 2*np.pi*100.,
'2': 2*np.pi*1100.009, '3': 2*np.pi*1100.009, '4': 2*np.pi*1100.009,
'5': 26658.883, '6': 26658.883}
energies_fb = {'0': (160.e6 + m_p*c**2/e),
'1': (2.0e9 + m_p*c**2/e),
'2': 25.92e9, '3': 25.92e9, '4': 25.92e9,
'5': 450.e9, '6': 450.e9}
energies_ft = {'0': (2.0e9 + m_p*c**2/e),
'1': 25.92e9,
'2': 450.e9, '3': 450.e9, '4': 450.e9,
'5': 6.5e12, '6': 6.5e12}
# Machine-dependent parameters [SI-units] -------------------------------------
class ParameterScaling(object):
@property
def phi_b(self):
return self.omega_rf*self.tau/2.
@property
def delta_b(self):
return self.dE_b/(self.beta_sq*self.energy)
@property
def dE_b(self):
return np.sqrt(self.beta_sq*self.energy*self.voltage*(1 -
np.cos(self.phi_b)) / (np.pi*self.harmonic*self.eta_0))
@property
def integral(self):
return integrate.quad(lambda x: np.sqrt(2.*(np.cos(x) -
np.cos(self.phi_b))), 0, self.phi_b)[0]
@property
def emittance(self):
return 4.*self.energy*self.omega_s0*self.beta_sq*self.integral / \
(self.omega_rf**2*self.eta_0)
def relativistic_quantities(self):
self.momentum = np.sqrt(self.energy**2 - self.mass**2)
self.tb1.append(" Synchronous momentum: "+
np.str(self.momentum)+" eV")
self.kinetic_energy = self.energy - self.mass
self.tb1.append(" Synchronous kinetic energy: "+
np.str(self.kinetic_energy)+" eV")
self.gamma = self.energy/self.mass
self.tb1.append(" Synchronous relativistic gamma: "+
np.str(self.gamma)+"")
self.beta = np.sqrt(1. - 1./self.gamma**2)
self.tb1.append(" Synchronous relativistic beta: "+
np.str(self.beta)+"")
self.beta_sq = self.beta ** 2
self.tb1.append(" Synchronous relativistic beta squared: "+
np.str(self.beta_sq)+"\n")
def frequencies(self):
self.t_rev = self.circumference/(self.beta*c)
self.tb1.append(" Revolution period: "+
np.str(self.t_rev * 1.e6)+" us")
self.f_rev = 1./self.t_rev
self.tb1.append(" Revolution frequency: "+
np.str(self.f_rev)+" Hz")
self.omega_rev = 2.*np.pi*self.f_rev
self.tb1.append(" Angular revolution frequency: "+
np.str(self.omega_rev)+" 1/s")
self.f_RF = self.harmonic*self.f_rev
self.tb1.append(" RF frequency: "+np.str(self.f_RF*1.e-6)+" MHz")
self.omega_rf = 2.*np.pi*self.f_RF
self.tb1.append(" Angular RF frequency: "+
np.str(self.omega_rf)+" 1/s\n")
def tune(self):
self.eta_0 = np.fabs(1./self.gamma_t**2 - 1./self.gamma**2)
self.tb1.append(" Slippage factor (zeroth order): "+
np.str(self.eta_0)+"")
self.Q_s0 = np.sqrt(self.harmonic*self.voltage*self.eta_0 /
(2.*np.pi*self.beta_sq*self.energy))
self.tb1.append(" Central synchrotron tune: "+np.str(self.Q_s0)+"")
self.f_s0 = self.Q_s0*self.f_rev
self.tb1.append(" Central synchrotron frequency: "+
np.str(self.f_s0)+"")
self.omega_s0 = 2.*np.pi*self.f_s0
self.tb1.append(" Angular synchrotron frequency: "+
np.str(self.omega_s0)+" 1/s\n")
def bucket_parameters(self):
self.tb1.append("Bucket parameters assume: single RF, stationary case, and no intensity effects.\n")
self.bucket_area = 8.*np.sqrt(2.*self.beta_sq*self.energy*self.voltage /
(np.pi*self.harmonic*self.eta_0)) / self.omega_rf
self.tb1.append(" Bucket area: "+np.str(self.bucket_area)+" eVs")
self.dt_max = 0.5*self.t_rev/self.harmonic
self.tb1.append(" Half of bucket length: "+
np.str(self.dt_max*1.e9)+" ns")
self.dE_max = np.sqrt(2.*self.beta**2*self.energy*self.voltage /
(np.pi*self.eta_0*self.harmonic))
self.tb1.append(" Half of bucket height: "+
np.str(self.dE_max*1.e-6)+" MeV")
self.delta_max = self.dE_max/(self.beta_sq*self.energy)
self.tb1.append(" In relative momentum offset: "+
np.str(self.delta_max)+"\n")
def emittance_from_bunch_length(self, four_sigma_bunch_length):
self.tau = four_sigma_bunch_length
if self.tau >= 2.*self.dt_max:
self.tb1.append("Chosen bunch length too large for this bucket. Aborting!")
raise RuntimeError("Chosen bunch length too large for this bucket. Aborting!")
self.tb1.append("Calculating emittance of 4-sigma bunch length: "+
np.str(self.tau*1.e9)+" ns")
self.tb1.append(" Emittance contour in phase: "+
np.str(self.phi_b)+" rad")
self.tb1.append(" Emittance contour in relative momentum: "+
np.str(self.delta_b)+"")
self.tb1.append(" Emittance contour in energy offset: "+
np.str(self.dE_b*1.e-6)+" MeV")
self.tb1.append(" R.m.s. bunch length is: "+
np.str(self.tau*c/4*100)+" cm")
self.tb1.append(" R.m.s. energy spread is: "+
np.str(0.5*self.dE_b/self.kinetic_energy)+"")
self.tb1.append(" Longitudinal emittance is: "+
np.str(self.emittance)+" eVs\n")
def bunch_length_from_emittance(self, emittance):
self.emittance_aim = emittance
if self.emittance_aim >= self.bucket_area:
self.tb1.append("Chosen emittance too large for this bucket. Aborting!")
raise RuntimeError("Chosen emittance too large for this bucket. Aborting!")
self.tb1.append("Calculating 4-sigma bunch length for an emittance of "
+np.str(self.emittance_aim)+" eVs")
# Make a guess, iterate to get closer
self.tau = self.dt_max/2.
while (np.fabs((self.emittance - self.emittance_aim)
/self.emittance_aim) > 0.001):
self.tau *= np.sqrt(self.emittance_aim/self.emittance)
self.tb1.append(" Bunch length is: "+np.str(self.tau*1.e9)+" ns")
self.tb1.append(" Corresponding matched rms relative momentum offset: "+
np.str(self.delta_b)+"")
self.tb1.append(" Emittance contour in phase: "+
np.str(self.phi_b)+" rad")
def setupUi(self, mainWindow):
mainWindow.setObjectName("mainWindow")
mainWindow.resize(586, 611)
mainWindow.setWindowOpacity(1.0)
mainWindow.setFixedSize(mainWindow.size())
# Label "Machine/Optics"
self.lbMachine = QtWidgets.QLabel(mainWindow)
self.lbMachine.setGeometry(QtCore.QRect(20, 20, 120, 17))
self.lbMachine.setMinimumSize(QtCore.QSize(70, 0))
self.lbMachine.setMaximumSize(QtCore.QSize(16777215, 17))
self.lbMachine.setObjectName("lbMachine")
# Label "Energy"
self.lbEnergy = QtWidgets.QLabel(mainWindow)
self.lbEnergy.setGeometry(QtCore.QRect(20, 80, 70, 17))
self.lbEnergy.setObjectName("lbEnergy")
# Custom energy box
self.leCustom = QtWidgets.QLineEdit(mainWindow)
self.leCustom.setEnabled(True)
self.leCustom.setGeometry(QtCore.QRect(145, 100, 70, 25))
self.leCustom.setStyleSheet("background-color: rgb(255, 255, 255);\n"
"border-color: rgb(0, 0, 0);")
self.leCustom.hide()
self.leCustom.setText("")
self.leCustom.setObjectName("leCustom")
# Custom energy label (unit)
self.lbEV1 = QtWidgets.QLabel(mainWindow)
self.lbEV1.setEnabled(True)
self.lbEV1.setGeometry(QtCore.QRect(220, 100, 30, 25))
self.lbEV1.setObjectName("lbEV1")
self.lbEV1.hide()
# Label "Gamma Transition"
self.rbGammaT = QtWidgets.QLabel(mainWindow)
self.rbGammaT.setGeometry(QtCore.QRect(260, 80, 120, 17))
self.rbGammaT.setObjectName("rbGammaT")
# Custom gamma_t box
self.reCustom = QtWidgets.QLineEdit(mainWindow)
self.reCustom.setEnabled(True)
self.reCustom.setGeometry(QtCore.QRect(385, 100, 70, 25))
self.reCustom.setStyleSheet("background-color: rgb(255, 255, 255);\n"
"border-color: rgb(0, 0, 0);")
self.reCustom.hide()
self.reCustom.setText("")
self.reCustom.setObjectName("reCustom")
# Label "Voltage" with units
self.lbVoltage = QtWidgets.QLabel(mainWindow)
self.lbVoltage.setGeometry(QtCore.QRect(20, 160, 70, 25))
self.lbVoltage.setObjectName("lbVoltage")
self.lbEV2 = QtWidgets.QLabel(mainWindow)
self.lbEV2.setGeometry(QtCore.QRect(150, 160, 31, 25))
self.lbEV2.setObjectName("lbEV2")
self.leVoltage = QtWidgets.QLineEdit(mainWindow)
self.leVoltage.setGeometry(QtCore.QRect(80, 155, 70, 25))
self.leVoltage.setStyleSheet("background-color: rgb(255, 255, 255);\n"
"border-color: rgb(0, 0, 0);")
self.leVoltage.setText("")
self.leVoltage.setObjectName("leVoltage")
# Label "Optional"
self.lbOptional = QtWidgets.QLabel(mainWindow)
self.lbOptional.setGeometry(QtCore.QRect(20, 230, 70, 17))
self.lbOptional.setObjectName("lbOptional")
# Label "Emittance" with units
self.leEmittance = QtWidgets.QLineEdit(mainWindow)
self.leEmittance.setGeometry(QtCore.QRect(130, 270, 70, 25))
self.leEmittance.setStyleSheet("background-color: rgb(255, 255, 255);\n"
"border-color: rgb(0, 0, 0);")
self.leEmittance.setText("")
self.leEmittance.setObjectName("leEmittance")
self.lbEVS1 = QtWidgets.QLabel(mainWindow)
self.lbEVS1.setGeometry(QtCore.QRect(200, 275, 41, 25))
self.lbEVS1.setObjectName("lbEVS1")
self.lbEVS2 = QtWidgets.QLabel(mainWindow)
self.lbEVS2.setGeometry(QtCore.QRect(330, 275, 41, 25))
self.lbEVS2.setObjectName("lbEVS2")
# Label "Bunch Length" with units
self.leBunchLength = QtWidgets.QLineEdit(mainWindow)
self.leBunchLength.setGeometry(QtCore.QRect(260, 270, 70, 25))
self.leBunchLength.setStyleSheet("background-color: rgb(255, 255, 255);\n"
"border-color: rgb(0, 0, 0);")
self.leBunchLength.setText("")
self.leBunchLength.setObjectName("leBunchLength")
# "Submit" button
self.pbSubmit = QtWidgets.QPushButton(mainWindow)
self.pbSubmit.setGeometry(QtCore.QRect(230, 320, 101, 27))
        self.pbSubmit.setObjectName("pbSubmit")
self.tb1 = QtWidgets.QTextBrowser(mainWindow)
self.tb1.setGeometry(QtCore.QRect(10, 350, 561, 241))
self.tb1.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
self.tb1.setObjectName("tb1")
# Drop-down menus Machine/Optics, Energy, Gamma Transition
self.cbMachine = QtWidgets.QComboBox(mainWindow)
self.cbMachine.setGeometry(QtCore.QRect(20, 40, 115, 25))
self.cbMachine.setEditable(False)
self.cbMachine.setObjectName("cbMachine")
for i in range(len(gamma_ts)):
self.cbMachine.addItem("")
self.cbEnergy = QtWidgets.QComboBox(mainWindow)
self.cbEnergy.setGeometry(QtCore.QRect(20, 100, 115, 25))
self.cbEnergy.setObjectName("cbEnergy")
self.cbEnergy.addItem("")
self.cbEnergy.addItem("")
self.cbEnergy.addItem("")
self.cbGammaT = QtWidgets.QComboBox(mainWindow)
self.cbGammaT.setGeometry(QtCore.QRect(260, 100, 115, 25))
self.cbGammaT.setObjectName("cbGammaT")
self.cbGammaT.addItem("")
self.cbGammaT.addItem("")
# Radio button Bunch Length
self.rbBunchLength = QtWidgets.QRadioButton(mainWindow)
self.rbBunchLength.setGeometry(QtCore.QRect(260, 250, 140, 22))
self.rbBunchLength.setObjectName("rbBunchLength")
# Radio button Emittance
self.rbEmittance = QtWidgets.QRadioButton(mainWindow)
self.rbEmittance.setGeometry(QtCore.QRect(130, 250, 100, 22))
self.rbEmittance.setObjectName("rbEmittance")
# Radio button No option
self.rbNoOption = QtWidgets.QRadioButton(mainWindow)
self.rbNoOption.setGeometry(QtCore.QRect(20, 250, 100, 22))
self.rbNoOption.setObjectName('rbNoOption')
self.rbNoOption.setChecked(True)
self.retranslateUi(mainWindow)
QtCore.QMetaObject.connectSlotsByName(mainWindow)
self.addactions(mainWindow)
def retranslateUi(self, mainWindow):
_translate = QtCore.QCoreApplication.translate
# Label texts
mainWindow.setWindowTitle(_translate("mainWindow", "Bunch Parameter Calculator"))
self.lbMachine.setText(_translate("mainWindow", "Machine, Optics"))
self.lbEnergy.setText(_translate("mainWindow", "Energy"))
self.lbEV1.setText(_translate("mainWindow", "[eV]"))
self.rbGammaT.setText(_translate("mainWindow", "Transition Gamma"))
self.lbVoltage.setText(_translate("mainWindow", "Voltage"))
self.lbEV2.setText(_translate("mainWindow", "[V]"))
self.lbOptional.setText(_translate("mainWindow", "Optional"))
self.rbEmittance.setText(_translate("mainWindow", "Emittance"))
self.lbEVS1.setText(_translate("mainWindow", "[eVs]"))
self.lbEVS2.setText(_translate("mainWindow", "[s]"))
self.rbBunchLength.setText(_translate("mainWindow", "Bunch Length"))
self.rbNoOption.setText(_translate("mainWindow", "No Options"))
self.pbSubmit.setText(_translate("mainWindow", "Submit"))
# Options in roll-down menu
for i, key in enumerate(set_ups.keys()):
self.cbMachine.setItemText(i, _translate("mainWindow", key))
self.cbEnergy.setItemText(0, _translate("mainWindow", "Flat bottom"))
self.cbEnergy.setItemText(1, _translate("mainWindow", "Flat top"))
self.cbEnergy.setItemText(2, _translate("mainWindow", "Custom"))
self.cbGammaT.setItemText(0, _translate("mainWindow", "Default"))
self.cbGammaT.setItemText(1, _translate("mainWindow", "Custom"))
def addactions(self, mainWindow):
self.pbSubmit.clicked.connect(self.pbHandler)
self.cbEnergy.activated[str].connect(self.cbEnergyHandler)
self.cbGammaT.activated[str].connect(self.cbGammaTHandler)
def pbHandler(self):
self.machine = str(self.cbMachine.currentText())
self.setup = set_ups[self.machine]
self.energy_type = self.cbEnergy.currentText()
if self.energy_type == 'Custom':
self.custom_energy = self.leCustom.text()
try:
self.energy = np.double(self.custom_energy)
except ValueError:
                self.tb1.append("Energy not recognised!")
return
self.gamma_t = gamma_ts[self.setup]
self.gamma_t_type = self.cbGammaT.currentText()
if self.gamma_t_type == 'Custom':
self.custom_gamma_t = self.reCustom.text()
try:
self.gamma_t = np.double(self.custom_gamma_t)
except ValueError:
self.tb1.append("Gamma transition not recognized!")
return
self.voltage = self.leVoltage.text()
self.emittance_target = self.leEmittance.text()
self.bunch_length_target = self.leBunchLength.text()
self.tb1.append("\n\n"+"**************************** BEAM PARAMETER CALCULATOR ****************************"+"\n")
self.tb1.append("Input -- chosen machine/optics: "+
np.str(self.machine)+"\n")
# Derived parameters --------------------------------------------------
self.alpha = 1./self.gamma_t**2
self.tb1.append(" * with relativistic gamma at transition: "+
np.str(self.gamma_t)+"")
self.tb1.append(" * with momentum compaction factor: "+
np.str(self.alpha)+"")
self.harmonic = harmonics[self.setup]
self.tb1.append(" * with main harmonic: " +np.str(self.harmonic)+"")
self.circumference = circumferences[self.setup]
self.tb1.append(" * and machine circumference: "+
np.str(self.circumference)+" m\n")
if self.energy_type == 'Flat bottom':
self.energy = energies_fb[self.setup]
elif self.energy_type == 'Flat top':
self.energy = energies_ft[self.setup]
self.tb1.append("Input -- synchronous total energy: "+
np.str(self.energy*1.e-6)+" MeV")
try:
self.voltage = np.double(self.voltage)
except ValueError:
self.tb1.append("Voltage not recognised!")
return
self.tb1.append("Input -- RF voltage: "+
np.str(self.voltage*1.e-6)+" MV")
self.mass = m_p*c**2/e
self.tb1.append("Input -- particle mass: "+
np.str(self.mass*1.e-6)+" MeV\n")
# Derived quantities --------------------------------------------------
self.relativistic_quantities()
self.frequencies()
self.tune()
self.bucket_parameters()
if self.rbEmittance.isChecked():
try:
self.emittance_target = np.double(self.emittance_target)
except ValueError:
self.tb1.append("Target emittance not recognised!")
return
self.bunch_length_from_emittance(self.emittance_target)
elif self.rbBunchLength.isChecked():
try:
self.bunch_length_target = np.double(self.bunch_length_target)
except ValueError:
self.tb1.append("Target bunch length not recognised!")
return
self.emittance_from_bunch_length(self.bunch_length_target)
def cbEnergyHandler(self, text):
if text == 'Custom':
self.leCustom.show()
self.lbEV1.show()
else:
self.leCustom.hide()
self.lbEV1.hide()
def cbGammaTHandler(self, text):
if text == 'Custom':
self.reCustom.show()
else:
self.reCustom.hide()
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
mainWindow = QtWidgets.QMainWindow()
ui = ParameterScaling()
ui.setupUi(mainWindow)
mainWindow.show()
sys.exit(app.exec_())
|
blond-admin/BLonD
|
blond/toolbox/parameter_scaling.py
|
Python
|
gpl-3.0
| 21,008
|
import os
import time
import hashlib
import logging
from base64 import b64encode
from collections import OrderedDict, defaultdict
from twisted.internet.task import LoopingCall
from twisted.internet.defer import Deferred
from dispersy.authentication import MemberAuthentication
from dispersy.candidate import Candidate
from dispersy.community import Community
from dispersy.conversion import DefaultConversion
from dispersy.destination import CandidateDestination
from dispersy.distribution import DirectDistribution
from dispersy.message import Message
from dispersy.resolution import PublicResolution
from dispersy.requestcache import RandomNumberCache
from market.community.blockchain.conversion import BlockchainConversion
from market.community.payload import ProtobufPayload
from market.database.datamanager import BlockchainDataManager
from market.models.block import Block
from market.models.block_index import BlockIndex
from market.models.contract import Contract
from market.util.misc import median
from market.util.uint256 import full_to_uint256, compact_to_uint256, uint256_to_compact
from market.models import ObjectType
COMMIT_INTERVAL = 60
BLOCK_CREATION_INTERVAL = 1
BLOCK_TARGET_SPACING = 30 # 10 * 60
BLOCK_TARGET_TIMESPAN = 300 # 14 * 24 * 60 * 60
BLOCK_TARGET_BLOCKSPAN = BLOCK_TARGET_TIMESPAN / BLOCK_TARGET_SPACING
BLOCK_DIFFICULTY_INIT = 0x05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
BLOCK_DIFFICULTY_MIN = 0x05ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
BLOCK_GENESIS_HASH = '\00' * 32
MAX_CLOCK_DRIFT = 15 * 60
MAX_PACKET_SIZE = 1500
class SignatureRequestCache(RandomNumberCache):
def __init__(self, community):
super(SignatureRequestCache, self).__init__(community.request_cache, u'signature-request')
def on_timeout(self):
pass
class BlockRequestCache(RandomNumberCache):
def __init__(self, community, block_id):
super(BlockRequestCache, self).__init__(community.request_cache, u'block-request')
self.community = community
self.block_id = block_id
def on_timeout(self):
# Retry to download block
self.community.send_block_request(self.block_id)
class TraversalRequestCache(RandomNumberCache):
def __init__(self, community, contract_id, contract_type, deferred, min_responses, max_responses):
super(TraversalRequestCache, self).__init__(community.request_cache, u'traversal-request')
self.logger = community.logger
self.contract_id = contract_id
self.contract_type = contract_type
self.deferred = deferred
self.min_responses = min_responses
self.max_responses = max_responses
self.responses = {}
self.public_keys = []
def callback(self):
responses_sorted = sorted(self.responses.items(), key=lambda item: item[1])
if responses_sorted and responses_sorted[-1][1] >= self.min_responses:
self.deferred.callback(responses_sorted[-1][0])
else:
self.logger.warning('Not enough similar responses to traversal-request')
self.deferred.errback()
def add_response(self, public_key, response_tuple):
# Only allow 1 response per peer
if public_key in self.public_keys:
return False
self.public_keys.append(public_key)
self.responses[response_tuple] = self.responses.get(response_tuple, 0) + 1
# If we already have all the responses, there is no need to wait for the timeout
if sum(self.responses.values()) >= self.max_responses:
self.callback()
return True
return False
def on_timeout(self):
self.callback()
class BlockchainCommunity(Community):
def __init__(self, dispersy, master, my_member):
super(BlockchainCommunity, self).__init__(dispersy, master, my_member)
self.logger = logging.getLogger('BlockchainLogger')
self.incoming_contracts = OrderedDict()
self.incoming_blocks = {}
self.data_manager = None
def initialize(self, verifier=True, **db_kwargs):
super(BlockchainCommunity, self).initialize()
self.initialize_database(**db_kwargs)
if verifier:
self.register_task('create_block', LoopingCall(self.create_block)).start(BLOCK_CREATION_INTERVAL)
self.register_task('commit', LoopingCall(self.data_manager.commit)).start(COMMIT_INTERVAL)
self.logger.info('BlockchainCommunity initialized')
def initialize_database(self, database_fn=''):
if database_fn:
database_fn = os.path.join(self.dispersy.working_directory, database_fn)
self.data_manager = BlockchainDataManager(database_fn)
self.data_manager.initialize()
@classmethod
def get_master_members(cls, dispersy):
# generated: Fri Feb 24 11:22:22 2017
# curve: None
# len: 571 bits ~ 144 bytes signature
# pub: 170 3081a7301006072a8648ce3d020106052b81040027038192000407b
# acf5ae4d3fe94d49a7f94b7239e9c2d878b29f0fbdb7374d5b6a09d9d6fba80d
# 3807affd0ba45ba1ac1c278ca59bec422d8a44b5fefaabcdd62c2778414c01da
# 4578b304b104b00eec74de98dcda803b79fd1783d76cc1bd7aab75cfd8fff982
# 7a9647ae3c59423c2a9a984700e7cb43b881a6455574032cc11dba806dba9699
# f54f2d30b10eed5c7c0381a0915a5
# pub-sha1 56553661e30b342b2fc39f1a425eb612ef8b8c33
# -----BEGIN PUBLIC KEY-----
# MIGnMBAGByqGSM49AgEGBSuBBAAnA4GSAAQHus9a5NP+lNSaf5S3I56cLYeLKfD7
# 23N01bagnZ1vuoDTgHr/0LpFuhrBwnjKWb7EItikS1/vqrzdYsJ3hBTAHaRXizBL
# EEsA7sdN6Y3NqAO3n9F4PXbMG9eqt1z9j/+YJ6lkeuPFlCPCqamEcA58tDuIGmRV
# V0AyzBHbqAbbqWmfVPLTCxDu1cfAOBoJFaU=
# -----END PUBLIC KEY-----
master_key = '3081a7301006072a8648ce3d020106052b81040027038192000407bacf5ae4d3fe94d49a7f94b7239e9c2d878b29' + \
'f0fbdb7374d5b6a09d9d6fba80d3807affd0ba45ba1ac1c278ca59bec422d8a44b5fefaabcdd62c2778414c01da4' + \
'578b304b104b00eec74de98dcda803b79fd1783d76cc1bd7aab75cfd8fff9827a9647ae3c59423c2a9a984700e7c' + \
'b43b881a6455574032cc11dba806dba9699f54f2d30b10eed5c7c0381a0915a5'
master = dispersy.get_member(public_key=master_key.decode('hex'))
return [master]
def initiate_meta_messages(self):
meta_messages = super(BlockchainCommunity, self).initiate_meta_messages()
return meta_messages + [
Message(self, u"signature-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_signature_request),
Message(self, u"signature-response",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_signature_response),
Message(self, u"contract",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_contract),
Message(self, u"block-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_block_request),
Message(self, u"block",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_block),
Message(self, u"traversal-request",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_traversal_request),
Message(self, u"traversal-response",
MemberAuthentication(),
PublicResolution(),
DirectDistribution(),
CandidateDestination(),
ProtobufPayload(),
self._generic_timeline_check,
self.on_traversal_response)
]
def initiate_conversions(self):
return [DefaultConversion(self), BlockchainConversion(self)]
def get_verifiers(self):
return list(self.dispersy_yield_verified_candidates())
def send_message(self, msg_type, candidates, payload_dict):
self.logger.debug('Sending %s message to %d candidate(s)', msg_type, len(candidates))
meta = self.get_meta_message(msg_type)
message = meta.impl(authentication=(self.my_member,),
distribution=(self.claim_global_time(),),
destination=candidates,
payload=(payload_dict,))
return self.dispersy.store_update_forward([message], False, False, True)
def multicast_message(self, msg_type, payload_dict, exclude=None):
candidates = self.get_verifiers()
if exclude in candidates:
candidates.remove(exclude)
return self.send_message(msg_type, tuple(candidates), payload_dict)
def send_signature_request(self, contract, candidate):
cache = self.request_cache.add(SignatureRequestCache(self))
return self.send_message(u'signature-request', (candidate,), {'identifier': cache.number,
'contract': contract.to_dict()})
def on_signature_request(self, messages):
for message in messages:
contract = Contract.from_dict(message.payload.dictionary['contract'])
if contract is None:
self.logger.warning('Dropping invalid signature-request from %s', message.candidate.sock_addr)
continue
elif not contract.verify(message.candidate.get_member()):
self.logger.warning('Dropping signature-request with incorrect signature')
continue
self.logger.debug('Got signature-request from %s', message.candidate.sock_addr)
if self.finalize_contract(contract, sign=True):
self.send_signature_response(message.candidate, contract, message.payload.dictionary['identifier'])
self.incoming_contracts[contract.id] = contract
self.multicast_message(u'contract', {'contract': contract.to_dict()})
def send_signature_response(self, candidate, contract, identifier):
return self.send_message(u'signature-response', (candidate,), {'identifier': identifier,
'contract': contract.to_dict()})
def on_signature_response(self, messages):
for message in messages:
cache = self.request_cache.get(u'signature-request', message.payload.dictionary['identifier'])
if not cache:
self.logger.warning("Dropping unexpected signature-response from %s", message.candidate.sock_addr)
continue
contract = Contract.from_dict(message.payload.dictionary['contract'])
if contract is None:
self.logger.warning('Dropping invalid signature-response from %s', message.candidate.sock_addr)
continue
elif not contract.verify(message.candidate.get_member()):
self.logger.warning('Dropping signature-response with incorrect signature')
continue
self.logger.debug('Got signature-response from %s', message.candidate.sock_addr)
if self.finalize_contract(contract):
self.incoming_contracts[contract.id] = contract
self.multicast_message(u'contract', {'contract': contract.to_dict()})
def on_contract(self, messages):
for message in messages:
contract = Contract.from_dict(message.payload.dictionary['contract'])
if contract is None:
self.logger.warning('Dropping invalid contract from %s', message.candidate.sock_addr)
continue
elif self.incoming_contracts.get(contract.id) or self.data_manager.get_contract(contract.id):
self.logger.debug('Dropping contract %s (duplicate)', b64encode(contract.id))
continue
# Preliminary check to see if contract is allowed. A final check will be performed in check_block.
if not self.check_contract(contract, fail_without_parent=False):
self.logger.warning('Dropping contract %s (check failed)', b64encode(contract.id))
continue
self.logger.debug('Got contract %s', b64encode(contract.id))
# Forward if needed
if contract.id not in self.incoming_contracts:
self.incoming_contracts[contract.id] = contract
self.multicast_message(u'contract', {'contract': contract.to_dict()}, exclude=message.candidate)
def send_block_request(self, block_id):
self.request_cache.add(BlockRequestCache(self, block_id))
verifiers = self.get_verifiers()
if verifiers:
self.send_message(u'block-request', (verifiers[0],), {'block_id': block_id})
def on_block_request(self, messages):
for message in messages:
block_id = message.payload.dictionary['block_id']
self.logger.debug('Got block-request for id %s', b64encode(block_id))
block = self.data_manager.get_block(block_id)
if block is not None:
self.send_message(u'block', (message.candidate,), {'block': block.to_dict()})
def on_block(self, messages):
for message in messages:
block = Block.from_dict(message.payload.dictionary['block'])
if not block:
self.logger.warning('Dropping invalid block from %s', message.candidate.sock_addr)
continue
# If we're trying to download this block, stop it. This needs to happen before any additional checks.
# TODO: fix this
for cache in self.request_cache._identifiers.values():
if isinstance(cache, BlockRequestCache) and cache.block_id == block.id:
self.request_cache.pop(cache.prefix, cache.number)
if not self.check_block(block):
self.logger.warning('Dropping illegal block from %s', message.candidate.sock_addr)
continue
self.logger.debug('Got block %s', b64encode(block.id))
# Are we dealing with an orphan block?
if block.previous_hash != BLOCK_GENESIS_HASH and not self.data_manager.get_block(block.previous_hash):
# Postpone processing the current block and request missing blocks
self.incoming_blocks[block.id] = block
# TODO: address issues with memory filling up
self.send_block_request(block.previous_hash)
self.logger.debug('Postpone block %s', b64encode(block.id))
continue
if self.process_block(block):
self.logger.debug('Added received block with %s contract(s)', len(block.contracts))
self.process_blocks_after(block)
def process_blocks_after(self, block):
# Process any orphan blocks that depend on the current block
for orphan in self.incoming_blocks.values():
if orphan.previous_hash == block.id:
del self.incoming_blocks[orphan.id]
if self.process_block(orphan):
self.logger.debug('Added postponed block with %s contract(s)', len(orphan.contracts))
self.process_blocks_after(orphan)
def process_block(self, block):
# We have already checked the proof of this block, but not whether the target_difficulty itself is as expected.
# Note that we can't do this in check_block, because at that time the previous block may not be known yet.
prev_block = self.data_manager.get_block(block.previous_hash)
if block.target_difficulty != self.get_next_difficulty(prev_block):
self.logger.debug('Block processing failed (unexpected target difficulty)')
return False
# Save block
self.data_manager.add_block(block)
# Get best chain
latest_index = self.data_manager.get_block_indexes(limit=1)[0]
# Calculate height of the chain this block is the head of
block_ids = []
from_height = 0
cur_block = block
while cur_block:
block_ids.append(cur_block.id)
block_index = self.data_manager.get_block_index(cur_block.previous_hash)
if block_index is not None:
# We can connect to the best chain
from_height = block_index.height
break
cur_block = self.data_manager.get_block(cur_block.previous_hash)
# Make sure that we are not dealing with a chain of orphan blocks
if cur_block is None and block_ids[-1] != BLOCK_GENESIS_HASH:
self.logger.error('Block processing failed (chain of orphan blocks)')
return False
# For now, the longest chain wins
if len(block_ids) + from_height > latest_index.height:
self.data_manager.remove_block_indexes(from_height + 1)
for index, block_id in enumerate(reversed(block_ids)):
self.data_manager.add_block_index(BlockIndex(block_id, from_height + 1 + index))
# Make sure we stop trying to create blocks with the contracts in this block
for contract in block.contracts:
if contract.id in self.incoming_contracts:
del self.incoming_contracts[contract.id]
return True
def check_block(self, block):
if self.get_block_packet_size(block) > MAX_PACKET_SIZE:
self.logger.debug('Block failed check (block too large)')
return False
if not self.check_proof(block):
# Don't log message when we created the block
if block.creator != self.my_member.public_key:
self.logger.debug('Block failed check (incorrect proof)')
return False
if not block.verify():
self.logger.debug('Block failed check (invalid signature)')
return False
if self.data_manager.get_block(block.id):
self.logger.debug('Block failed check (duplicate block)')
return False
if block.time > int(time.time()) + MAX_CLOCK_DRIFT:
self.logger.debug('Block failed check (max clock drift exceeded)')
return False
for contract in block.contracts:
if block.time < contract.time:
self.logger.debug('Block failed check (block created before contract)')
return False
if not self.check_contract(contract):
self.logger.warning('Block failed check (contract check failed)')
self.incoming_contracts.pop(contract.id, None)
return False
if len(block.contracts) != len(set([contract.id for contract in block.contracts])):
self.logger.debug('Block failed check (duplicate contracts)')
return False
if block.merkle_root_hash != block.merkle_tree.build():
self.logger.debug('Block failed check (incorrect merkle root hash)')
return False
past_blocks = self.get_past_blocks(block, 11)
if past_blocks and block.time < median([b.time for b in past_blocks]):
self.logger.debug('Block failed check (block time smaller than median time of past 11 blocks)')
return False
return True
def check_proof(self, block):
proof = hashlib.sha256(str(block)).digest()
return full_to_uint256(proof) < block.target_difficulty
def create_block(self):
latest_index = self.data_manager.get_block_indexes(limit=1)[0]
prev_block = self.data_manager.get_block(latest_index.block_id) if latest_index is not None else None
block = Block()
block.previous_hash = prev_block.id if prev_block is not None else BLOCK_GENESIS_HASH
block.target_difficulty = self.get_next_difficulty(prev_block)
block.time = int(time.time())
# Placeholder information (for calculating packet size)
block.merkle_root_hash = block.merkle_tree.build()
block.sign(self.my_member)
# Find dependencies
contracts = []
dependencies = defaultdict(list)
for contract in self.incoming_contracts.itervalues():
if contract.previous_hash:
# Get the previous contract from memory or the database
prev_contract = self.incoming_contracts.get(contract.previous_hash) or \
self.data_manager.get_contract(contract.previous_hash)
on_blockchain = self.data_manager.contract_on_blockchain(prev_contract.id) if prev_contract else False
# We need to wait until the previous contract is received and on the blockchain
if not on_blockchain:
dependencies[contract.id].append(prev_contract)
continue
contracts.append(contract)
# Add contracts to block
while contracts:
contract = contracts.pop(0)
block.contracts.append(contract)
if self.get_block_packet_size(block) > MAX_PACKET_SIZE:
block.contracts.pop()
break
if contract.id in dependencies:
# Put dependencies at the front of the list, so they will be processed in the next iterations
for index, dependency in enumerate(dependencies[contract.id]):
contracts.insert(index, dependency)
# Calculate final merkle root hash + sign block
block.merkle_root_hash = block.merkle_tree.build()
block.sign(self.my_member)
if self.check_block(block):
self.logger.debug('Created block with target difficulty 0x%064x', block.target_difficulty)
if self.process_block(block):
self.logger.debug('Added created block with %s contract(s)', len(block.contracts))
self.multicast_message(u'block', {'block': block.to_dict()})
return block
def get_next_difficulty(self, block):
# Determine difficulty for the next block
if block is not None:
target_difficulty = block.target_difficulty
# Go back BLOCK_TARGET_BLOCKSPAN
past_blocks = self.get_past_blocks(block, BLOCK_TARGET_BLOCKSPAN)
if past_blocks:
target_difficulty *= float(block.time - past_blocks[-1].time) / BLOCK_TARGET_TIMESPAN
else:
target_difficulty = BLOCK_DIFFICULTY_INIT
target_difficulty = min(target_difficulty, BLOCK_DIFFICULTY_MIN)
return compact_to_uint256(uint256_to_compact(target_difficulty))
def get_past_blocks(self, block, num_past):
result = []
current = block
for _ in range(num_past):
current = self.data_manager.get_block(current.previous_hash)
if current is None:
return None
result.append(current)
return result
def get_block_packet_size(self, block):
meta = self.get_meta_message(u'block')
message = meta.impl(authentication=(self.my_member,),
distribution=(self.claim_global_time(),),
destination=(Candidate(('1.1.1.1', 1), False),),
payload=({'block': block.to_dict()},))
return len(message.packet)
def check_contract(self, contract, fail_without_parent=True):
if not contract.verify():
self.logger.debug('Contract failed check (invalid signature)')
return False
if contract.previous_hash and fail_without_parent:
prev_contract = self.incoming_contracts.get(contract.previous_hash) or \
self.data_manager.get_contract(contract.previous_hash)
if prev_contract is None:
self.logger.error('Contract failed check (parent is unknown)')
return False
return True
def begin_contract(self, candidate, document, contract_type, from_public_key, to_public_key, previous_hash=''):
assert to_public_key == self.my_member.public_key or from_public_key == self.my_member.public_key
contract = Contract()
contract.from_public_key = from_public_key
contract.to_public_key = to_public_key
contract.document = document
contract.type = contract_type
contract.previous_hash = previous_hash
contract.time = int(time.time())
contract.sign(self.my_member)
return self.send_signature_request(contract, candidate)
def finalize_contract(self, contract, sign=False):
# Final checks?
if sign:
contract.sign(self.my_member)
# Add contract to database
self.data_manager.add_contract(contract)
return True
def send_traversal_request(self, contract_id, contract_type=None, max_requests=5, min_responses=1):
# Send a message to a limited number of verifiers
verifiers = self.get_verifiers()[:max_requests]
if len(verifiers) < min_responses:
self.logger.warning('Not enough verifiers to send traversal-request')
return
# Use a request cache to keep track of the responses. We require a minimum number of (equal) responses
deferred = Deferred()
cache = self.request_cache.add(TraversalRequestCache(self, contract_id, contract_type,
deferred, min_responses, len(verifiers)))
msg_dict = {'identifier': cache.number,
'contract_id': contract_id}
if contract_type is not None:
msg_dict['contract_type'] = contract_type
self.send_message(u'traversal-request', tuple(verifiers), msg_dict)
return deferred
def on_traversal_request(self, messages):
for message in messages:
msg_dict = {'identifier': message.payload.dictionary['identifier']}
try:
contract_type = ObjectType(message.payload.dictionary['contract_type'])
except (ValueError, KeyError):
contract_type = None
contract = self.traverse_contracts(message.payload.dictionary['contract_id'],
contract_type)
if contract is not None:
msg_dict['contract'] = contract.to_dict()
# Add the number of confirmations this contract has
confirmations = self.find_confirmation_count(message.payload.dictionary['contract_id'])
if confirmations is not None:
msg_dict['confirmations'] = confirmations
self.send_message(u'traversal-response', (message.candidate,), msg_dict)
def on_traversal_response(self, messages):
for message in messages:
cache = self.request_cache.get(u'traversal-request', message.payload.dictionary['identifier'])
if not cache:
self.logger.warning("Dropping unexpected traversal-response from %s", message.candidate.sock_addr)
continue
self.logger.debug('Got traversal-response from %s', message.candidate.sock_addr)
contract = Contract.from_dict(message.payload.dictionary['contract']) \
if 'contract' in message.payload.dictionary else None
confirmations = message.payload.dictionary.get('confirmations', None)
if cache.add_response(message.candidate.get_member().public_key, (contract, confirmations)):
# If all responses are received remove the cache
self.request_cache.pop(u'traversal-request', message.payload.dictionary['identifier'])
def traverse_contracts(self, contract_id, contract_type):
contract_of_type = None
contract = self.data_manager.get_contract(contract_id) \
if self.data_manager.contract_on_blockchain(contract_id) else None
# Traverse contract chain
while contract:
if contract.type == contract_type:
contract_of_type = contract
contracts = self.data_manager.find_contracts(Contract.previous_hash == contract.id)
contracts = [contract for contract in list(contracts) if self.data_manager.contract_on_blockchain(contract.id)]
if len(contracts) == 1:
# Keep traversing the contract chain
contract = contracts[0]
continue
elif len(contracts) == 0:
# Found end of contract chain
return contract if contract_type is None else contract_of_type
break
def find_confirmation_count(self, contract_id):
# Find the number of confirmations this contract has
block_id = self.data_manager.get_blockchain_block_id(contract_id)
block = self.data_manager.get_block(block_id)
if block:
first_index = self.data_manager.get_block_index(block.id)
last_index = self.data_manager.get_block_indexes(limit=1)[0]
if first_index and last_index:
return last_index.height - first_index.height
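if __name__ == '__main__':
    # Illustrative sketch: check_proof() accepts a block when the SHA-256
    # digest of its serialization, read as a 256-bit integer, falls below
    # the target difficulty. The toy loop below mines a hypothetical
    # payload against the (easy) initial difficulty; int(hexlify(...), 16)
    # stands in here for full_to_uint256 from market.util.uint256.
    from binascii import hexlify
    nonce = 0
    while True:
        digest = hashlib.sha256(b'toy-block:' + str(nonce).encode('ascii')).digest()
        if int(hexlify(digest), 16) < BLOCK_DIFFICULTY_INIT:
            print('toy proof-of-work found at nonce %d' % nonce)
            break
        nonce += 1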
|
Tribler/decentralized-mortgage-market
|
market/community/blockchain/community.py
|
Python
|
gpl-3.0
| 30,409
|
# -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749.endpoint.introspect
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An implementation of the OAuth 2.0 `Token Introspection`.
.. _`Token Introspection`: https://tools.ietf.org/html/rfc7662
"""
from __future__ import absolute_import, unicode_literals
import json
import logging
from oauthlib.common import Request
from ..errors import OAuth2Error, UnsupportedTokenTypeError
from .base import BaseEndpoint, catch_errors_and_unavailability
log = logging.getLogger(__name__)
class IntrospectEndpoint(BaseEndpoint):
"""Introspect token endpoint.
This endpoint defines a method to query an OAuth 2.0 authorization
server to determine the active state of an OAuth 2.0 token and to
determine meta-information about this token. OAuth 2.0 deployments
can use this method to convey information about the authorization
context of the token from the authorization server to the protected
resource.
To prevent the values of access tokens from leaking into
server-side logs via query parameters, an authorization server
offering token introspection MAY disallow the use of HTTP GET on
the introspection endpoint and instead require the HTTP POST method
to be used at the introspection endpoint.
"""
valid_token_types = ('access_token', 'refresh_token')
def __init__(self, request_validator, supported_token_types=None):
BaseEndpoint.__init__(self)
self.request_validator = request_validator
self.supported_token_types = (
supported_token_types or self.valid_token_types)
@catch_errors_and_unavailability
def create_introspect_response(self, uri, http_method='POST', body=None,
headers=None):
"""Create introspect valid or invalid response
If the authorization server is unable to determine the state
of the token without additional information, it SHOULD return
an introspection response indicating the token is not active
as described in Section 2.2.
"""
headers = {
'Content-Type': 'application/json',
'Cache-Control': 'no-store',
'Pragma': 'no-cache',
}
request = Request(uri, http_method, body, headers)
try:
self.validate_introspect_request(request)
log.debug('Token introspect valid for %r.', request)
except OAuth2Error as e:
log.debug('Client error during validation of %r. %r.', request, e)
headers.update(e.headers)
return headers, e.json, e.status_code
claims = self.request_validator.introspect_token(
request.token,
request.token_type_hint,
request
)
if claims is None:
return headers, json.dumps(dict(active=False)), 200
if "active" in claims:
claims.pop("active")
return headers, json.dumps(dict(active=True, **claims)), 200
def validate_introspect_request(self, request):
"""Ensure the request is valid.
The protected resource calls the introspection endpoint using
an HTTP POST request with parameters sent as
"application/x-www-form-urlencoded".
token REQUIRED. The string value of the token.
token_type_hint OPTIONAL.
A hint about the type of the token submitted for
introspection. The protected resource MAY pass this parameter to
help the authorization server optimize the token lookup. If the
server is unable to locate the token using the given hint, it MUST
extend its search across all of its supported token types. An
authorization server MAY ignore this parameter, particularly if it
is able to detect the token type automatically.
* access_token: An Access Token as defined in [`RFC6749`],
`section 1.4`_
* refresh_token: A Refresh Token as defined in [`RFC6749`],
`section 1.5`_
The introspection endpoint MAY accept other OPTIONAL
parameters to provide further context to the query. For
instance, an authorization server may desire to know the IP
address of the client accessing the protected resource to
determine if the correct client is likely to be presenting the
token. The definition of this or any other parameters are
outside the scope of this specification, to be defined by
service documentation or extensions to this specification.
.. _`section 1.4`: http://tools.ietf.org/html/rfc6749#section-1.4
.. _`section 1.5`: http://tools.ietf.org/html/rfc6749#section-1.5
.. _`RFC6749`: http://tools.ietf.org/html/rfc6749
"""
self._raise_on_missing_token(request)
self._raise_on_invalid_client(request)
self._raise_on_unsupported_token(request)
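if __name__ == '__main__':
    # Minimal sketch of driving the endpoint with a stub validator.
    # StubValidator is hypothetical; the hook names follow oauthlib's
    # RequestValidator interface, and only the hooks this endpoint
    # actually exercises on this code path are implemented.
    class StubValidator(object):
        def client_authentication_required(self, request, *args, **kwargs):
            return False

        def authenticate_client_id(self, client_id, request, *args, **kwargs):
            return True

        def introspect_token(self, token, token_type_hint, request):
            if token == 'valid-token':
                return {'scope': 'read', 'client_id': 'demo',
                        'token_type': 'Bearer'}
            return None  # unknown/expired -> {"active": false}

    endpoint = IntrospectEndpoint(StubValidator())
    headers, body, status = endpoint.create_introspect_response(
        'https://sts.example/introspect', body='token=valid-token')
    print('%s %s' % (status, body))  # e.g. 200 {"active": true, "scope": "read", ...}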
|
pymedusa/Medusa
|
ext/oauthlib/oauth2/rfc6749/endpoints/introspect.py
|
Python
|
gpl-3.0
| 4,937
|
# version 0.5
import socket
import threading
import hashlib
import base64
import json
class BadWSRequest(Exception):
pass
class BadWSFrame(Exception):
pass
class BadCmdCall(Exception):
pass
class BadCmdParam(Exception):
pass
class Client(threading.Thread):
_MAGIC_STRING = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
_OPCODE_TEXT = 0x1
_OPCODE_CLOSE = 0x8
def __init__(self, Manager, socket, address):
super().__init__()
self.Manager = Manager
self.socket = socket
self.ip, self.port = address
self.invokedPath = None
self.sessionStarted = False
def _parseHeader(self):
self.socket.settimeout(2.0)
rcvBuffer = ''
toRead = True
while toRead:
rcvBuffer += self.socket.recv(128).decode('utf-8')
#Check for the termination sequence
if rcvBuffer[-4:] == '\r\n\r\n': toRead = False
#Consider using splitlines here
headerLines = rcvBuffer.split('\r\n')
requestLineElements = headerLines[0].split(' ')
if requestLineElements[0] == 'GET' and requestLineElements[-1] == 'HTTP/1.1':
self.invokedPath = requestLineElements[2]
else:
raise BadWSRequest
self.headerDict = {}
#Cut off rubbish (first line and termination sequence)
for header in headerLines[1:-2]:
headerKey, headerVal = header.split(':', 1)
self.headerDict.update({ headerKey: headerVal.strip() })
if (
'upgrade' not in self.headerDict['Connection'].lower().split(', ') or
self.headerDict['Upgrade'].lower() != 'websocket' or
'Sec-WebSocket-Key' not in self.headerDict
#Very weak part
):
raise BadWSRequest
#The operational phase needs a longer timeout
self.socket.settimeout(3600.0)
def _initComunication(self):
payload = 'HTTP/1.1 101 Web Socket Protocol Handshake\r\n'
payload += 'Upgrade: WebSocket\r\n'
payload += 'Connection: Upgrade\r\n'
#Generate the security key
acceptKey = self.headerDict['Sec-WebSocket-Key'] + self._MAGIC_STRING
acceptKey = hashlib.sha1( acceptKey.encode('ascii') ).digest()
acceptKey = base64.b64encode(acceptKey)
payload += 'Sec-WebSocket-Accept: ' + acceptKey.decode('utf-8') + '\r\n\r\n'
self.socket.send( payload.encode('utf-8') )
def _rcvRequest(self):
#1st byte: FIN, RUBBISH1, RUBBISH2, RUBBISH3, OPCODE (4 bit)
#2nd byte: MASKED, PAYLOAD_LENGTH (7 bit)
rcvBuffer = self.socket.recv(2)
print('FIN: ' + str( rcvBuffer[0] >> 7 ))
#0x0f is 00001111 binary sequence
opcode = rcvBuffer[0] & 0x0f
print('opcode: ' + hex( opcode ))
maskBit = rcvBuffer[1] >> 7
print('mask: ' + str( maskBit ))
if maskBit != 1:
raise BadWSFrame('Unmasked data')
#0x7f is 01111111 binary sequence
length = rcvBuffer[1] & 0x7f
if length == 126:
#An extended length is stored in the next two bytes
rcvBuffer = self.socket.recv(2)
length = int.from_bytes(rcvBuffer, 'big')
elif length == 127:
#A payload larger than 65kB per thread would make everything collapse..
#Then again.. why would a user upload that much data anyway? :O
raise BadWSFrame('Too big payload')
print('length: ' + str(length))
#Read the mask applied to data
maskKey = self.socket.recv(4)
#Consider buffering to make the thread more economical
rcvBuffer = self.socket.recv(length)
message = b''
for i in range(length):
#Unmask the original message
message += bytes([ rcvBuffer[i] ^ maskKey[i % 4] ])
print(message)
if opcode == self._OPCODE_TEXT:
return json.loads( message.decode('utf-8') )
elif opcode == self._OPCODE_CLOSE:
return None
else:
raise BadWSFrame('Unknown OpCode')
def _sndResponse(self, data):
data = json.dumps(data).encode('utf-8')
length = len(data)
#FIN bit and opcode 0x1 (0x81 is 10000001 binary sequence)
payload = b'\x81'
if length >= 65535:
#Over the maximum length allowed by 16bit addressing
raise BadWSFrame('Too big payload')
elif length <= 125:
payload += bytes([length])
else:
payload += bytes([126])
payload += length.to_bytes(2, 'big')
#The send could be buffered
self.socket.send(payload + data)
#Close by sending an error code and using the class-level opcode constant
def _sndClose(self):
#FIN bit and opcode 0x8 (0x88 is 10001000 binary sequence)
#Mask and length bits are zero
self.socket.send(b'\x88\x00')
#Empty the remote buffer
self.socket.recv(100)
def run(self):
print('[+] Connection established with ' + self.ip + ':' + str(self.port), "[%s]" % str(len(self.Manager)))
try:
self._parseHeader()
self._initComunication()
self.sessionStarted = True
#Non-blocking sockets could help drain the data sooner
while True:
request = self._rcvRequest()
if not request: break
response = self.Manager.executeAction(self, request)
if response is None:
raise BadCmdCall(request)
self._sndResponse(response)
except BadWSRequest:
print('[!] Bad-formed request from ' + self.ip + ':' + str(self.port))
except BadWSFrame as err:
print('[!] Bad-formed frame from ' + self.ip + ':' + str(self.port), str(err))
#Consider whether to keep this message or not
except BadCmdCall as err:
print('[!] Unknown command received from ' + self.ip + ':' + str(self.port), str(err))
except BadCmdParam as err:
print('[!] Invalid parameters from ' + self.ip + ':' + str(self.port), str(err))
except socket.timeout:
print('[!] Timeout occurred for ' + self.ip + ':' + str(self.port))
finally:
if self.sessionStarted:
self._sndClose()
self.socket.close()
self.Manager.rmvClient(self)
print('[-] Connection closed with ' + self.ip + ':' + str(self.port), "[%s]" % str(len(self.Manager)))
class ClientManager:
def __init__(self):
self.clientList = []
self.actionDict = {}
def __len__(self):
return len(self.clientList)
def addClient(self, clientSocket, address):
newClient = Client(self, clientSocket, address)
newClient.start()
self.clientList.append(newClient)
def rmvClient(self, clientInstance):
self.clientList.remove(clientInstance)
def registerAction(self, functionName, function):
self.actionDict.update({ functionName: function })
def executeAction(self, clientInstance, request):
#Array of two element is expected
function, parameters = request
if function in self.actionDict:
try:
return self.actionDict[function](*parameters)
except TypeError:
raise BadCmdParam(request)
else:
raise BadCmdCall(function)
def shutdown(self):
for client in self.clientList:
client.join()
class WebSocketServer:
def __init__(self, ip = '0.0.0.0', port = 8888, conns = 9999):
self.ip = ip
self.port = port
self.CM = ClientManager()
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.bind( (self.ip, self.port) )
self.socket.listen(conns)
print('[#] Waiting for connections on ' + self.ip + ':' + str(self.port) + '...')
except socket.error as err:
print('[!] Error opening the socket: ' + str(err))
def register(self, functionName, function):
self.CM.registerAction(functionName, function)
def start(self):
try:
while True:
clientSocket, address = self.socket.accept()
self.CM.addClient(clientSocket, address)
except:
print('[#] Shutting down the server...')
self.stop()
def stop(self):
self.CM.shutdown()
self.socket.close()
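if __name__ == '__main__':
    # Illustrative usage sketch. Clients are expected to send JSON frames
    # shaped like ["function_name", [arg1, arg2, ...]]; whatever the
    # registered callable returns is serialized back as the response.
    # The 'echo' handler is hypothetical.
    def echo(message):
        return ['echo', message]

    server = WebSocketServer(ip='127.0.0.1', port=8888)
    server.register('echo', echo)
    server.start()  # blocks; an interrupt falls through to stop()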
|
ferdas/ws-rpc
|
websocket.py
|
Python
|
gpl-3.0
| 8,711
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import requests
import yaml
PARTS_URI = 'https://wiki.ubuntu.com/Snappy/Parts'
PARTS_URI_PARAMS = {'action': 'raw'}
_WIKI_OPEN = '{{{'
_WIKI_CLOSE = '}}}'
logging.getLogger("urllib3").setLevel(logging.CRITICAL)
class Wiki:
wiki_parts = None
def _fetch(self):
if self.wiki_parts is None:
raw_content = requests.get(PARTS_URI, params=PARTS_URI_PARAMS)
content = raw_content.text.strip()
if content.startswith(_WIKI_OPEN):
content = content[len(_WIKI_OPEN):].strip()
if content.endswith(_WIKI_CLOSE):
content = content[:-len(_WIKI_CLOSE)]
self.wiki_parts = yaml.safe_load(content)
def get_part(self, name):
self._fetch()
if name in self.wiki_parts:
if 'plugin' in self.wiki_parts[name] and 'type' in self.wiki_parts[name]:
del self.wiki_parts[name]['type']
return self.wiki_parts[name]
def compose(self, name, properties):
"""Return properties composed with the ones from part name in the wiki.
:param str name: The name of the part to query from the wiki
:param dict properties: The current set of properties
:return: Part properties from the wiki composed with the properties
passed as a parameter. If there is no wiki part named name,
properties will be returned.
:rtype: dict
:raises KeyError: if the part named name is not found in the wiki.
"""
self._fetch()
wiki_properties = self.wiki_parts[name]
for key in wiki_properties:
properties[key] = properties.get(key, wiki_properties[key])
properties['plugin'] = wiki_properties.get('plugin', None)
return properties
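if __name__ == '__main__':
    # Illustrative usage sketch: compose() overlays the caller's properties
    # onto the wiki definition -- caller-supplied keys win, except 'plugin',
    # which always comes from the wiki entry. The part name 'curl' is
    # hypothetical; it must exist on the wiki page, otherwise a KeyError
    # is raised.
    wiki = Wiki()
    part = wiki.compose('curl', {'source': 'https://example.com/curl-fork.git'})
    print(part)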
|
rbreitenmoser/snapcraft
|
snapcraft/wiki.py
|
Python
|
gpl-3.0
| 2,447
|
"""
This module provides methods that parse variables into
one of the three base level variable types. We map the
variable type names to methods that can handle them. Each
method consumes a Request object 'rq' as the first positional
argument, and does its work by calling one of the variable-setting
methods on rq.
"""
from __future__ import absolute_import, division
from __future__ import unicode_literals, print_function
from uuid import uuid4
from functools import partial
from base64 import b64encode
from beekeeper.data_handlers import encode
from beekeeper.exceptions import CannotHandleVariableTypes
class VariableHandler(object):
registry = {}
def __init__(self, *var_types):
self.var_types = var_types
def __call__(self, func):
def wrapped(rq, **values):
return func(rq, **values)
for each in self.var_types:
VariableHandler.registry[each] = wrapped
return wrapped
def identity(**values):
return {name: val['value'] for name, val in values.items()}
def set_content_type(rq, mimetype):
rq.set_headers(**{'Content-Type': mimetype})
@VariableHandler('data')
def render_data(rq, **data):
for val in data.values():
set_content_type(rq, val['mimetype'])
rq.set_data(encode(val['value'], val['mimetype']))
@VariableHandler('http_form')
def http_form(rq, **values):
form = {
'x': {
'mimetype': 'application/x-www-form-urlencoded',
'value': {
name: val['value'] for name, val in values.items()
}
}
}
render(rq, 'data', **form)
@VariableHandler('http_basic_auth')
def basic_auth(rq, **values):
username = values.get('username', {}).get('value', '')
password = values.get('password', {}).get('value', '')
authinfo = b64encode("{}:{}".format(username, password).encode('utf-8'))
authinfo = 'Basic {}'.format(authinfo.decode('utf-8'))
rq.set_headers(Authorization=authinfo)
@VariableHandler('bearer_token')
def bearer(rq, **values):
if len(values) > 1:
raise Exception('Only one bearer token allowed')
else:
for token in values.values():
text = 'Bearer {}'.format(token['value'])
rq.set_headers(Authorization=text)
@VariableHandler('cookie')
def cookies(rq, **values):
cookie = '; '.join([value['value'] for value in values.values()])
rq.set_headers(Cookie=cookie)
@VariableHandler('multipart')
def multipart(rq, **values):
frame = '\n--{}\nContent-Disposition: form-data; name="{}"'
boundary = uuid4().hex
files = [name for name, data in values.items() if 'mimetype' in data]
output = bytes()
for name, value in values.items():
if name in files:
fname = value.get('filename', getattr(value['value'], 'name', uuid4().hex))
this_frame = frame + '; filename="{}"\nContent-Type: {}\n\n'
this_data = encode(value['value'], value['mimetype'])
args = (boundary, name, fname, value['mimetype'])
else:
this_frame = frame + '\n\n'
this_data = value['value'].encode('ascii')
args = (boundary, name)
output += this_frame.format(*args).encode('ascii') + this_data
output += '\n--{}--'.format(boundary).encode('ascii')
rq.set_data(output)
content_type_header = 'multipart/form-data; boundary={}'.format(boundary)
set_content_type(rq, content_type_header)
@VariableHandler('header')
def header(rq, **values):
rq.set_headers(**identity(**values))
@VariableHandler('url_replacement')
def replacement(rq, **values):
rq.set_url_replacements(**identity(**values))
@VariableHandler('url_param')
def url_param(rq, **values):
rq.set_url_params(**identity(**values))
def render(rq, var_type, **values):
if var_type in VariableHandler.registry:
variables = {val.pop('name', name): val for name, val in values.items()}
VariableHandler.registry[var_type](rq, **variables)
else:
raise CannotHandleVariableTypes(var_type)
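if __name__ == '__main__':
    # Illustrative usage sketch: render() dispatches on the variable type
    # name and lets the registered handler mutate the request through its
    # setter methods. StubRequest is a hypothetical stand-in implementing
    # only the setter that the 'http_basic_auth' handler calls.
    class StubRequest(object):
        def __init__(self):
            self.headers = {}

        def set_headers(self, **headers):
            self.headers.update(headers)

    rq = StubRequest()
    render(rq, 'http_basic_auth',
           username={'value': 'alice'}, password={'value': 'secret'})
    print(rq.headers)  # {'Authorization': 'Basic YWxpY2U6c2VjcmV0'}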
|
Vagab0nd/SiCKRAGE
|
lib3/beekeeper/variable_handlers.py
|
Python
|
gpl-3.0
| 4,051
|
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility back to Python 2.5 and (currently) has significant performance
advantages, even without using the optional C extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print(json.dumps("\"foo\bar"))
"\"foo\bar"
>>> print(json.dumps(u'\u1234'))
"\u1234"
>>> print(json.dumps('\\'))
"\\"
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
{"a": 0, "b": 0, "c": 0}
>>> from simplejson.compat import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> obj = [1,2,3,{'4': 5, '6': 7}]
>>> json.dumps(obj, separators=(',',':'), sort_keys=True)
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' '))
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from simplejson.compat import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError('Object of type %s is not JSON serializable' %
... obj.__class__.__name__)
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 3 (char 2)
Parsing multiple documents serialized as JSON lines (newline-delimited JSON)::
>>> import simplejson as json
>>> def loads_lines(docs):
... for doc in docs.splitlines():
... yield json.loads(doc)
...
>>> sum(doc["count"] for doc in loads_lines('{"count":1}\n{"count":2}\n{"count":3}\n'))
6
Serializing multiple objects to JSON lines (newline-delimited JSON)::
>>> import simplejson as json
>>> def dumps_lines(objs):
... for obj in objs:
... yield json.dumps(obj, separators=(',',':')) + '\n'
...
>>> ''.join(dumps_lines([{'count': 1}, {'count': 2}, {'count': 3}]))
'{"count":1}\n{"count":2}\n{"count":3}\n'
"""
from __future__ import absolute_import
__version__ = '3.16.1'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict', 'simple_first', 'RawJSON'
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decimal import Decimal
from .errors import JSONDecodeError
from .raw_json import RawJSON
from .decoder import JSONDecoder
from .encoder import JSONEncoder, JSONEncoderForHTML
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
from . import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from ._speedups import make_encoder
return make_encoder
except ImportError:
return None
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
use_decimal=True,
namedtuple_as_object=True,
tuple_as_array=True,
iterable_as_array=False,
bigint_as_string=False,
item_sort_key=None,
for_json=False,
ignore_nan=False,
int_as_string_bitcount=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
for_json=False, ignore_nan=False, int_as_string_bitcount=None,
iterable_as_array=False, **kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If *skipkeys* is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If *ensure_ascii* is false, then some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If *check_circular* is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If *allow_nan* is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance of the original JSON specification, instead of using
the JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). See
*ignore_nan* for ECMA-262 compliant behavior.
If *indent* is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, *separators* should be an
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
compact JSON representation, you should specify ``(',', ':')`` to eliminate
whitespace.
*encoding* is the character encoding for str instances, default is UTF-8.
*default(obj)* is a function that should return a serializable version
of obj or raise ``TypeError``. The default simply raises ``TypeError``.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *iterable_as_array* is true (default: ``False``),
any object not in the above table that implements ``__iter__()``
will be encoded as a JSON array.
If *bigint_as_string* is true (default: ``False``), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise. Note that this is still a
lossy operation that will not round-trip correctly and should be used
sparingly.
If *int_as_string_bitcount* is a positive number (n), then int of size
greater than or equal to 2**n or lower than or equal to -2**n will be
encoded as strings.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
If *for_json* is true (default: ``False``), objects with a ``for_json()``
method will use the return value of that method for encoding as JSON
instead of the object.
If *ignore_nan* is true (default: ``False``), then out of range
:class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
``null`` in compliance with the ECMA-262 specification. If true, this will
override *allow_nan*.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg. NOTE: You should use *default* or *for_json* instead
of subclassing whenever possible.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array and not iterable_as_array
and not bigint_as_string and not sort_keys
and not item_sort_key and not for_json
and not ignore_nan and int_as_string_bitcount is None
and not kw
):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
default=default, use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
iterable_as_array=iterable_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
for_json=for_json,
ignore_nan=ignore_nan,
int_as_string_bitcount=int_as_string_bitcount,
**kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
bigint_as_string=False, sort_keys=False, item_sort_key=None,
for_json=False, ignore_nan=False, int_as_string_bitcount=None,
iterable_as_array=False, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is false then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, ``separators`` should be an
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
if *indent* is ``None`` and ``(',', ': ')`` otherwise. To get the most
compact JSON representation, you should specify ``(',', ':')`` to eliminate
whitespace.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
If *iterable_as_array* is true (default: ``False``),
any object not in the above table that implements ``__iter__()``
will be encoded as a JSON array.
If *bigint_as_string* is true (not the default), ints 2**53 and higher
or lower than -2**53 will be encoded as strings. This is to avoid the
rounding that happens in Javascript otherwise.
If *int_as_string_bitcount* is a positive number (n), then int of size
greater than or equal to 2**n or lower than or equal to -2**n will be
encoded as strings.
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
will be sorted by item.
If *for_json* is true (default: ``False``), objects with a ``for_json()``
method will use the return value of that method for encoding as JSON
instead of the object.
If *ignore_nan* is true (default: ``False``), then out of range
:class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized as
``null`` in compliance with the ECMA-262 specification. If true, this will
override *allow_nan*.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg. NOTE: You should use *default* instead of subclassing
whenever possible.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array and not iterable_as_array
and not bigint_as_string and not sort_keys
and not item_sort_key and not for_json
and not ignore_nan and int_as_string_bitcount is None
and not kw
):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
iterable_as_array=iterable_as_array,
bigint_as_string=bigint_as_string,
sort_keys=sort_keys,
item_sort_key=item_sort_key,
for_json=for_json,
ignore_nan=ignore_nan,
int_as_string_bitcount=int_as_string_bitcount,
**kw).encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
of subclassing whenever possible.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. NOTE: You should use *object_hook* or *object_pairs_hook* instead
of subclassing whenever possible.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None
and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
if use_decimal:
if parse_float is not None:
raise TypeError("use_decimal=True implies parse_float=Decimal")
kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s)
def _toggle_speedups(enabled):
from . import decoder as dec
from . import encoder as enc
from . import scanner as scan
c_make_encoder = _import_c_make_encoder()
if enabled:
dec.scanstring = dec.c_scanstring or dec.py_scanstring
enc.c_make_encoder = c_make_encoder
enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
enc.py_encode_basestring_ascii)
scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
else:
dec.scanstring = dec.py_scanstring
enc.c_make_encoder = None
enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
scan.make_scanner = scan.py_make_scanner
dec.make_scanner = scan.make_scanner
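    # Rebuild the cached module-level decoder/encoder so the fast paths in
    # loads() and dumps() pick up the newly selected implementations.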
global _default_decoder
_default_decoder = JSONDecoder(
encoding=None,
object_hook=None,
object_pairs_hook=None,
)
global _default_encoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
)
def simple_first(kv):
"""Helper function to pass to item_sort_key to sort simple
elements to the top, then container elements.
"""
return (isinstance(kv[1], (list, dict, tuple)), kv[0])
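# Usage sketch (hypothetical data): containers sort after simple values
#   >>> dumps({'a': [1], 'b': 2}, item_sort_key=simple_first)
#   '{"b": 2, "a": [1]}'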
|
SickGear/SickGear
|
lib/simplejson/__init__.py
|
Python
|
gpl-3.0
| 24,480
|
from __future__ import print_function
import numpy as np
h, l, c = np.loadtxt('data.csv', delimiter=',', usecols=(4, 5, 6), unpack=True)
N = 5
h = h[-N:]
l = l[-N:]
print("len(h)", len(h), "len(l)", len(l))
print("Close", c)
previousclose = c[-N -1: -1]
print("len(previousclose)", len(previousclose))
print("Previous close", previousclose)
# nest the calls: np.maximum takes the elementwise max of two arrays; a third
# positional argument would be treated as the output buffer, not a third input
truerange = np.maximum(h - l, np.maximum(h - previousclose, previousclose - l))
print("True range", truerange)
atr = np.zeros(N)
atr[0] = np.mean(truerange)
for i in range(1, N):
atr[i] = (N - 1) * atr[i - 1] + truerange[i]
atr[i] /= N
print("ATR", atr)
|
moonbury/notebooks
|
github/Numpy/Chapter3/atr.py
|
Python
|
gpl-3.0
| 599
|
from django.contrib import admin
from modeltranslation.admin import TranslationAdmin
from .models import Campaign
class CampaignAdmin(TranslationAdmin):
list_display = ("__str__", "url", "image", "active")
admin.site.register(Campaign, CampaignAdmin)
|
erudit/eruditorg
|
eruditorg/apps/public/campaign/admin.py
|
Python
|
gpl-3.0
| 260
|
import kivy
kivy.require('1.9.1')
from kivy.uix.label import Label
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.widget import Widget
from kivy.uix.scatter import Scatter
from kivy.app import Builder
from kivy.metrics import dp
from kivy.graphics import Color, Line
from autosportlabs.racecapture.geo.geopoint import GeoPoint
from autosportlabs.uix.track.trackmap import TrackMapView
from utils import *
Builder.load_file('autosportlabs/uix/track/racetrackview.kv')
class RaceTrackView(BoxLayout):
def __init__(self, **kwargs):
super(RaceTrackView, self).__init__(**kwargs)
def loadTrack(self, track):
self.initMap(track)
def initMap(self, track):
self.ids.trackmap.setTrackPoints(track.map_points)
def remove_reference_mark(self, key):
self.ids.trackmap.remove_marker(key)
def add_reference_mark(self, key, color):
trackmap = self.ids.trackmap
if trackmap.get_marker(key) is None:
trackmap.add_marker(key, color)
def update_reference_mark(self, key, geo_point):
self.ids.trackmap.update_marker(key, geo_point)
def add_map_path(self, key, path, color):
self.ids.trackmap.add_path(key, path, color)
def remove_map_path(self, key):
self.ids.trackmap.remove_path(key)
def add_heat_values(self, key, heat_values):
self.ids.trackmap.add_heat_values(key, heat_values)
def remove_heat_values(self, key):
self.ids.trackmap.remove_heat_values(key)
|
ddimensia/RaceCapture_App
|
autosportlabs/uix/track/racetrackview.py
|
Python
|
gpl-3.0
| 1,499
|
# Copyright 2012 Lee Verberne <lee@blarg.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import os, os.path
import shutil as sh
import sys
from fabric.api import abort, local, prompt, warn
# Fabric 1.0 changed the scope of cd() to only affect remote calls.
# This bit of kludgery maintains compatibility of this file with fabric 0.9,
# but it is only possible because no remote calls are made in this file
try:
from fabric.api import lcd as cd
except ImportError:
from fabric.api import cd
from ubik import builder, packager
# filemap copies files directly from source to root, there is no build step
defenv = builder.BuildEnv('_root','_root','.')
file_map, file_map_table = None, None
def _install_file_map(fmap, installdir):
for src, dst in fmap:
_install(src, os.path.join(installdir,dst))
def _install(src, dst):
if src and os.path.isdir(src):
sh.copytree(src, dst)
else:
if not os.path.exists(os.path.dirname(dst)):
os.makedirs(os.path.dirname(dst))
if src:
sh.copy(src, dst)
def build(pkgtype='deb', env=defenv):
'Builds this package into a directory tree'
if file_map:
_install_file_map(file_map, env.rootdir)
elif file_map_table:
_install_file_map(file_map_table[pkgtype], env.rootdir)
else:
abort("You must register a filemap with this module using register().")
def clean(env=defenv):
'Remove build directory and packages'
with cd(env.srcdir):
local('rm -rf _* *.deb *.rpm', capture=False)
local('find . -name \*.pyc -print -exec rm \{\} \;', capture=False)
def deb(version=None):
'Build a debian package'
package(version, 'deb')
def debiandir(version='0.0', env=defenv):
"Generate DEBIAN dir in rootdir, but don't build package"
if not env.exists('builddir'):
build('deb', env)
packager.DebPackage('package.ini', env).debiandir(version)
def filelist(pkgtype='deb', env=defenv):
'''Outputs default filelist as json (see details)
Generates and prints to stdout a filelist json that can be modified and
used with package.ini's "filelist" option to override the default.
Useful for setting file modes in RPMs'''
if not env.exists('builddir'):
build(pkgtype, env)
packager.Package('package.ini', env).filelist()
def package(version=None, pkgtype='deb', env=defenv):
'Creates deployable packages'
if not version:
version = prompt("What version did you want packaged there, hotshot?")
if not env.exists('builddir'):
warn('Implicitly invoking build')
build(pkgtype, env)
pkg = packager.Package('package.ini', env, pkgtype)
pkg.build(version)
def register(filemap_or_table):
'Register a filemap for use with this module'
global file_map, file_map_table
if isinstance(filemap_or_table, list):
file_map = filemap_or_table
elif isinstance(filemap_or_table, dict):
file_map_table = filemap_or_table
else:
abort("I don't even know what you're talking about.")
def rpm(version=None):
'Build a Red Hat package'
package(version, 'rpm')
def rpmspec(version='0.0', env=defenv):
'Output the generated RPM spec file'
if not env.exists('builddir'):
build('rpm', env)
packager.RpmPackage('package.ini', env).rpmspec(sys.stdout, version)
|
kafana/ubik
|
lib/ubik/fab/filemap.py
|
Python
|
gpl-3.0
| 3,975
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Tests the following set of sequences:
z-a-b-c: (1X)
a-b-c: (6X)
a-d-e: (2X)
a-f-g-a-h: (1X)
We want to ensure that when we see 'a', we predict 'b' with the highest
confidence, then 'd', then 'f' and 'h' with equally low confidence.
We expect the following prediction scores:
inputPredScore_at1 : 0.7
inputPredScore_at2 : 1.0
inputPredScore_at3 : 1.0
inputPredScore_at4 : 1.0
"""
from nupic.frameworks.prediction.helpers import importBaseDescription
config = dict(
sensorVerbosity=0,
spVerbosity=0,
tpVerbosity=0,
ppVerbosity=2,
filenameTrain = 'confidence/confidence2.csv',
filenameTest = 'confidence/confidence2.csv',
iterationCountTrain=None,
iterationCountTest=None,
trainTPRepeats = 5,
trainTP=True,
)
mod = importBaseDescription('../base/description.py', config)
locals().update(mod.__dict__)
|
tkaitchuck/nupic
|
examples/prediction/experiments/confidenceTest/2/description.py
|
Python
|
gpl-3.0
| 2,040
|
import IMP
import IMP.algebra
import IMP.core
import IMP.atom
import IMP.test
class Tests(IMP.test.TestCase):
"""Tests for SurfaceMover."""
def test_init(self):
"""Test creation of surface mover."""
m = IMP.Model()
surf = IMP.core.Surface.setup_particle(IMP.Particle(m))
surf.set_coordinates_are_optimized(True)
surf.set_normal_is_optimized(True)
mv = IMP.core.SurfaceMover(surf, 1, .1, 1.)
mv.set_was_used(True)
def test_propose_move(self):
"""Test proposing move alters center and normal."""
m = IMP.Model()
surf = IMP.core.Surface.setup_particle(IMP.Particle(m))
n = surf.get_normal()
c = surf.get_coordinates()
surf.set_coordinates_are_optimized(True)
surf.set_normal_is_optimized(True)
mv = IMP.core.SurfaceMover(surf, 1, .1, 1.)
mv.propose()
self.assertNotAlmostEqual((n - surf.get_normal()).get_magnitude(), 0)
self.assertNotAlmostEqual((c - surf.get_coordinates()).get_magnitude(), 0)
def test_propose_reflect(self):
"""Test reflect correctly flips normal."""
m = IMP.Model()
surf = IMP.core.Surface.setup_particle(IMP.Particle(m))
n = surf.get_normal()
surf.set_normal_is_optimized(True)
mv = IMP.core.SurfaceMover(surf, 0, 0, 1.)
mv.propose()
self.assertAlmostEqual((n + surf.get_normal()).get_magnitude(), 0)
def test_reject_restores_initial_state(self):
"""Test rejecting a move returns the surface to previous state."""
m = IMP.Model()
surf = IMP.core.Surface.setup_particle(IMP.Particle(m))
n = surf.get_normal()
c = surf.get_coordinates()
surf.set_coordinates_are_optimized(True)
surf.set_normal_is_optimized(True)
mv = IMP.core.SurfaceMover(surf, 1, .1, 1.)
mv.propose()
mv.reject()
self.assertAlmostEqual((n - surf.get_normal()).get_magnitude(), 0)
self.assertAlmostEqual((c - surf.get_coordinates()).get_magnitude(), 0)
def test_inputs(self):
"""Test only input is Surface."""
m = IMP.Model()
surf = IMP.core.Surface.setup_particle(IMP.Particle(m))
surf.set_coordinates_are_optimized(True)
surf.set_normal_is_optimized(True)
mv = IMP.core.SurfaceMover(surf, 1, .1, 1.)
self.assertSetEqual(set([surf.get_particle()]), set(mv.get_inputs()))
mv.set_was_used(True)
if __name__ == '__main__':
IMP.test.main()
|
shanot/imp
|
modules/core/test/test_surface_mover.py
|
Python
|
gpl-3.0
| 2,528
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Exception classes for commands modules.
Defined here to avoid circular dependency hell.
"""
class Error(Exception):
"""Base class for all cmdexc errors."""
class CommandError(Error):
"""Raised when a command encounters an error while running."""
pass
class NoSuchCommandError(Error):
"""Raised when a command wasn't found."""
pass
class ArgumentTypeError(Error):
"""Raised when an argument had an invalid type."""
pass
class PrerequisitesError(Error):
"""Raised when a cmd can't be used because some prerequisites aren't met.
This is raised for example when we're in the wrong mode while executing the
command, or when we need javascript enabled but it isn't.
"""
pass
|
NoctuaNivalis/qutebrowser
|
qutebrowser/commands/cmdexc.py
|
Python
|
gpl-3.0
| 1,545
|
import serial
import serial.tools.list_ports
import copy
import numpy as np
import math
import random
class AsciiSerial:
def __init__(self):
self._graphsChannels = {'graph1': None, 'graph2': None, 'graph3': None, 'graph4': None}
self._enChannels = {'graph1': False, 'graph2': False, 'graph3': False, 'graph4': False}
# Structure definition:
# {'ChannelName': channelData('display', {'lineName': [lowLevelID, xFieldID, yFieldID(optional)], ... }), ... }
self._channelsDataStructure = {
'POSITION': channelData('line-scatter', {'p': [0, 0, 1]}),
'TRAJECTORY': channelData('line-scatter', {'t': [1, 0, 1]}),
'PID_V_G': channelData('line', {'setPoint': [2, 0], 'value': [2, 1], 'output': [2, 2]}),
'PID_V_D': channelData('line', {'setPoint': [3, 0], 'value': [3, 1], 'output': [3, 2]}),
'PID_TRANS': channelData('line', {'setPoint': [4, 0], 'value': [4, 1], 'output': [4, 2]}),
'BLOCKING_M_G': channelData('line', {'aimSpeed': [5, 0], 'realSpeed': [5, 1], 'isBlocked': [5, 2]}),
'BLOCKING_M_D': channelData('line', {'aimSpeed': [6, 0], 'realSpeed': [6, 1], 'isBlocked': [6, 2]}),
'STOPPING_MGR': channelData('line', {'speed': [7, 0], 'isStopped': [7, 1]}),
'DIRECTION': channelData('line', {'aimDirection': [8, 0], 'realDirection': [8, 1]}),
'SENSORS': channelData('scatter', {'sensorTest': [9, 0, 1]}),
'PID_TRAJ': channelData('scatter', {'p': [0, 0, 1], 't': [1, 0, 1]}),
'TRAJ_ERRORS': channelData('line', {'t': [10, 0], 'c': [10, 1], 'ac': [10, 2], 'ang': [10, 3], 'pos': [10, 4]}),
'AX12_G': channelData('line', {'aim': [8, 4], 'real': [8, 2]}),
'AX12_D': channelData('line', {'aim': [8, 5], 'real': [8, 3]})
}
self._shapeInitData = {
'line': [],
'line-scatter': [[], []],
'scatter': [[], []]
}
self.linesToSend = []
self.receivedLines_main = []
self.receivedLines_warning = []
self.receivedLines_error = []
self.serial = serial.Serial()
self.incomingLine = ""
        # Data format:
        # {'graphN': {'data': {'lineName': lineData, ...}, 'shape': String}
        #
        # 'shape' can be:
        # "line": continuous line connecting each point
        # "scatter": cloud of independent (x, y) points
        # "line-scatter": cloud of (x, y) points connected to each other
        #
        # For each 'shape', 'lineData' takes a different form:
        # "line": 1-dimensional array holding the y coordinate of each point
        # "scatter": 2-dimensional array t. t[0] is an array holding x for each point; t[1] holds y
        # "line-scatter": same as 'scatter'
self.graphData = {'graph1': {'data': None, 'shape': None},
'graph2': {'data': None, 'shape': None},
'graph3': {'data': None, 'shape': None},
'graph4': {'data': None, 'shape': None}}
self.phase = np.linspace(0, 10 * np.pi, 100)
self.index = 0
@staticmethod
def scanPorts():
return list(serial.tools.list_ports.comports())
def open(self, port):
self.serial.port = port.split(" ")[0]
self.serial.open()
def close(self):
self.serial.close()
def getChannelsList(self):
channelsList = []
for key in self._channelsDataStructure:
channelsList.append(key)
channelsList.sort()
return channelsList
def getChannelsFromID(self, identifier):
channels = set()
for channel, cData in self._channelsDataStructure.items():
lines = cData.lineNames
for lineName, lineIds in lines.items():
if lineIds[0] == identifier:
channels.add(channel)
return channels
def getIDsFromChannel(self, channel):
ids = set()
lines = self._channelsDataStructure[channel].lineNames
for lineName, lineIds in lines.items():
ids.add(lineIds[0])
return ids
def communicate(self):
if self.serial.is_open:
for line in self.linesToSend:
self.serial.write(line.encode('ascii'))
self.linesToSend.clear()
nbB = self.serial.in_waiting
if nbB > 0:
self.incomingLine += self.serial.read(nbB).decode(encoding='utf-8', errors='ignore')
newLineIndex = self.incomingLine.find('\n')
while newLineIndex != -1:
self.addLineToProperList(self.incomingLine[0:newLineIndex+1])
self.incomingLine = self.incomingLine[newLineIndex+1:]
newLineIndex = self.incomingLine.find('\n')
def addLineToProperList(self, line):
if len(line) > 5 and line[0:6] == "_data_":
try:
self.addGraphData(line[6:])
except ValueError:
self.receivedLines_main.append(line)
elif len(line) > 8 and line[0:9] == "_warning_":
self.receivedLines_warning.append(line[9:])
elif len(line) > 7 and line[0:7] == "_error_":
splittedLine = line.split("_")
errorLine = "#" + splittedLine[2] + "# "
for s in splittedLine[3:]:
errorLine += s
self.receivedLines_error.append(errorLine)
else:
self.receivedLines_main.append(line)
def addGraphData(self, strData):
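        # strData is the payload after the "_data_" prefix, e.g. a
        # (hypothetical) line "4_12.0_11.5_0.3" yields idChannel 4 and
        # values [12.0, 11.5, 0.3]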
data = strData.split("_")
idChannel = int(data[0])
channels = self.getChannelsFromID(idChannel)
values = []
for strValue in data[1:]:
values.append(float(strValue))
for graph in ['graph1', 'graph2', 'graph3', 'graph4']:
gChannel = self._graphsChannels[graph]
if gChannel in channels and self._enChannels[graph]:
lines = self._channelsDataStructure[gChannel].lineNames
for lineName, ids in lines.items():
if ids[0] == idChannel:
if len(ids) == 2: # One dimension data
if len(values) <= 1:
raise ValueError
self.graphData[graph]['data'][lineName].append(values[ids[1]])
elif len(ids) == 3: # Two dimensions data
if len(values) <= 2:
raise ValueError
self.graphData[graph]['data'][lineName][0].append(values[ids[1]])
self.graphData[graph]['data'][lineName][1].append(values[ids[2]])
    def setEnabledChannels(self, completeConfig):
        newGraphsChannels = {'graph1': completeConfig['graph1']['channel'],
                             'graph2': completeConfig['graph2']['channel'],
                             'graph3': completeConfig['graph3']['channel'],
                             'graph4': completeConfig['graph4']['channel']}
        newEnabledList = {'graph1': completeConfig['graph1']['enable'],
                          'graph2': completeConfig['graph2']['enable'],
                          'graph3': completeConfig['graph3']['enable'],
                          'graph4': completeConfig['graph4']['enable']}
commandLines = []
graphs = ['graph1', 'graph2', 'graph3', 'graph4']
for graph in graphs:
if newGraphsChannels[graph] != self._graphsChannels[graph]:
if self._enChannels[graph]:
commandLines += self.enableChannel(self._graphsChannels[graph], False)
else:
if newEnabledList[graph] != self._enChannels[graph]:
if not newEnabledList[graph]:
commandLines += self.enableChannel(self._graphsChannels[graph], False)
for graph in graphs:
if newGraphsChannels[graph] != self._graphsChannels[graph]:
if newEnabledList[graph]:
self.resetGraphData(graph, newGraphsChannels[graph])
commandLines += self.enableChannel(newGraphsChannels[graph], True)
else:
if newEnabledList[graph] != self._enChannels[graph]:
if newEnabledList[graph]:
self.resetGraphData(graph, newGraphsChannels[graph])
commandLines += self.enableChannel(self._graphsChannels[graph], True)
self._graphsChannels = newGraphsChannels
self._enChannels = newEnabledList
return commandLines
def enableChannel(self, channel, enable):
commandLines = []
ids = self.getIDsFromChannel(channel)
for i in ids:
if enable:
commandLine = "logon "
else:
commandLine = "logoff "
commandLine += str(i)
commandLine += '\n'
self.addLinesToSend([commandLine])
commandLines.append(commandLine)
return commandLines
def resetGraphData(self, graph, channel):
cData = self._channelsDataStructure[channel]
self.graphData[graph]['shape'] = cData.shape
initData = self._shapeInitData[cData.shape]
initDict = {}
for name in cData.lineNames:
initDict[name] = copy.deepcopy(initData)
self.graphData[graph]['data'] = copy.deepcopy(initDict)
def getLines_main(self):
lines = copy.deepcopy(self.receivedLines_main)
self.receivedLines_main.clear()
return lines
def getLines_warning(self):
lines = copy.deepcopy(self.receivedLines_warning)
self.receivedLines_warning.clear()
return lines
def getLines_error(self):
lines = copy.deepcopy(self.receivedLines_error)
self.receivedLines_error.clear()
return lines
def addLinesToSend(self, lines):
self.linesToSend += lines
def clearLinesToSend(self):
self.linesToSend = []
def getAllData(self):
# y = np.multiply(np.sin(np.linspace(0, 6 * np.pi, 100) + self.phase[self.index]), self.index/20)
# y2 = np.multiply(np.sin(np.linspace(0, 6 * np.pi, 100) + (self.phase[self.index] + 0.1)), self.index/30)
# self.index = int(math.fmod((self.index + 1), len(self.phase)))
# return {'graph1': {'data': {'pwm': y, 'bite': y2}, 'shape': 'line'},
# 'graph2': {'data':
# {'traj': [[0,1,5*random.random(),9,12,6,3],[0,2,3,6*random.random(),7,2,-3]],
# 'bite': [[0, 2, 4 * random.random(), 9, 12, 7, 3],
# [3, 2, 3, 5 * random.random(), 3, 2, -1]]},
# 'shape': 'scatter'},
# 'graph3': {'data': {}, 'shape': 'line'},
# 'graph4': {'data': {}, 'shape': 'line'}
# }
for graph in ['graph1', 'graph2', 'graph3', 'graph4']:
if self.graphData[graph]['data'] is not None:
for key, value in self.graphData[graph]['data'].items():
                    if len(value) > 1000:
                        # rebind in the dict; reassigning the local 'value'
                        # alone would not persist the truncation
                        self.graphData[graph]['data'][key] = value[len(value) - 1000:]
return self.graphData
class channelData:
def __init__(self, shape, lineNames):
self.shape = shape
self.lineNames = lineNames
|
INTechSenpai/moon-rover
|
debug_tools/python_debug_console/AsciiSerial.py
|
Python
|
gpl-3.0
| 11,638
|
#!/usr/bin/env python
# Copyright 2013-2015 Julian Metzler
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from setuptools import setup, find_packages
metadata = {}
with open('tweetpony/metadata.py') as f:
exec(f.read(), metadata)
setup(
name = metadata['name'],
version = metadata['version'],
description = metadata['description'],
license = metadata['license'],
author = metadata['author'],
author_email = metadata['author_email'],
install_requires = metadata['requires'],
url = metadata['url'],
keywords = metadata['keywords'],
packages = find_packages(),
use_2to3 = True,
)
|
Mezgrman/TweetPony
|
setup.py
|
Python
|
agpl-3.0
| 1,186
|
# Copyright (c) 2008 Mikeal Rogers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dealer.git import git
from django.template import RequestContext
requestcontext = None
class MakoMiddleware(object):
def process_request(self, request):
global requestcontext
requestcontext = RequestContext(request)
requestcontext['is_secure'] = request.is_secure()
requestcontext['site'] = request.get_host()
requestcontext['REVISION'] = git.revision
|
abo-abo/edx-platform
|
common/djangoapps/mitxmako/middleware.py
|
Python
|
agpl-3.0
| 1,006
|
from bears.natural_language.AlexBear import AlexBear
from tests.LocalBearTestHelper import verify_local_bear
good_file = "Their network looks good."
bad_file = "His network looks good."
AlexBearTest = verify_local_bear(AlexBear,
valid_files=(good_file,),
invalid_files=(bad_file,))
|
chriscoyfish/coala-bears
|
tests/natural_language/AlexBearTest.py
|
Python
|
agpl-3.0
| 352
|
from amon.apps.alerts.checkers.system import system_alerts
from amon.apps.alerts.checkers.process import process_alerts
from amon.apps.alerts.checkers.plugin import plugin_alerts
from amon.apps.alerts.checkers.healthcheck import healthcheck_alert_checker
from amon.apps.alerts.models import alerts_model
from amon.apps.plugins.models import plugin_model
from amon.apps.processes.models import process_model
from amon.utils.dates import unix_utc_now
class Alerter(object):
def check_tags(self, server=None, rule=None):
valid_rule = True
server_tags = server.get('tags', [])
server_tags = [str(t) for t in server_tags]
tags = rule.get('tags', [])
tags = [str(t) for t in tags]
# Check tags first
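        # e.g. rule tags ['web'] against server tags ['web', 'db'] -> valid;
        # an empty tag list on either side leaves the rule valid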
if len(server_tags) > 0 and len(tags) > 0:
valid_rule = set(tags).issubset(server_tags)
return valid_rule
class ServerAlerter(Alerter):
def check(self, data, server):
alerts = False
account_id = server.get('account_id', None)
# System alerts
rules = alerts_model.get_alerts(type='system', server=server)
if rules:
alerts = system_alerts.check(data=data, rules=rules, server=server)
if alerts:
alerts_model.save_system_occurence(alerts, server_id=server['_id'])
# Global rules
global_rules = alerts_model.get_global_alerts(account_id=account_id)
if global_rules:
alerts = system_alerts.check(data=data, rules=global_rules, server=server)
if alerts:
alerts_model.save_system_occurence(alerts, server_id=server['_id'])
return alerts # For the test suite
class ProcessAlerter(Alerter):
def check_rule_and_save(self, process_data_dict=None, rule=None, process_id=None, server_id=None):
process_data = next((item for item in process_data_dict if item["p"] == process_id), None)
if process_data:
alert = process_alerts.check(process_data, rule)
if alert:
alerts_model.save_occurence(alert, server_id=server_id)
def check(self, data, server):
process_data_dict = data.get('data', None)
rules = alerts_model.get_alerts(type='process', server=server)
if len(rules) + len(process_data_dict) > 0:
for rule in rules:
process_id = rule['process']
self.check_rule_and_save(process_id=process_id, rule=rule, process_data_dict=process_data_dict, server_id=server['_id'])
# Global alerts
rules = alerts_model.get_alerts(type='process_global')
if len(rules) + len(process_data_dict) > 0:
all_processes = process_model.get_all_for_server(server['_id'])
for rule in rules:
valid_rule = self.check_tags(server=server, rule=rule)
if valid_rule:
process_name = rule.get('process')
process_id = None
# Check if this server has a process with this name
for p in all_processes.clone():
if p.get('name') == process_name:
process_id = p.get('_id')
if process_id:
self.check_rule_and_save(process_id=process_id, rule=rule, process_data_dict=process_data_dict, server_id=server['_id'])
class PluginAlerter(Alerter):
def check(self, data=None, plugin=None, server=None):
plugin_data = data.get('gauges', None)
rules = alerts_model.get_alerts_for_plugin(plugin=plugin)
if len(rules) > 0:
for rule in rules:
alert = plugin_alerts.check(data=plugin_data, rule=rule)
if alert:
alerts_model.save_occurence(alert)
# Global alerts
rules = alerts_model.get_alerts(type='plugin_global')
if len(rules) > 0:
all_plugins = plugin_model.get_for_server(server_id=server['_id'])
for rule in rules:
valid_rule = self.check_tags(server=server, rule=rule)
if valid_rule:
plugin_name = rule.get('plugin')
plugin_id = None
# Check if this server has a plugin with this name
for p in all_plugins.clone():
if p.get('name') == plugin_name:
plugin_id = p.get('_id')
if plugin_id:
alert = plugin_alerts.check(data=plugin_data, rule=rule)
if alert:
alerts_model.save_occurence(alert, server_id=server['_id'])
class UptimeAlerter(object):
def check(self, data, server):
process_data_dict = data.get('data', None)
rules = alerts_model.get_alerts(type='uptime', server=server)
if len(rules) + len(process_data_dict) > 0:
for rule in rules:
process_id = rule['process']
process_data = next((item for item in process_data_dict if item["p"] == process_id), None)
# Process is down
if not process_data:
alerts_model.save_uptime_occurence(rule, data=process_data)
class NotSendingDataAlerter(object):
def check(self):
time_now = unix_utc_now()
alerts = alerts_model.get_alerts_not_sending_data()
for alert in alerts:
period = alert.get('period')
for server in alert.get('server_data'):
last_check = server.get('last_check')
# Skip all the servers with no agent installed
if last_check != None:
since_last_check = time_now - last_check # 65 seconds, 60 seconds sleep, 5 seconds to collect
if since_last_check > (period + 10): # Trigger alert, add 10 seconds buffer
alert['server'] = server
alerts_model.save_notsendingdata_occurence(alert=alert)
class HealthCheckAlerter(object):
def check(self, data=None, server=None):
alerts = alerts_model.get_alerts(type='health_check')
for alert in alerts:
# Data is list
for d in data:
trigger = healthcheck_alert_checker.check(data=d, rule=alert)
# Will scan all the data, check for relevancy and then check the specific entry
if trigger:
alerts_model.save_healtcheck_occurence(trigger=trigger, server_id=server['_id'])
server_alerter = ServerAlerter()
process_alerter = ProcessAlerter()
uptime_alerter = UptimeAlerter()
plugin_alerter = PluginAlerter()
health_check_alerter = HealthCheckAlerter()
notsendingdata_alerter = NotSendingDataAlerter()
|
amonapp/amon
|
amon/apps/alerts/alerter.py
|
Python
|
agpl-3.0
| 6,829
|
from lims.browser import BrowserView
from dependencies.dependency import getToolByName
from dependencies.dependency import check as CheckAuthenticator
import json
# NOTE: 'Forbidden' is caught below; in a Zope/Plone stack it normally comes
# from zExceptions (assumed import, adjust to this project's dependency shims)
from zExceptions import Forbidden
class ajaxGetInstruments(BrowserView):
""" Returns a json list with the instruments assigned to the method
with the following structure:
[{'uid': <instrument_uid>,
'title': <instrument_absolute_path>,
'url': <instrument_url>,
'outofdate': True|False,
'qcfail': True|False,
'isvalid': True|False},
]
"""
def __call__(self):
instruments = []
try:
CheckAuthenticator(self.request)
except Forbidden:
return json.dumps(instruments)
bsc = getToolByName(self, 'portal_catalog')
method = bsc(portal_type='Method', UID=self.request.get("uid", '0'))
if method and len(method) == 1:
method = method[0].getObject()
for i in method.getInstruments():
instrument = { 'uid' : i.UID(),
'title': i.Title(),
'url': i.absolute_url_path(),
'outofdate': i.isOutOfDate(),
'qcfail': not i.isQCValid(),
'isvalid': i.isValid()}
instruments.append(instrument)
return json.dumps(instruments)
class ajaxGetMethodServiceInstruments(BrowserView):
""" Returns a json list with the instruments assigned to the method
and to the analysis service with the following structure:
[{'uid': <instrument_uid>,
'title': <instrument_absolute_path>,
'url': <instrument_url>,
'outofdate': True|False,
'qcfail': True|False,
'isvalid': True|False},
]
If no method assigned, returns the instruments assigned to the
service that have no method assigned.
If no service assigned, returns empty
"""
def __call__(self):
instruments = []
try:
CheckAuthenticator(self.request)
except Forbidden:
return json.dumps(instruments)
uc = getToolByName(self, 'uid_catalog')
service = uc(portal_type='AnalysisService', UID=self.request.get("suid", '0'))
if not service or len(service) != 1:
return json.dumps(instruments)
service = service[0].getObject()
sinstr = service.getAvailableInstruments()
if not sinstr:
return json.dumps(instruments)
method = uc(portal_type='Method', UID=self.request.get("muid", '0'))
if not method or len(method) != 1:
for i in sinstr:
if not i.getMethod():
instrument = { 'uid' : i.UID(),
'title': i.Title(),
'url': i.absolute_url_path(),
'outofdate': i.isOutOfDate(),
'qcfail': not i.isQCValid(),
'isvalid': i.isValid()}
instruments.append(instrument)
return json.dumps(instruments)
method = method[0].getObject()
iuids = [s.UID() for s in sinstr]
for i in method.getInstruments():
if i.UID() in iuids:
instrument = { 'uid' : i.UID(),
'title': i.Title(),
'url': i.absolute_url_path(),
'outofdate': i.isOutOfDate(),
'qcfail': not i.isQCValid(),
'isvalid': i.isValid()}
instruments.append(instrument)
return json.dumps(instruments)
|
sciCloud/OLiMS
|
lims/browser/method.py
|
Python
|
agpl-3.0
| 3,775
|
# -*- coding: utf-8 -*-
"""
End-to-end tests for the Account Settings page.
"""
from unittest import skip
from nose.plugins.attrib import attr
from bok_choy.web_app_test import WebAppTest
from ...pages.lms.account_settings import AccountSettingsPage
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.dashboard import DashboardPage
from ..helpers import EventsTestMixin
class AccountSettingsTestMixin(EventsTestMixin, WebAppTest):
"""
Mixin with helper methods to test the account settings page.
"""
CHANGE_INITIATED_EVENT_NAME = u"edx.user.settings.change_initiated"
USER_SETTINGS_CHANGED_EVENT_NAME = 'edx.user.settings.changed'
ACCOUNT_SETTINGS_REFERER = u"/account/settings"
def visit_account_settings_page(self):
"""
Visit the account settings page for the current user, and store the page instance
as self.account_settings_page.
"""
# pylint: disable=attribute-defined-outside-init
self.account_settings_page = AccountSettingsPage(self.browser)
self.account_settings_page.visit()
self.account_settings_page.wait_for_ajax()
def log_in_as_unique_user(self, email=None):
"""
Create a unique user and return the account's username and id.
"""
username = "test_{uuid}".format(uuid=self.unique_id[0:6])
auto_auth_page = AutoAuthPage(self.browser, username=username, email=email).visit()
user_id = auto_auth_page.get_user_id()
return username, user_id
def settings_changed_event_filter(self, event):
"""Filter out any events that are not "settings changed" events."""
return event['event_type'] == self.USER_SETTINGS_CHANGED_EVENT_NAME
def expected_settings_changed_event(self, setting, old, new, table=None):
"""A dictionary representing the expected fields in a "settings changed" event."""
return {
'username': self.username,
'referer': self.get_settings_page_url(),
'event': {
'user_id': self.user_id,
'setting': setting,
'old': old,
'new': new,
'truncated': [],
'table': table or 'auth_userprofile'
}
}
def settings_change_initiated_event_filter(self, event):
"""Filter out any events that are not "settings change initiated" events."""
return event['event_type'] == self.CHANGE_INITIATED_EVENT_NAME
def expected_settings_change_initiated_event(self, setting, old, new, username=None, user_id=None):
"""A dictionary representing the expected fields in a "settings change initiated" event."""
return {
'username': username or self.username,
'referer': self.get_settings_page_url(),
'event': {
'user_id': user_id or self.user_id,
'setting': setting,
'old': old,
'new': new,
}
}
def get_settings_page_url(self):
"""The absolute URL of the account settings page given the test context."""
return self.relative_path_to_absolute_uri(self.ACCOUNT_SETTINGS_REFERER)
def assert_no_setting_changed_event(self):
"""Assert no setting changed event has been emitted thus far."""
self.assert_no_matching_events_were_emitted({'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME})
@attr('shard_8')
class DashboardMenuTest(AccountSettingsTestMixin, WebAppTest):
"""
Tests that the dashboard menu works correctly with the account settings page.
"""
def test_link_on_dashboard_works(self):
"""
Scenario: Verify that the "Account" link works from the dashboard.
Given that I am a registered user
And I visit my dashboard
And I click on "Account" in the top drop down
Then I should see my account settings page
"""
self.log_in_as_unique_user()
dashboard_page = DashboardPage(self.browser)
dashboard_page.visit()
dashboard_page.click_username_dropdown()
self.assertIn('Account', dashboard_page.username_dropdown_link_text)
dashboard_page.click_account_settings_link()
@attr('shard_8')
class AccountSettingsPageTest(AccountSettingsTestMixin, WebAppTest):
"""
Tests that verify behaviour of the Account Settings page.
"""
SUCCESS_MESSAGE = 'Your changes have been saved.'
def setUp(self):
"""
Initialize account and pages.
"""
super(AccountSettingsPageTest, self).setUp()
self.username, self.user_id = self.log_in_as_unique_user()
self.visit_account_settings_page()
def test_page_view_event(self):
"""
Scenario: An event should be recorded when the "Account Settings"
page is viewed.
Given that I am a registered user
And I visit my account settings page
Then a page view analytics event should be recorded
"""
actual_events = self.wait_for_events(
event_filter={'event_type': 'edx.user.settings.viewed'}, number_of_matches=1)
self.assert_events_match(
[
{
'event': {
'user_id': self.user_id,
'page': 'account',
'visibility': None
}
}
],
actual_events
)
def test_all_sections_and_fields_are_present(self):
"""
Scenario: Verify that all sections and fields are present on the page.
"""
expected_sections_structure = [
{
'title': 'Basic Account Information (required)',
'fields': [
'Username',
'Full Name',
'Email Address',
'Password',
'Language',
'Country or Region'
]
},
{
'title': 'Additional Information (optional)',
'fields': [
'Education Completed',
'Gender',
'Year of Birth',
'Preferred Language',
]
},
{
'title': 'Connected Accounts',
'fields': [
'Dummy',
'Facebook',
'Google',
]
}
]
self.assertEqual(self.account_settings_page.sections_structure(), expected_sections_structure)
def _test_readonly_field(self, field_id, title, value):
"""
Test behavior of a readonly field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.value_for_readonly_field(field_id), value)
def _test_text_field(
self, field_id, title, initial_value, new_invalid_value, new_valid_values, success_message=SUCCESS_MESSAGE,
assert_after_reload=True
):
"""
Test behaviour of a text field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.value_for_text_field(field_id), initial_value)
self.assertEqual(
self.account_settings_page.value_for_text_field(field_id, new_invalid_value), new_invalid_value
)
self.account_settings_page.wait_for_indicator(field_id, 'validation-error')
self.browser.refresh()
self.assertNotEqual(self.account_settings_page.value_for_text_field(field_id), new_invalid_value)
for new_value in new_valid_values:
self.assertEqual(self.account_settings_page.value_for_text_field(field_id, new_value), new_value)
self.account_settings_page.wait_for_message(field_id, success_message)
if assert_after_reload:
self.browser.refresh()
self.assertEqual(self.account_settings_page.value_for_text_field(field_id), new_value)
def _test_dropdown_field(
self, field_id, title, initial_value, new_values, success_message=SUCCESS_MESSAGE, reloads_on_save=False
):
"""
Test behaviour of a dropdown field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.value_for_dropdown_field(field_id), initial_value)
for new_value in new_values:
self.assertEqual(self.account_settings_page.value_for_dropdown_field(field_id, new_value), new_value)
self.account_settings_page.wait_for_message(field_id, success_message)
if reloads_on_save:
self.account_settings_page.wait_for_loading_indicator()
else:
self.browser.refresh()
self.account_settings_page.wait_for_page()
self.assertEqual(self.account_settings_page.value_for_dropdown_field(field_id), new_value)
def _test_link_field(self, field_id, title, link_title, success_message):
"""
Test behaviour a link field.
"""
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.link_title_for_link_field(field_id), link_title)
self.account_settings_page.click_on_link_in_link_field(field_id)
self.account_settings_page.wait_for_message(field_id, success_message)
def test_username_field(self):
"""
Test behaviour of "Username" field.
"""
self._test_readonly_field('username', 'Username', self.username)
def test_full_name_field(self):
"""
Test behaviour of "Full Name" field.
"""
self._test_text_field(
u'name',
u'Full Name',
self.username,
u'@',
[u'another name', self.username],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event('name', self.username, 'another name'),
self.expected_settings_changed_event('name', 'another name', self.username),
],
actual_events
)
def test_email_field(self):
"""
Test behaviour of "Email" field.
"""
email = u"test@example.com"
username, user_id = self.log_in_as_unique_user(email=email)
self.visit_account_settings_page()
self._test_text_field(
u'email',
u'Email Address',
email,
u'@',
[u'me@here.com', u'you@there.com'],
success_message='Click the link in the message to update your email address.',
assert_after_reload=False
)
actual_events = self.wait_for_events(
event_filter=self.settings_change_initiated_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_change_initiated_event(
'email', email, 'me@here.com', username=username, user_id=user_id),
# NOTE the first email change was never confirmed, so old has not changed.
self.expected_settings_change_initiated_event(
'email', email, 'you@there.com', username=username, user_id=user_id),
],
actual_events
)
# Email is not saved until user confirms, so no events should have been
# emitted.
self.assert_no_setting_changed_event()
def test_password_field(self):
"""
Test behaviour of "Password" field.
"""
self._test_link_field(
u'password',
u'Password',
u'Reset Password',
success_message='Click the link in the message to reset your password.',
)
event_filter = self.expected_settings_change_initiated_event('password', None, None)
self.wait_for_events(event_filter=event_filter, number_of_matches=1)
# Like email, since the user has not confirmed their password change,
# the field has not yet changed, so no events will have been emitted.
self.assert_no_setting_changed_event()
@skip(
'On bokchoy test servers, language changes take a few reloads to fully realize '
'which means we can no longer reliably match the strings in the html in other tests.'
)
def test_language_field(self):
"""
Test behaviour of "Language" field.
"""
self._test_dropdown_field(
u'pref-lang',
u'Language',
u'English',
[u'Dummy Language (Esperanto)', u'English'],
reloads_on_save=True,
)
def test_education_completed_field(self):
"""
Test behaviour of "Education Completed" field.
"""
self._test_dropdown_field(
u'level_of_education',
u'Education Completed',
u'',
[u'Bachelor\'s degree', u''],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event('level_of_education', None, 'b'),
self.expected_settings_changed_event('level_of_education', 'b', None),
],
actual_events
)
def test_gender_field(self):
"""
Test behaviour of "Gender" field.
"""
self._test_dropdown_field(
u'gender',
u'Gender',
u'',
[u'Female', u''],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event('gender', None, 'f'),
self.expected_settings_changed_event('gender', 'f', None),
],
actual_events
)
def test_year_of_birth_field(self):
"""
Test behaviour of "Year of Birth" field.
"""
# Note that when we clear the year_of_birth here we're firing an event.
self.assertEqual(self.account_settings_page.value_for_dropdown_field('year_of_birth', ''), '')
expected_events = [
self.expected_settings_changed_event('year_of_birth', None, 1980),
self.expected_settings_changed_event('year_of_birth', 1980, None),
]
with self.assert_events_match_during(self.settings_changed_event_filter, expected_events):
self._test_dropdown_field(
u'year_of_birth',
u'Year of Birth',
u'',
[u'1980', u''],
)
def test_country_field(self):
"""
Test behaviour of "Country or Region" field.
"""
self._test_dropdown_field(
u'country',
u'Country or Region',
u'',
[u'Pakistan', u'Palau'],
)
def test_preferred_language_field(self):
"""
Test behaviour of "Preferred Language" field.
"""
self._test_dropdown_field(
u'language_proficiencies',
u'Preferred Language',
u'',
[u'Pushto', u''],
)
actual_events = self.wait_for_events(event_filter=self.settings_changed_event_filter, number_of_matches=2)
self.assert_events_match(
[
self.expected_settings_changed_event(
'language_proficiencies', [], [{'code': 'ps'}], table='student_languageproficiency'),
self.expected_settings_changed_event(
'language_proficiencies', [{'code': 'ps'}], [], table='student_languageproficiency'),
],
actual_events
)
def test_connected_accounts(self):
"""
Test that fields for third party auth providers exist.
Currently there is no way to test the whole authentication process
because that would require accounts with the providers.
"""
providers = (
['auth-oa2-facebook', 'Facebook', 'Link'],
['auth-oa2-google-oauth2', 'Google', 'Link'],
)
for field_id, title, link_title in providers:
self.assertEqual(self.account_settings_page.title_for_field(field_id), title)
self.assertEqual(self.account_settings_page.link_title_for_link_field(field_id), link_title)
@attr('a11y')
class AccountSettingsA11yTest(AccountSettingsTestMixin, WebAppTest):
"""
Class to test account settings accessibility.
"""
def test_account_settings_a11y(self):
"""
Test the accessibility of the account settings page.
"""
self.log_in_as_unique_user()
self.visit_account_settings_page()
self.account_settings_page.a11y_audit.config.set_rules({
'ignore': [
'link-href', # TODO: AC-233
],
})
self.account_settings_page.a11y_audit.check_for_accessibility_errors()
|
analyseuc3m/ANALYSE-v1
|
common/test/acceptance/tests/lms/test_account_settings.py
|
Python
|
agpl-3.0
| 17,430
|
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import sys
from weboob.capabilities.job import ICapJob
from weboob.tools.application.repl import ReplApplication, defaultcount
from weboob.tools.application.formatters.iformatter import IFormatter, PrettyFormatter
__all__ = ['Handjoob']
class JobAdvertFormatter(IFormatter):
MANDATORY_FIELDS = ('id', 'url', 'publication_date', 'title')
def format_obj(self, obj, alias):
result = u'%s%s%s\n' % (self.BOLD, obj.title, self.NC)
result += 'url: %s\n' % obj.url
if hasattr(obj, 'publication_date') and obj.publication_date:
result += 'Publication date : %s\n' % obj.publication_date.strftime('%Y-%m-%d')
if hasattr(obj, 'place') and obj.place:
result += 'Location: %s\n' % obj.place
if hasattr(obj, 'society_name') and obj.society_name:
result += 'Society : %s\n' % obj.society_name
if hasattr(obj, 'job_name') and obj.job_name:
result += 'Job name : %s\n' % obj.job_name
if hasattr(obj, 'contract_type') and obj.contract_type:
result += 'Contract : %s\n' % obj.contract_type
if hasattr(obj, 'pay') and obj.pay:
result += 'Pay : %s\n' % obj.pay
if hasattr(obj, 'formation') and obj.formation:
result += 'Formation : %s\n' % obj.formation
if hasattr(obj, 'experience') and obj.experience:
result += 'Experience : %s\n' % obj.experience
if hasattr(obj, 'description') and obj.description:
result += 'Description : %s\n' % obj.description
return result
class JobAdvertListFormatter(PrettyFormatter):
MANDATORY_FIELDS = ('id', 'title')
def get_title(self, obj):
return '%s' % (obj.title)
def get_description(self, obj):
result = u''
if hasattr(obj, 'publication_date') and obj.publication_date:
result += '\tPublication date : %s\n' % obj.publication_date.strftime('%Y-%m-%d')
if hasattr(obj, 'place') and obj.place:
result += '\tLocation: %s\n' % obj.place
if hasattr(obj, 'society_name') and obj.society_name:
result += '\tSociety : %s\n' % obj.society_name
if hasattr(obj, 'contract_type') and obj.contract_type:
result += '\tContract : %s\n' % obj.contract_type
return result.strip('\n\t')
class Handjoob(ReplApplication):
APPNAME = 'handjoob'
VERSION = '0.i'
COPYRIGHT = 'Copyright(C) 2012 Bezleputh'
DESCRIPTION = "Console application to search for a job."
SHORT_DESCRIPTION = "search for a job"
CAPS = ICapJob
EXTRA_FORMATTERS = {'job_advert_list': JobAdvertListFormatter,
'job_advert': JobAdvertFormatter,
}
COMMANDS_FORMATTERS = {'search': 'job_advert_list',
'ls': 'job_advert_list',
'info': 'job_advert',
}
@defaultcount(10)
def do_search(self, pattern):
"""
search PATTERN
Search for an advert matching a PATTERN.
"""
self.change_path([u'search'])
self.start_format(pattern=pattern)
for backend, job_advert in self.do('search_job', pattern):
self.cached_format(job_advert)
@defaultcount(10)
def do_ls(self, line):
"""
advanced search
Search for an advert matching to advanced filters.
"""
self.change_path([u'advanced'])
for backend, job_advert in self.do('advanced_search_job'):
self.cached_format(job_advert)
def complete_info(self, text, line, *ignored):
args = line.split(' ')
if len(args) == 2:
return self._complete_object()
def do_info(self, _id):
"""
info ID
Get information about an advert.
"""
if not _id:
print >>sys.stderr, 'This command takes an argument: %s' % self.get_command_help('info', short=True)
return 2
job_advert = self.get_object(_id, 'get_job_advert')
if not job_advert:
print >>sys.stderr, 'Job advert not found: %s' % _id
return 3
self.start_format()
self.format(job_advert)
|
yannrouillard/weboob
|
weboob/applications/handjoob/handjoob.py
|
Python
|
agpl-3.0
| 4,944
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.osv import osv
from openerp import netsvc
import time
from datetime import datetime
from openerp.tools.translate import _
#----------------------------------------------------------
# Work Centers
#----------------------------------------------------------
# capacity_hour : capacity per hour. default: 1.0.
#          E.g. if 5 operations can run concurrently (5 employees): capacity_hour = 5
# unit_per_cycle : how many units are produced in one cycle
class stock_move(osv.osv):
_inherit = 'stock.move'
_columns = {
'move_dest_id_lines': fields.one2many('stock.move','move_dest_id', 'Children Moves')
}
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
default.update({
'move_dest_id_lines': [],
})
return super(stock_move, self).copy(cr, uid, id, default, context)
stock_move()
class mrp_production_workcenter_line(osv.osv):
def _get_date_end(self, cr, uid, ids, field_name, arg, context=None):
""" Finds ending date.
@return: Dictionary of values.
"""
ops = self.browse(cr, uid, ids, context=context)
date_and_hours_by_cal = [(op.date_planned, op.hour, op.workcenter_id.calendar_id.id) for op in ops if op.date_planned]
intervals = self.pool.get('resource.calendar').interval_get_multi(cr, uid, date_and_hours_by_cal)
res = {}
for op in ops:
res[op.id] = False
if op.date_planned:
i = intervals.get((op.date_planned, op.hour, op.workcenter_id.calendar_id.id))
if i:
res[op.id] = i[-1][1].strftime('%Y-%m-%d %H:%M:%S')
else:
res[op.id] = op.date_planned
return res
def onchange_production_id(self, cr, uid, ids, production_id, context=None):
if not production_id:
return {}
production = self.pool.get('mrp.production').browse(cr, uid, production_id, context=None)
result = {
'product': production.product_id.id,
'qty': production.product_qty,
'uom': production.product_uom.id,
}
return {'value': result}
_inherit = 'mrp.production.workcenter.line'
_order = "sequence, date_planned"
_columns = {
        'state': fields.selection([('draft','Draft'),('cancel','Cancelled'),('pause','Pending'),('startworking', 'In Progress'),('done','Finished')],'Status', readonly=True,
                                 help="* When a work order is created it is set to 'Draft' status.\n" \
                                      "* When the user starts the work order, it is set to 'In Progress' status.\n" \
                                      "* While a work order is running, the user can set it to 'Pending' status in order to stop it or to make changes.\n" \
                                      "* When the user cancels a work order, it is set to 'Cancelled' status.\n" \
                                      "* When the order is completely processed, it is set to 'Finished' status."),
'date_planned': fields.datetime('Scheduled Date', select=True),
'date_planned_end': fields.function(_get_date_end, string='End Date', type='datetime'),
'date_start': fields.datetime('Start Date'),
'date_finished': fields.datetime('End Date'),
'delay': fields.float('Working Hours',help="The elapsed time between operation start and stop in this Work Center",readonly=True),
'production_state':fields.related('production_id','state',
type='selection',
selection=[('draft','Draft'),('picking_except', 'Picking Exception'),('confirmed','Waiting Goods'),('ready','Ready to Produce'),('in_production','In Production'),('cancel','Canceled'),('done','Done')],
string='Production Status', readonly=True),
'product':fields.related('production_id','product_id',type='many2one',relation='product.product',string='Product',
readonly=True),
'qty':fields.related('production_id','product_qty',type='float',string='Qty',readonly=True, store=True),
'uom':fields.related('production_id','product_uom',type='many2one',relation='product.uom',string='Unit of Measure',readonly=True),
}
_defaults = {
'state': 'draft',
'delay': 0.0,
'production_state': 'draft'
}
def modify_production_order_state(self, cr, uid, ids, action):
""" Modifies production order state if work order state is changed.
@param action: Action to perform.
@return: Nothing
"""
wf_service = netsvc.LocalService("workflow")
prod_obj_pool = self.pool.get('mrp.production')
oper_obj = self.browse(cr, uid, ids)[0]
prod_obj = oper_obj.production_id
if action == 'start':
if prod_obj.state =='confirmed':
prod_obj_pool.force_production(cr, uid, [prod_obj.id])
wf_service.trg_validate(uid, 'mrp.production', prod_obj.id, 'button_produce', cr)
elif prod_obj.state =='ready':
wf_service.trg_validate(uid, 'mrp.production', prod_obj.id, 'button_produce', cr)
elif prod_obj.state =='in_production':
return
else:
raise osv.except_osv(_('Error!'),_('Manufacturing order cannot be started in state "%s"!') % (prod_obj.state,))
else:
oper_ids = self.search(cr,uid,[('production_id','=',prod_obj.id)])
obj = self.browse(cr,uid,oper_ids)
flag = True
for line in obj:
if line.state != 'done':
flag = False
if flag:
for production in prod_obj_pool.browse(cr, uid, [prod_obj.id], context= None):
if production.move_lines or production.move_created_ids:
prod_obj_pool.action_produce(cr,uid, production.id, production.product_qty, 'consume_produce', context = None)
wf_service.trg_validate(uid, 'mrp.production', oper_obj.production_id.id, 'button_produce_done', cr)
return
def write(self, cr, uid, ids, vals, context=None, update=True):
result = super(mrp_production_workcenter_line, self).write(cr, uid, ids, vals, context=context)
prod_obj = self.pool.get('mrp.production')
if vals.get('date_planned', False) and update:
for prod in self.browse(cr, uid, ids, context=context):
if prod.production_id.workcenter_lines:
dstart = min(vals['date_planned'], prod.production_id.workcenter_lines[0]['date_planned'])
prod_obj.write(cr, uid, [prod.production_id.id], {'date_start':dstart}, context=context, mini=False)
return result
def action_draft(self, cr, uid, ids, context=None):
""" Sets state to draft.
@return: True
"""
return self.write(cr, uid, ids, {'state': 'draft'}, context=context)
def action_start_working(self, cr, uid, ids, context=None):
""" Sets state to start working and writes starting date.
@return: True
"""
self.modify_production_order_state(cr, uid, ids, 'start')
self.write(cr, uid, ids, {'state':'startworking', 'date_start': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)
return True
def action_done(self, cr, uid, ids, context=None):
""" Sets state to done, writes finish date and calculates delay.
@return: True
"""
delay = 0.0
date_now = time.strftime('%Y-%m-%d %H:%M:%S')
obj_line = self.browse(cr, uid, ids[0])
date_start = datetime.strptime(obj_line.date_start,'%Y-%m-%d %H:%M:%S')
date_finished = datetime.strptime(date_now,'%Y-%m-%d %H:%M:%S')
delay += (date_finished-date_start).days * 24
delay += (date_finished-date_start).seconds / float(60*60)
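        # Illustrative arithmetic: a work order started 1 day and 6 hours before
        # date_now yields delay = 1 * 24 + 21600 / 3600.0 = 30.0 working hours.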
self.write(cr, uid, ids, {'state':'done', 'date_finished': date_now,'delay':delay}, context=context)
self.modify_production_order_state(cr,uid,ids,'done')
return True
def action_cancel(self, cr, uid, ids, context=None):
""" Sets state to cancel.
@return: True
"""
return self.write(cr, uid, ids, {'state':'cancel'}, context=context)
def action_pause(self, cr, uid, ids, context=None):
""" Sets state to pause.
@return: True
"""
return self.write(cr, uid, ids, {'state':'pause'}, context=context)
def action_resume(self, cr, uid, ids, context=None):
""" Sets state to startworking.
@return: True
"""
return self.write(cr, uid, ids, {'state':'startworking'}, context=context)
class mrp_production(osv.osv):
_inherit = 'mrp.production'
_columns = {
        'allow_reorder': fields.boolean('Free Serialisation', help="Check this to be able to move all production orders independently, without moving dependent ones."),
}
def _production_date_end(self, cr, uid, ids, prop, unknow_none, context=None):
""" Calculates planned end date of production order.
@return: Dictionary of values
"""
result = {}
for prod in self.browse(cr, uid, ids, context=context):
result[prod.id] = prod.date_planned
for line in prod.workcenter_lines:
result[prod.id] = max(line.date_planned_end, result[prod.id])
return result
def action_production_end(self, cr, uid, ids, context=None):
""" Finishes work order if production order is done.
@return: Super method
"""
obj = self.browse(cr, uid, ids)[0]
wf_service = netsvc.LocalService("workflow")
for workcenter_line in obj.workcenter_lines:
if workcenter_line.state == 'draft':
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', workcenter_line.id, 'button_start_working', cr)
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', workcenter_line.id, 'button_done', cr)
return super(mrp_production,self).action_production_end(cr, uid, ids, context=context)
def action_in_production(self, cr, uid, ids, context=None):
""" Changes state to In Production and writes starting date.
@return: True
"""
obj = self.browse(cr, uid, ids)[0]
workcenter_pool = self.pool.get('mrp.production.workcenter.line')
wf_service = netsvc.LocalService("workflow")
for prod in self.browse(cr, uid, ids):
if prod.workcenter_lines:
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', prod.workcenter_lines[0].id, 'button_start_working', cr)
return super(mrp_production,self).action_in_production(cr, uid, ids, context=context)
def action_cancel(self, cr, uid, ids, context=None):
""" Cancels work order if production order is canceled.
@return: Super method
"""
obj = self.browse(cr, uid, ids,context=context)[0]
wf_service = netsvc.LocalService("workflow")
for workcenter_line in obj.workcenter_lines:
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', workcenter_line.id, 'button_cancel', cr)
return super(mrp_production,self).action_cancel(cr,uid,ids,context=context)
def _compute_planned_workcenter(self, cr, uid, ids, context=None, mini=False):
""" Computes planned and finished dates for work order.
@return: Calculated date
"""
dt_end = datetime.now()
if context is None:
context = {}
for po in self.browse(cr, uid, ids, context=context):
dt_end = datetime.strptime(po.date_planned, '%Y-%m-%d %H:%M:%S')
if not po.date_start:
self.write(cr, uid, [po.id], {
'date_start': po.date_planned
}, context=context, update=False)
old = None
for wci in range(len(po.workcenter_lines)):
wc = po.workcenter_lines[wci]
if (old is None) or (wc.sequence>old):
dt = dt_end
if context.get('__last_update'):
del context['__last_update']
if (wc.date_planned < dt.strftime('%Y-%m-%d %H:%M:%S')) or mini:
self.pool.get('mrp.production.workcenter.line').write(cr, uid, [wc.id], {
'date_planned': dt.strftime('%Y-%m-%d %H:%M:%S')
}, context=context, update=False)
i = self.pool.get('resource.calendar').interval_get(
cr,
uid,
wc.workcenter_id.calendar_id and wc.workcenter_id.calendar_id.id or False,
dt,
wc.hour or 0.0
)
if i:
dt_end = max(dt_end, i[-1][1])
else:
dt_end = datetime.strptime(wc.date_planned_end, '%Y-%m-%d %H:%M:%S')
old = wc.sequence or 0
super(mrp_production, self).write(cr, uid, [po.id], {
'date_finished': dt_end
})
return dt_end
def _move_pass(self, cr, uid, ids, context=None):
""" Calculates start date for stock moves finding interval from resource calendar.
@return: True
"""
for po in self.browse(cr, uid, ids, context=context):
if po.allow_reorder:
continue
todo = po.move_lines
dt = datetime.strptime(po.date_start,'%Y-%m-%d %H:%M:%S')
while todo:
l = todo.pop(0)
if l.state in ('done','cancel','draft'):
continue
todo += l.move_dest_id_lines
if l.production_id and (l.production_id.date_finished > dt.strftime('%Y-%m-%d %H:%M:%S')):
if l.production_id.state not in ('done','cancel'):
for wc in l.production_id.workcenter_lines:
i = self.pool.get('resource.calendar').interval_min_get(
cr,
uid,
wc.workcenter_id.calendar_id.id or False,
dt, wc.hour or 0.0
)
dt = i[0][0]
if l.production_id.date_start > dt.strftime('%Y-%m-%d %H:%M:%S'):
self.write(cr, uid, [l.production_id.id], {'date_start':dt.strftime('%Y-%m-%d %H:%M:%S')}, mini=True)
return True
def _move_futur(self, cr, uid, ids, context=None):
""" Calculates start date for stock moves.
@return: True
"""
for po in self.browse(cr, uid, ids, context=context):
if po.allow_reorder:
continue
for line in po.move_created_ids:
l = line
while l.move_dest_id:
l = l.move_dest_id
if l.state in ('done','cancel','draft'):
break
if l.production_id.state in ('done','cancel'):
break
if l.production_id and (l.production_id.date_start < po.date_finished):
self.write(cr, uid, [l.production_id.id], {'date_start': po.date_finished})
break
return True
def write(self, cr, uid, ids, vals, context=None, update=True, mini=True):
direction = {}
if vals.get('date_start', False):
for po in self.browse(cr, uid, ids, context=context):
direction[po.id] = cmp(po.date_start, vals.get('date_start', False))
result = super(mrp_production, self).write(cr, uid, ids, vals, context=context)
if (vals.get('workcenter_lines', False) or vals.get('date_start', False) or vals.get('date_planned', False)) and update:
self._compute_planned_workcenter(cr, uid, ids, context=context, mini=mini)
        for d in direction:
            if direction[d] == 1:
                # the production order has been moved to the past
                self._move_pass(cr, uid, [d], context=context)
            elif direction[d] == -1:
                # the production order has been moved to the future
                self._move_futur(cr, uid, [d], context=context)
return result
def action_compute(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product and planned date of work order.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
result = super(mrp_production, self).action_compute(cr, uid, ids, properties=properties, context=context)
self._compute_planned_workcenter(cr, uid, ids, context=context)
return result
mrp_production()
class mrp_operations_operation_code(osv.osv):
_name="mrp_operations.operation.code"
_columns={
'name': fields.char('Operation Name',size=64, required=True),
'code': fields.char('Code', size=16, required=True),
'start_stop': fields.selection([('start','Start'),('pause','Pause'),('resume','Resume'),('cancel','Cancelled'),('done','Done')], 'Status', required=True),
}
mrp_operations_operation_code()
class mrp_operations_operation(osv.osv):
_name="mrp_operations.operation"
def _order_date_search_production(self, cr, uid, ids, context=None):
""" Finds operations for a production order.
@return: List of ids
"""
operation_ids = self.pool.get('mrp_operations.operation').search(cr, uid, [('production_id','=',ids[0])], context=context)
return operation_ids
def _get_order_date(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates planned date for an operation.
@return: Dictionary of values
"""
res={}
operation_obj = self.browse(cr, uid, ids, context=context)
for operation in operation_obj:
res[operation.id] = operation.production_id.date_planned
return res
def calc_delay(self, cr, uid, vals):
""" Calculates delay of work order.
@return: Delay
"""
code_lst = []
time_lst = []
code_ids = self.pool.get('mrp_operations.operation.code').search(cr, uid, [('id','=',vals['code_id'])])
code = self.pool.get('mrp_operations.operation.code').browse(cr, uid, code_ids)[0]
oper_ids = self.search(cr,uid,[('production_id','=',vals['production_id']),('workcenter_id','=',vals['workcenter_id'])])
oper_objs = self.browse(cr,uid,oper_ids)
for oper in oper_objs:
code_lst.append(oper.code_id.start_stop)
time_lst.append(oper.date_start)
code_lst.append(code.start_stop)
time_lst.append(vals['date_start'])
diff = 0
for i in range(0,len(code_lst)):
if code_lst[i] == 'pause' or code_lst[i] == 'done' or code_lst[i] == 'cancel':
if not i: continue
if code_lst[i-1] not in ('resume','start'):
continue
a = datetime.strptime(time_lst[i-1],'%Y-%m-%d %H:%M:%S')
b = datetime.strptime(time_lst[i],'%Y-%m-%d %H:%M:%S')
diff += (b-a).days * 24
diff += (b-a).seconds / float(60*60)
return diff
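    # Illustrative delay computation for calc_delay (timestamps are made up):
    #   start  2013-01-01 08:00:00
    #   pause  2013-01-01 10:30:00   -> pairs with 'start',  adds 2.5 hours
    #   resume 2013-01-01 11:00:00
    #   done   2013-01-01 12:00:00   -> pairs with 'resume', adds 1.0 hour
    # calc_delay() returns 3.5 for this sequence.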
def check_operation(self, cr, uid, vals):
""" Finds which operation is called ie. start, pause, done, cancel.
@param vals: Dictionary of values.
@return: True or False
"""
code_ids=self.pool.get('mrp_operations.operation.code').search(cr,uid,[('id','=',vals['code_id'])])
code=self.pool.get('mrp_operations.operation.code').browse(cr,uid,code_ids)[0]
code_lst = []
oper_ids=self.search(cr,uid,[('production_id','=',vals['production_id']),('workcenter_id','=',vals['workcenter_id'])])
oper_objs=self.browse(cr,uid,oper_ids)
if not oper_objs:
if code.start_stop!='start':
raise osv.except_osv(_('Sorry!'),_('Operation is not started yet!'))
return False
else:
for oper in oper_objs:
code_lst.append(oper.code_id.start_stop)
if code.start_stop=='start':
if 'start' in code_lst:
raise osv.except_osv(_('Sorry!'),_('Operation has already started! You can either Pause/Finish/Cancel the operation.'))
return False
if code.start_stop=='pause':
if code_lst[len(code_lst)-1]!='resume' and code_lst[len(code_lst)-1]!='start':
raise osv.except_osv(_('Error!'),_('In order to Pause the operation, it must be in the Start or Resume state!'))
return False
if code.start_stop=='resume':
if code_lst[len(code_lst)-1]!='pause':
raise osv.except_osv(_('Error!'),_('In order to Resume the operation, it must be in the Pause state!'))
return False
if code.start_stop=='done':
if code_lst[len(code_lst)-1]!='start' and code_lst[len(code_lst)-1]!='resume':
raise osv.except_osv(_('Sorry!'),_('In order to Finish the operation, it must be in the Start or Resume state!'))
return False
if 'cancel' in code_lst:
                    raise osv.except_osv(_('Sorry!'),_('Operation is already cancelled!'))
return False
if code.start_stop=='cancel':
if not 'start' in code_lst :
raise osv.except_osv(_('Error!'),_('No operation to cancel.'))
return False
if 'done' in code_lst:
raise osv.except_osv(_('Error!'),_('Operation is already finished!'))
return False
return True
def write(self, cr, uid, ids, vals, context=None):
oper_objs = self.browse(cr, uid, ids, context=context)[0]
vals['production_id']=oper_objs.production_id.id
vals['workcenter_id']=oper_objs.workcenter_id.id
if 'code_id' in vals:
self.check_operation(cr, uid, vals)
if 'date_start' in vals:
vals['date_start']=vals['date_start']
vals['code_id']=oper_objs.code_id.id
delay=self.calc_delay(cr, uid, vals)
wc_op_id=self.pool.get('mrp.production.workcenter.line').search(cr,uid,[('workcenter_id','=',vals['workcenter_id']),('production_id','=',vals['production_id'])])
self.pool.get('mrp.production.workcenter.line').write(cr,uid,wc_op_id,{'delay':delay})
return super(mrp_operations_operation, self).write(cr, uid, ids, vals, context=context)
def create(self, cr, uid, vals, context=None):
wf_service = netsvc.LocalService('workflow')
code_ids=self.pool.get('mrp_operations.operation.code').search(cr,uid,[('id','=',vals['code_id'])])
code=self.pool.get('mrp_operations.operation.code').browse(cr, uid, code_ids, context=context)[0]
wc_op_id=self.pool.get('mrp.production.workcenter.line').search(cr,uid,[('workcenter_id','=',vals['workcenter_id']),('production_id','=',vals['production_id'])])
if code.start_stop in ('start','done','pause','cancel','resume'):
if not wc_op_id:
production_obj=self.pool.get('mrp.production').browse(cr, uid, vals['production_id'], context=context)
wc_op_id.append(self.pool.get('mrp.production.workcenter.line').create(cr,uid,{'production_id':vals['production_id'],'name':production_obj.product_id.name,'workcenter_id':vals['workcenter_id']}))
if code.start_stop=='start':
self.pool.get('mrp.production.workcenter.line').action_start_working(cr,uid,wc_op_id)
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', wc_op_id[0], 'button_start_working', cr)
if code.start_stop=='done':
self.pool.get('mrp.production.workcenter.line').action_done(cr,uid,wc_op_id)
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', wc_op_id[0], 'button_done', cr)
self.pool.get('mrp.production').write(cr,uid,vals['production_id'],{'date_finished':datetime.now().strftime('%Y-%m-%d %H:%M:%S')})
if code.start_stop=='pause':
self.pool.get('mrp.production.workcenter.line').action_pause(cr,uid,wc_op_id)
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', wc_op_id[0], 'button_pause', cr)
if code.start_stop=='resume':
self.pool.get('mrp.production.workcenter.line').action_resume(cr,uid,wc_op_id)
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', wc_op_id[0], 'button_resume', cr)
if code.start_stop=='cancel':
self.pool.get('mrp.production.workcenter.line').action_cancel(cr,uid,wc_op_id)
wf_service.trg_validate(uid, 'mrp.production.workcenter.line', wc_op_id[0], 'button_cancel', cr)
if not self.check_operation(cr, uid, vals):
return
delay=self.calc_delay(cr, uid, vals)
line_vals = {}
line_vals['delay'] = delay
if vals.get('date_start',False):
if code.start_stop == 'done':
line_vals['date_finished'] = vals['date_start']
elif code.start_stop == 'start':
line_vals['date_start'] = vals['date_start']
self.pool.get('mrp.production.workcenter.line').write(cr, uid, wc_op_id, line_vals, context=context)
return super(mrp_operations_operation, self).create(cr, uid, vals, context=context)
def initialize_workflow_instance(self, cr, uid, context=None):
wf_service = netsvc.LocalService("workflow")
line_ids = self.pool.get('mrp.production.workcenter.line').search(cr, uid, [], context=context)
for line_id in line_ids:
wf_service.trg_create(uid, 'mrp.production.workcenter.line', line_id, cr)
return True
_columns={
'production_id':fields.many2one('mrp.production','Production',required=True),
'workcenter_id':fields.many2one('mrp.workcenter','Work Center',required=True),
'code_id':fields.many2one('mrp_operations.operation.code','Code',required=True),
'date_start': fields.datetime('Start Date'),
'date_finished': fields.datetime('End Date'),
'order_date': fields.function(_get_order_date,string='Order Date',type='date',store={'mrp.production':(_order_date_search_production,['date_planned'], 10)}),
}
_defaults={
'date_start': lambda *a:datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
mrp_operations_operation()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
KousikaGanesh/purchaseandInventory
|
openerp/addons/mrp_operations/mrp_operations.py
|
Python
|
agpl-3.0
| 28,224
|
# This file is part of OpenHatch.
# Copyright (C) 2010 Parker Phinney
# Copyright (C) 2009, 2010 OpenHatch, Inc.
# Copyright (C) 2010 John Stumpo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import template
import re
from django.utils.html import escape
from urlparse import urlparse
import logging
register = template.Library()
# See Django document on inclusion tags
# <http://docs.djangoproject.com/en/dev/howto/custom-template-tags/#inclusion-tags>
@register.inclusion_tag('profile/contributors.html')
def show_other_contributors(project, user, count, **args):
return {
'project': project,
'user': user,
'contributors': project.get_n_other_contributors_than(count, user.get_profile()),
}
def person_tag_descriptions_for_tag_text(person, tag_text):
"Returns a boolean of whether the value is greater than the argument"
return person.get_tag_descriptions_for_keyword(tag_text)
# From <http://code.djangoproject.com/wiki/BasicComparisonFilters>
def gt(value, arg):
"Returns a boolean of whether the value is greater than the argument"
return value > int(arg)
@register.filter
def equals(value, arg):
return value == arg
def lt(value, arg):
"Returns a boolean of whether the value is less than the argument"
return value < int(arg)
def gte(value, arg):
"Returns a boolean of whether the value is greater than or equal to the argument"
return value >= int(arg)
def lte(value, arg):
"Returns a boolean of whether the value is less than or equal to the argument"
return value <= int(arg)
def length_gt(value, arg):
"Returns a boolean of whether the value's length is greater than the argument"
return len(value) > int(arg)
def length_lt(value, arg):
"Returns a boolean of whether the value's length is less than the argument"
return len(value) < int(arg)
def length_gte(value, arg):
"Returns a boolean of whether the value's length is greater than or equal to the argument"
return len(value) >= int(arg)
def length_lte(value, arg):
"Returns a boolean of whether the value's length is less than or equal to the argument"
return len(value) <= int(arg)
@register.filter
def sub(value, arg):
"Subtracts the arg from the value"
return int(value) - int(arg)
def break_long_words(value, max_word_length=8):
# if the word is really long, insert a <wbr> occasionally.
# We really want "value" to be Unicode. Sometimes it is, and sometimes it isn't. So...
if type(value) == str:
        # Django sometimes gives us the empty string as '', not u''.
# Are there other cases where we do get a byte string, not a Unicode string?
if value != '':
logging.warn("Wanted %r to be unicode. Instead it's %s. Moving on with life." % (value, type(value)))
# In all cases, I guess we should just buckle up and move on with life.
value = unicode(value, 'utf-8')
re_capitalized_word = re.compile(r'([A-Z][a-z][a-z]+)', re.UNICODE)
words = re_capitalized_word.split(value)
re_too_many_letters_in_a_row = re.compile(r'([\w]{%d}|[.\_^/])' % max_word_length, re.UNICODE)
broken_words = []
for word in words:
if word:
broken_words += re_too_many_letters_in_a_row.split(word)
broken_words = filter(lambda x: x, broken_words)
return "<wbr />".join(broken_words)
@register.filter
def prepend_http_if_necessary(value):
"""If someone makes "www.example.org" their homepage, then we need to prepend "http://" so it doesn't link to https://openhatch.org/people/username/www.example.org. This template filter prepends that."""
parsed = urlparse(value)
if not parsed.scheme:
return "http://" + parsed.geturl()
return value
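# For example, prepend_http_if_necessary('www.example.org') returns
# 'http://www.example.org', while URLs that already carry a scheme are
# returned unchanged.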
@register.filter
def is_logged_in(user, request):
return (user == request.user)
register.filter('gt', gt)
register.filter('lt', lt)
register.filter('gte', gte)
register.filter('lte', lte)
register.filter('length_gt', length_gt)
register.filter('length_lt', length_lt)
register.filter('length_gte', length_gte)
register.filter('length_lte', length_lte)
register.filter('break_long_words', break_long_words)
register.filter('person_tag_descriptions_for_tag_text', person_tag_descriptions_for_tag_text)
|
mzdaniel/oh-mainline
|
mysite/profile/templatetags/profile_extras.py
|
Python
|
agpl-3.0
| 4,899
|
# -*- coding: utf-8 -*-
#
# Author: Joël Grand-Guillaume
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp.osv import orm
from openerp.tools.translate import _
class logistic_requisition_cost_estimate(orm.TransientModel):
_inherit = 'logistic.requisition.cost.estimate'
def _check_requisition(self, cr, uid, requisition, context=None):
""" Check the rules to create a cost estimate from the
requisition
        :returns: list of tuples ('message', 'error_code')
"""
errors = []
if not requisition.budget_holder_id:
error = (_('The requisition must be validated '
'by the Budget Holder.'),
'NO_BUDGET_VALID')
errors.append(error)
return errors
|
jgrandguillaume/vertical-ngo
|
logistic_budget/wizard/cost_estimate.py
|
Python
|
agpl-3.0
| 1,462
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008-2012 Alistek Ltd (http://www.alistek.com) All Rights Reserved.
# General contacts <info@alistek.com>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This module is GPLv3 or newer and incompatible
# with OpenERP SA "AGPL + Private Use License"!
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from osv import fields
from osv import osv
import netsvc
import tools
from xml.dom import minidom
import os, base64
import urllib2
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from tools.translate import _
from report_aeroo_ooo.DocumentConverter import DocumentConversionException
from report_aeroo_ooo.report import OpenOffice_service
from report_aeroo.report_aeroo import aeroo_lock
_url = 'http://www.alistek.com/aeroo_banner/v6_1_report_aeroo_ooo.png'
class aeroo_config_installer(osv.osv_memory):
_name = 'aeroo_config.installer'
_inherit = 'res.config.installer'
_rec_name = 'host'
_logo_image = None
def _get_image(self, cr, uid, context=None):
if self._logo_image:
return self._logo_image
try:
im = urllib2.urlopen(_url.encode("UTF-8"))
if im.headers.maintype!='image':
raise TypeError(im.headers.maintype)
except Exception, e:
path = os.path.join('report_aeroo','config_pixmaps','module_banner.png')
image_file = file_data = tools.file_open(path,'rb')
try:
file_data = image_file.read()
self._logo_image = base64.encodestring(file_data)
return self._logo_image
finally:
image_file.close()
else:
self._logo_image = base64.encodestring(im.read())
return self._logo_image
def _get_image_fn(self, cr, uid, ids, name, args, context=None):
image = self._get_image(cr, uid, context)
return dict.fromkeys(ids, image) # ok to use .fromkeys() as the image is same for all
_columns = {
'host':fields.char('Host', size=64, required=True),
'port':fields.integer('Port', required=True),
'ooo_restart_cmd': fields.char('OOO restart command', size=256, \
            help='Enter the shell command that will be executed to restart the LibreOffice/OpenOffice background process. '+ \
            'The command will be executed as the user of the OpenERP server process, '+ \
            'so you may need to prefix it with sudo and configure your sudoers file to have this command executed without a password.'),
'state':fields.selection([
('init','Init'),
('error','Error'),
('done','Done'),
],'State', select=True, readonly=True),
'msg': fields.text('Message', readonly=True),
'error_details': fields.text('Error Details', readonly=True),
'link':fields.char('Installation Manual', size=128, help='Installation (Dependencies and Base system setup)', readonly=True),
'config_logo': fields.function(_get_image_fn, string='Image', type='binary', method=True),
}
def default_get(self, cr, uid, fields, context=None):
config_obj = self.pool.get('oo.config')
data = super(aeroo_config_installer, self).default_get(cr, uid, fields, context=context)
ids = config_obj.search(cr, 1, [], context=context)
if ids:
res = config_obj.read(cr, 1, ids[0], context=context)
del res['id']
data.update(res)
return data
def check(self, cr, uid, ids, context=None):
config_obj = self.pool.get('oo.config')
data = self.read(cr, uid, ids, ['host','port','ooo_restart_cmd'])[0]
del data['id']
config_id = config_obj.search(cr, 1, [], context=context)
if config_id:
config_obj.write(cr, 1, config_id, data, context=context)
else:
config_id = config_obj.create(cr, 1, data, context=context)
try:
fp = tools.file_open('report_aeroo_ooo/test_temp.odt', mode='rb')
file_data = fp.read()
DC = netsvc.Service._services.setdefault('openoffice', \
OpenOffice_service(cr, data['host'], data['port']))
with aeroo_lock:
DC.putDocument(file_data)
DC.saveByStream()
fp.close()
DC.closeDocument()
del DC
except DocumentConversionException, e:
netsvc.Service.remove('openoffice')
error_details = str(e)
state = 'error'
except Exception, e:
error_details = str(e)
state = 'error'
else:
error_details = ''
state = 'done'
if state=='error':
            msg = _('Connection to the OpenOffice.org instance could not be established or conversion to PDF was unsuccessful!')
        else:
            msg = _('Connection to the OpenOffice.org instance was successfully established and PDF conversion is working.')
return self.write(cr, uid, ids, {'msg':msg,'error_details':error_details,'state':state})
_defaults = {
'config_logo': _get_image,
'host':'localhost',
'port':8100,
'ooo_restart_cmd': 'sudo /etc/init.d/libreoffice restart',
'state':'init',
'link':'http://www.alistek.com/wiki/index.php/Aeroo_Reports_Linux_server#Installation_.28Dependencies_and_Base_system_setup.29',
}
aeroo_config_installer()
|
dhp-denero/LibrERP
|
report_aeroo_ooo/installer.py
|
Python
|
agpl-3.0
| 6,702
|
from django.apps import AppConfig
class CheckoutAppConfig(AppConfig):
name = 'ecommerce.extensions.checkout'
verbose_name = 'Checkout'
def ready(self):
super(CheckoutAppConfig, self).ready()
# noinspection PyUnresolvedReferences
import ecommerce.extensions.checkout.signals # pylint: disable=unused-variable
|
mferenca/HMS-ecommerce
|
ecommerce/extensions/checkout/apps.py
|
Python
|
agpl-3.0
| 349
|
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
import collections
from functools import partial
from django.db import models, transaction, connection, IntegrityError
import logging
from django.db.models import sql
import itertools
from amcat.models.coding.codingschemafield import CodingSchemaField
from amcat.models.coding.coding import CodingValue, Coding
from amcat.tools.model import AmcatModel
log = logging.getLogger(__name__)
STATUS_NOTSTARTED, STATUS_INPROGRESS, STATUS_COMPLETE, STATUS_IRRELEVANT = 0, 1, 2, 9
class CodedArticleStatus(AmcatModel):
id = models.IntegerField(primary_key=True, db_column='status_id')
label = models.CharField(max_length=50)
class Meta():
db_table = 'coded_article_status'
app_label = 'amcat'
def _to_coding(coded_article, coding):
"""
Takes a dictionary with keys 'sentence_id', 'start', 'end', and creates
an (unsaved) Coding object.
    @type coded_article: CodedArticle
@type coding: dict
"""
return Coding(
coded_article=coded_article, sentence_id=coding.get("sentence_id"),
start=coding.get("start"), end=coding.get("end")
)
def _to_codingvalue(coding, codingvalue):
"""
Takes a dictionary with keys 'codingschemafield_id', 'intval', 'strval' and creates
an (unsaved) CodingValue object.
@type coding: Coding
@type codingvalue: dict
"""
return CodingValue(
field_id=codingvalue.get("codingschemafield_id"),
intval=codingvalue.get("intval"),
strval=codingvalue.get("strval"),
coding=coding
)
def _to_codingvalues(coding, values):
"""
Takes an iterator with codingvalue dictionaries (see _to_coding) and a coding,
and returns an iterator with CodingValue's.
"""
return map(partial(_to_codingvalue, coding), values)
class CodedArticle(models.Model):
"""
A CodedArticle is an article in a context of two other objects: a codingjob and an
article. It exist for every (codingjob, article) in {codingjobs} X {codingjobarticles}
and is created when creating a codingjob (see `create_coded_articles` in codingjob.py).
Each coded article contains codings (1:N) and each coding contains codingvalues (1:N).
"""
comments = models.TextField(blank=True, null=True)
status = models.ForeignKey(CodedArticleStatus, default=STATUS_NOTSTARTED)
article = models.ForeignKey("amcat.Article", related_name="coded_articles")
codingjob = models.ForeignKey("amcat.CodingJob", related_name="coded_articles")
def __unicode__(self):
return "Article: {self.article}, Codingjob: {self.codingjob}".format(**locals())
def set_status(self, status):
"""Set the status of this coding, deserialising status as needed"""
if type(status) == int:
status = CodedArticleStatus.objects.get(pk=status)
self.status = status
self.save()
def get_codings(self):
"""Returns a generator yielding tuples (coding, [codingvalues])"""
codings = Coding.objects.filter(coded_article=self)
values = CodingValue.objects.filter(coding__in=codings)
values_dict = collections.defaultdict(list)
for value in values:
values_dict[value.coding_id].append(value)
for coding in codings:
yield (coding, values_dict[coding.id])
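    # Illustrative iteration over the generator above (names are made up):
    #   for coding, values in coded_article.get_codings():
    #       print coding.id, [v.strval for v in values]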
def _replace_codings(self, new_codings):
# Updating tactic: delete all existing codings and codingvalues, then insert
# the new ones. This prevents calculating a delta, and confronting the
# database with (potentially) many update queries.
CodingValue.objects.filter(coding__coded_article=self).delete()
Coding.objects.filter(coded_article=self).delete()
new_coding_objects = map(partial(_to_coding, self), new_codings)
# Saving each coding is pretty inefficient, but Django doesn't allow retrieving
# id's when using bulk_create. See Django ticket #19527.
if connection.vendor == "postgresql":
query = sql.InsertQuery(Coding)
query.insert_values(Coding._meta.fields[1:], new_coding_objects)
raw_sql, params = query.sql_with_params()[0]
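            # The statement executed below looks roughly like the following
            # (actual table and column names depend on the Coding model):
            #   INSERT INTO codings (...) VALUES (...), (...) RETURNING coding_id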
new_coding_objects = Coding.objects.raw("%s %s" % (raw_sql, "RETURNING coding_id"), params)
else:
# Do naive O(n) approach
for coding in new_coding_objects:
coding.save()
coding_values = itertools.chain.from_iterable(
_to_codingvalues(co, c["values"]) for c, co in itertools.izip(new_codings, new_coding_objects)
)
return (new_coding_objects, CodingValue.objects.bulk_create(coding_values))
def replace_codings(self, coding_dicts):
"""
Creates codings and replace currently existing ones. It takes one parameter
which has to be an iterator of dictionaries with each dictionary following
a specific format:
{
"sentence_id" : int,
"start" : int,
"end" : int,
"values" : [CodingDict]
}
with CodingDict being:
{
"codingschemafield_id" : int,
"intval" : int / NoneType,
"strval" : str / NoneType
}
@raises IntegrityError: codingschemafield_id is None
@raises ValueError: intval == strval == None
@raises ValueError: intval != None and strval != None
@returns: ([Coding], [CodingValue])
"""
coding_dicts = tuple(coding_dicts)
values = tuple(itertools.chain.from_iterable(cd["values"] for cd in coding_dicts))
if any(v.get("intval") == v.get("strval") == None for v in values):
raise ValueError("intval and strval cannot both be None")
if any(v.get("intval") is not None and v.get("strval") is not None for v in values):
raise ValueError("intval and strval cannot both be not None")
schemas = (self.codingjob.unitschema_id, self.codingjob.articleschema_id)
fields = CodingSchemaField.objects.filter(codingschema__id__in=schemas)
field_ids = set(fields.values_list("id", flat=True)) | {None}
if any(v.get("codingschemafield_id") not in field_ids for v in values):
raise ValueError("codingschemafield_id must be in codingjob")
with transaction.atomic():
return self._replace_codings(coding_dicts)
class Meta():
db_table = 'coded_articles'
app_label = 'amcat'
unique_together = ("codingjob", "article")
###########################################################################
# U N I T T E S T S #
###########################################################################
from amcat.tools import amcattest
class TestCodedArticle(amcattest.AmCATTestCase):
def test_comments(self):
"""Can we set and read comments?"""
from amcat.models import CodedArticle
ca = amcattest.create_test_coded_article()
self.assertIsNone(ca.comments)
for offset in range(4563, 20000, 1000):
s = "".join(unichr(offset + c) for c in range(12, 1000, 100))
ca.comments = s
ca.save()
ca = CodedArticle.objects.get(pk=ca.id)
self.assertEqual(ca.comments, s)
def _get_coding_dict(self, sentence_id=None, field_id=None, intval=None, strval=None, start=None, end=None):
return {
"sentence_id" : sentence_id,
"start" : start,
"end" : end,
"values" : [{
"codingschemafield_id" : field_id,
"intval" : intval,
"strval" : strval
}]
}
def test_replace_codings(self):
schema, codebook, strf, intf, codef = amcattest.create_test_schema_with_fields(isarticleschema=True)
schema2, codebook2, strf2, intf2, codef2 = amcattest.create_test_schema_with_fields(isarticleschema=True)
codingjob = amcattest.create_test_job(articleschema=schema, narticles=10)
coded_article = CodedArticle.objects.get(article=codingjob.articleset.articles.all()[0], codingjob=codingjob)
coded_article.replace_codings([self._get_coding_dict(intval=10, field_id=codef.id)])
self.assertEqual(1, coded_article.codings.all().count())
self.assertEqual(1, coded_article.codings.all()[0].values.all().count())
coding = coded_article.codings.all()[0]
value = coding.values.all()[0]
self.assertEqual(coding.sentence, None)
self.assertEqual(value.strval, None)
self.assertEqual(value.intval, 10)
self.assertEqual(value.field, codef)
# Overwrite previous coding
coded_article.replace_codings([self._get_coding_dict(intval=11, field_id=intf.id)])
self.assertEqual(1, coded_article.codings.all().count())
self.assertEqual(1, coded_article.codings.all()[0].values.all().count())
coding = coded_article.codings.all()[0]
value = coding.values.all()[0]
self.assertEqual(coding.sentence, None)
self.assertEqual(value.strval, None)
self.assertEqual(value.intval, 11)
self.assertEqual(value.field, intf)
        # Try to insert illegal values
illval1 = self._get_coding_dict(intval=1, strval="a", field_id=intf.id)
illval2 = self._get_coding_dict(field_id=intf.id)
illval3 = self._get_coding_dict(intval=1)
illval4 = self._get_coding_dict(intval=1, field_id=strf2.id)
self.assertRaises(ValueError, coded_article.replace_codings, [illval1])
self.assertRaises(ValueError, coded_article.replace_codings, [illval2])
self.assertRaises(IntegrityError, coded_article.replace_codings, [illval3])
self.assertRaises(ValueError, coded_article.replace_codings, [illval4])
# Unspecified values default to None
val = self._get_coding_dict(intval=1, field_id=intf.id)
del val["values"][0]["strval"]
coded_article.replace_codings([val])
value = coded_article.codings.all()[0].values.all()[0]
self.assertEqual(value.strval, None)
self.assertEqual(value.intval, 1)
val = self._get_coding_dict(strval="a", field_id=intf.id)
del val["values"][0]["intval"]
coded_article.replace_codings([val])
value = coded_article.codings.all()[0].values.all()[0]
self.assertEqual(value.strval, "a")
self.assertEqual(value.intval, None)
class TestCodedArticleStatus(amcattest.AmCATTestCase):
def test_status(self):
"""Is initial status 0? Can we set it?"""
ca = amcattest.create_test_coded_article()
self.assertEqual(ca.status.id, 0)
self.assertEqual(ca.status, CodedArticleStatus.objects.get(pk=STATUS_NOTSTARTED))
ca.set_status(STATUS_INPROGRESS)
self.assertEqual(ca.status, CodedArticleStatus.objects.get(pk=1))
ca.set_status(STATUS_COMPLETE)
self.assertEqual(ca.status, CodedArticleStatus.objects.get(pk=2))
ca.set_status(STATUS_IRRELEVANT)
self.assertEqual(ca.status, CodedArticleStatus.objects.get(pk=9))
ca.set_status(STATUS_NOTSTARTED)
self.assertEqual(ca.status, CodedArticleStatus.objects.get(pk=0))
|
tschmorleiz/amcat
|
amcat/models/coding/codedarticle.py
|
Python
|
agpl-3.0
| 12,691
|
from pixelated.adapter.mailstore.searchable_mailstore import SearchableMailStore
from pixelated.adapter.services.mail_service import MailService
from pixelated.adapter.model.mail import InputMail
from pixelated.adapter.services.mail_sender import MailSender
from pixelated.adapter.search import SearchEngine
from pixelated.adapter.services.draft_service import DraftService
from pixelated.adapter.listeners.mailbox_indexer_listener import listen_all_mailboxes
from twisted.internet import defer
from pixelated.adapter.search.index_storage_key import SearchIndexStorageKey
from pixelated.adapter.services.feedback_service import FeedbackService
class Services(object):
def __init__(self, leap_home, leap_session):
pass
@defer.inlineCallbacks
def setup(self, leap_home, leap_session):
InputMail.FROM_EMAIL_ADDRESS = leap_session.account_email()
search_index_storage_key = self.setup_search_index_storage_key(leap_session.soledad)
yield self.setup_search_engine(
leap_home,
search_index_storage_key)
self.wrap_mail_store_with_indexing_mail_store(leap_session)
yield listen_all_mailboxes(leap_session.account, self.search_engine, leap_session.mail_store)
self.mail_service = self.setup_mail_service(
leap_session,
self.search_engine)
self.keymanager = leap_session.nicknym
self.draft_service = self.setup_draft_service(leap_session.mail_store)
self.feedback_service = self.setup_feedback_service(leap_session)
yield self.index_all_mails()
def wrap_mail_store_with_indexing_mail_store(self, leap_session):
leap_session.mail_store = SearchableMailStore(leap_session.mail_store, self.search_engine)
@defer.inlineCallbacks
def index_all_mails(self):
all_mails = yield self.mail_service.all_mails()
self.search_engine.index_mails(all_mails)
@defer.inlineCallbacks
def setup_search_engine(self, leap_home, search_index_storage_key):
key_unicode = yield search_index_storage_key.get_or_create_key()
key = str(key_unicode)
print 'The key len is: %s' % len(key)
search_engine = SearchEngine(key, agent_home=leap_home)
self.search_engine = search_engine
def setup_mail_service(self, leap_session, search_engine):
pixelated_mail_sender = MailSender(leap_session.smtp_config, leap_session.nicknym.keymanager)
return MailService(
pixelated_mail_sender,
leap_session.mail_store,
search_engine,
leap_session.account_email())
def setup_draft_service(self, mail_store):
return DraftService(mail_store)
def setup_search_index_storage_key(self, soledad):
return SearchIndexStorageKey(soledad)
def setup_feedback_service(self, leap_session):
return FeedbackService(leap_session)
|
rdoh/pixelated-user-agent
|
service/pixelated/config/services.py
|
Python
|
agpl-3.0
| 2,909
|
"""SCons.Platform.win32
Platform-specific initialization for Win32 systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "/home/scons/scons/branch.0/baseline/src/engine/SCons/Platform/win32.py 0.96.1.D001 2004/08/23 09:55:29 knight"
import os
import os.path
import string
import sys
import tempfile
from SCons.Platform.posix import exitvalmap
# XXX See note below about why importing SCons.Action should be
# eventually refactored.
import SCons.Action
import SCons.Util
class TempFileMunge:
"""A callable class. You can set an Environment variable to this,
then call it with a string argument, then it will perform temporary
file substitution on it. This is used to circumvent the win32 long command
line limitation.
Example usage:
env["TEMPFILE"] = TempFileMunge
env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES')}"
"""
def __init__(self, cmd):
self.cmd = cmd
def __call__(self, target, source, env, for_signature):
if for_signature:
return self.cmd
cmd = env.subst_list(self.cmd, 0, target, source)[0]
try:
maxline = int(env.subst('$MAXLINELENGTH'))
except ValueError:
maxline = 2048
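        # Estimated command-line length below: the summed length of all
        # arguments plus one separating space per argument.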
if (reduce(lambda x, y: x + len(y), cmd, 0) + len(cmd)) <= maxline:
return self.cmd
else:
# We do a normpath because mktemp() has what appears to be
# a bug in Win32 that will use a forward slash as a path
# delimiter. Win32's link mistakes that for a command line
# switch and barfs.
#
# We use the .lnk suffix for the benefit of the Phar Lap
# linkloc linker, which likes to append an .lnk suffix if
# none is given.
tmp = os.path.normpath(tempfile.mktemp('.lnk'))
native_tmp = SCons.Util.get_native_path(tmp)
if env['SHELL'] and env['SHELL'] == 'sh':
# The sh shell will try to escape the backslashes in the
# path, so unescape them.
native_tmp = string.replace(native_tmp, '\\', r'\\\\')
# In Cygwin, we want to use rm to delete the temporary
# file, because del does not exist in the sh shell.
rm = env.Detect('rm') or 'del'
else:
# Don't use 'rm' if the shell is not sh, because rm won't
# work with the win32 shells (cmd.exe or command.com) or
# win32 path names.
rm = 'del'
args = map(SCons.Util.quote_spaces, cmd[1:])
open(tmp, 'w').write(string.join(args, " ") + "\n")
# XXX Using the SCons.Action.print_actions value directly
# like this is bogus, but expedient. This class should
# really be rewritten as an Action that defines the
# __call__() and strfunction() methods and lets the
# normal action-execution logic handle whether or not to
# print/execute the action. The problem, though, is all
# of that is decided before we execute this method as
# part of expanding the $TEMPFILE construction variable.
# Consequently, refactoring this will have to wait until
# we get more flexible with allowing Actions to exist
# independently and get strung together arbitrarily like
# Ant tasks. In the meantime, it's going to be more
# user-friendly to not let obsession with architectural
# purity get in the way of just being helpful, so we'll
# reach into SCons.Action directly.
if SCons.Action.print_actions:
print("Using tempfile "+native_tmp+" for command line:\n"+
str(cmd[0]) + " " + string.join(args," "))
return [ cmd[0], '@' + native_tmp + '\n' + rm, native_tmp ]
# The upshot of all this is that, if you are using Python 1.5.2,
# you had better have cmd or command.com in your PATH when you run
# scons.
def piped_spawn(sh, escape, cmd, args, env, stdout, stderr):
# There is no direct way to do that in python. What we do
# here should work for most cases:
# In case stdout (stderr) is not redirected to a file,
# we redirect it into a temporary file tmpFileStdout
# (tmpFileStderr) and copy the contents of this file
# to stdout (stderr) given in the argument
if not sh:
sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n")
return 127
else:
# one temporary file for stdout and stderr
tmpFileStdout = os.path.normpath(tempfile.mktemp())
tmpFileStderr = os.path.normpath(tempfile.mktemp())
# check if output is redirected
stdoutRedirected = 0
stderrRedirected = 0
for arg in args:
# are there more possibilities to redirect stdout ?
if (string.find( arg, ">", 0, 1 ) != -1 or
string.find( arg, "1>", 0, 2 ) != -1):
stdoutRedirected = 1
# are there more possibilities to redirect stderr ?
if string.find( arg, "2>", 0, 2 ) != -1:
stderrRedirected = 1
# redirect output of non-redirected streams to our tempfiles
if stdoutRedirected == 0:
args.append(">" + str(tmpFileStdout))
if stderrRedirected == 0:
args.append("2>" + str(tmpFileStderr))
# actually do the spawn
try:
args = [sh, '/C', escape(string.join(args)) ]
ret = os.spawnve(os.P_WAIT, sh, args, env)
except OSError, e:
# catch any error
ret = exitvalmap[e[0]]
if stderr != None:
stderr.write("scons: %s: %s\n" % (cmd, e[1]))
# copy child output from tempfiles to our streams
# and do clean up stuff
if stdout != None and stdoutRedirected == 0:
try:
stdout.write(open( tmpFileStdout, "r" ).read())
os.remove( tmpFileStdout )
except (IOError, OSError):
pass
if stderr != None and stderrRedirected == 0:
try:
stderr.write(open( tmpFileStderr, "r" ).read())
os.remove( tmpFileStderr )
except (IOError, OSError):
pass
return ret
def spawn(sh, escape, cmd, args, env):
if not sh:
sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n")
return 127
else:
try:
args = [sh, '/C', escape(string.join(args)) ]
ret = os.spawnve(os.P_WAIT, sh, args, env)
except OSError, e:
ret = exitvalmap[e[0]]
sys.stderr.write("scons: %s: %s\n" % (cmd, e[1]))
return ret
# Windows does not allow special characters in file names anyway, so
# no need for a complex escape function, we will just quote the arg.
escape = lambda x: '"' + x + '"'
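# For example, escape(r'C:\Program Files\foo.exe') yields
# '"C:\Program Files\foo.exe"'.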
# Get the windows system directory name
def get_system_root():
    # A reasonable default if we can't read the registry
try:
val = os.environ['SYSTEMROOT']
except KeyError:
val = "C:/WINDOWS"
pass
# First see if we can look in the registry...
if SCons.Util.can_read_reg:
try:
# Look for Windows NT system root
k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
'Software\\Microsoft\\Windows NT\\CurrentVersion')
val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
except SCons.Util.RegError:
try:
# Okay, try the Windows 9x system root
k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
'Software\\Microsoft\\Windows\\CurrentVersion')
val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
except KeyboardInterrupt:
raise
except:
pass
return val
# Get the location of the program files directory
def get_program_files_dir():
# Now see if we can look in the registry...
val = ''
if SCons.Util.can_read_reg:
try:
# Look for Windows Program Files directory
k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
'Software\\Microsoft\\Windows\\CurrentVersion')
val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir')
except SCons.Util.RegError:
val = ''
pass
if val == '':
# A reasonable default if we can't read the registry
# (Actually, it's pretty reasonable even if we can :-)
val = os.path.join(os.path.dirname(get_system_root()),"Program Files")
return val
def generate(env):
# Attempt to find cmd.exe (for WinNT/2k/XP) or
# command.com for Win9x
cmd_interp = ''
# First see if we can look in the registry...
if SCons.Util.can_read_reg:
try:
# Look for Windows NT system root
k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
'Software\\Microsoft\\Windows NT\\CurrentVersion')
val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
cmd_interp = os.path.join(val, 'System32\\cmd.exe')
except SCons.Util.RegError:
try:
# Okay, try the Windows 9x system root
k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
'Software\\Microsoft\\Windows\\CurrentVersion')
val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
cmd_interp = os.path.join(val, 'command.com')
except KeyboardInterrupt:
raise
except:
pass
    # For the special case of not having access to the registry, we
    # use a temporary path and pathext to attempt to find the command
    # interpreter. If we fail, we try to find the interpreter through
    # the env's PATH. The catch is that env might not yet contain an
    # 'ENV' dictionary, let alone a PATH inside it.
if not cmd_interp:
systemroot = r'C:\Windows'
if os.environ.has_key('SYSTEMROOT'):
systemroot = os.environ['SYSTEMROOT']
tmp_path = systemroot + os.pathsep + \
os.path.join(systemroot,'System32')
tmp_pathext = '.com;.exe;.bat;.cmd'
if os.environ.has_key('PATHEXT'):
tmp_pathext = os.environ['PATHEXT']
cmd_interp = SCons.Util.WhereIs('cmd', tmp_path, tmp_pathext)
if not cmd_interp:
cmd_interp = SCons.Util.WhereIs('command', tmp_path, tmp_pathext)
if not cmd_interp:
cmd_interp = env.Detect('cmd')
if not cmd_interp:
cmd_interp = env.Detect('command')
if not env.has_key('ENV'):
env['ENV'] = {}
# Import things from the external environment to the construction
# environment's ENV. This is a potential slippery slope, because we
# *don't* want to make builds dependent on the user's environment by
# default. We're doing this for SYSTEMROOT, though, because it's
# needed for anything that uses sockets, and seldom changes. Weigh
# the impact carefully before adding other variables to this list.
import_env = [ 'SYSTEMROOT' ]
for var in import_env:
v = os.environ.get(var)
if v:
env['ENV'][var] = v
env['ENV']['PATHEXT'] = '.COM;.EXE;.BAT;.CMD'
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.obj'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
env['PROGPREFIX'] = ''
env['PROGSUFFIX'] = '.exe'
env['LIBPREFIX'] = ''
env['LIBSUFFIX'] = '.lib'
env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = [ '$LIBPREFIX' ]
env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ]
env['PSPAWN'] = piped_spawn
env['SPAWN'] = spawn
env['SHELL'] = cmd_interp
env['TEMPFILE'] = TempFileMunge
env['MAXLINELENGTH'] = 2048
env['ESCAPE'] = escape
|
bilke/OpenSG-1.8
|
SConsLocal/scons-local-0.96.1/SCons/Platform/win32.py
|
Python
|
lgpl-2.1
| 13,583
|
import os
import unicodedata
from tendrl.commons.event import Event
from tendrl.commons.message import ExceptionMessage
from tendrl.commons.utils import cmd_utils
from tendrl.commons.utils import etcd_utils
from tendrl.commons.utils import log_utils as logger
def sync():
try:
_keep_alive_for = int(NS.config.data.get("sync_interval", 10)) + 250
disks = get_node_disks()
disk_map = {}
for disk in disks:
# Creating dict with disk name as key and disk_id as value
# It will help populate block device disk_id attribute
_map = dict(disk_id=disks[disk]['disk_id'], ssd=False)
disk_map[disks[disk]['disk_name']] = _map
block_devices = get_node_block_devices(disk_map)
for disk in disks:
if disk_map[disks[disk]['disk_name']]:
disks[disk]['ssd'] = disk_map[disks[disk][
'disk_name']]['ssd']
if "virtio" in disks[disk]["driver"]:
# Virtual disk
NS.tendrl.objects.VirtualDisk(**disks[disk]).save(
ttl=_keep_alive_for
)
else:
# physical disk
NS.tendrl.objects.Disk(**disks[disk]).save(ttl=_keep_alive_for)
for device in block_devices['all']:
NS.tendrl.objects.BlockDevice(**device).save(ttl=_keep_alive_for)
for device_id in block_devices['used']:
etcd_utils.write(
"nodes/%s/LocalStorage/BlockDevices/used/%s" %
(NS.node_context.node_id,
device_id.replace("/", "_").replace("_", "", 1)),
device_id, ttl=_keep_alive_for
)
for device_id in block_devices['free']:
etcd_utils.write(
"nodes/%s/LocalStorage/BlockDevices/free/%s" %
(NS.node_context.node_id,
device_id.replace("/", "_").replace("_", "", 1)),
device_id, ttl=_keep_alive_for
)
raw_reference = get_raw_reference()
etcd_utils.write(
"nodes/%s/LocalStorage/DiskRawReference" %
NS.node_context.node_id,
raw_reference,
ttl=_keep_alive_for,
)
except(Exception, KeyError) as ex:
_msg = "node_sync disks sync failed: " + ex.message
Event(
ExceptionMessage(
priority="error",
publisher=NS.publisher_id,
payload={"message": _msg,
"exception": ex}
)
)
def get_node_disks():
disks, disks_map, err = get_disk_details()
if not err:
cmd = cmd_utils.Command('hwinfo --partition')
out, err, rc = cmd.run()
if not err:
for partitions in out.split('\n\n'):
devlist = {"hardware_id": "",
"parent_hardware_id": "",
"sysfs_id": "",
"hardware_class": "",
"model": "",
"partition_name": "",
"device_files": "",
"config_status": "",
}
for partition in partitions.split('\n'):
key = partition.split(':')[0]
if key.strip() == "Unique ID":
devlist["hardware_id"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Parent ID":
devlist["parent_hardware_id"] = \
partition.split(':')[1].lstrip()
if key.strip() == "SysFS ID":
devlist["sysfs_id"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Hardware Class":
devlist["hardware_class"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Model":
devlist["model"] = \
partition.split(':')[1].lstrip().replace('"', "")
if key.strip() == "Device File":
_name = partition.split(':')[1].lstrip()
devlist["partition_name"] = \
"".join(_name.split(" ")[0])
if key.strip() == "Device Files":
devlist["device_files"] = \
partition.split(':')[1].lstrip()
if key.strip() == "Config Status":
devlist["config_status"] = \
partition.split(':')[1].lstrip()
# checking if partition parent id is in collected
# disk_ids or not
if devlist["parent_hardware_id"] in disks_map:
part_name = devlist["partition_name"]
parent = disks_map[devlist["parent_hardware_id"]]
disks[parent]["partitions"][part_name] = devlist
return disks
def get_disk_details():
disks = {}
disks_map = {}
cmd = cmd_utils.Command('hwinfo --disk')
out, err, rc = cmd.run()
if not err:
out = unicodedata.normalize('NFKD', out).encode('utf8', 'ignore') \
if isinstance(out, unicode) \
else unicode(out, errors="ignore").encode('utf8')
for all_disks in out.split('\n\n'):
devlist = {"disk_id": "",
"hardware_id": "",
"parent_id": "",
"disk_name": "",
"sysfs_id": "",
"sysfs_busid": "",
"sysfs_device_link": "",
"hardware_class": "",
"model": "",
"vendor": "",
"device": "",
"rmversion": "",
"serial_no": "",
"driver_modules": "",
"driver": "",
"device_files": "",
"device_number": "",
"bios_id": "",
"geo_bios_edd": "",
"geo_logical": "",
"size": "",
"size_bios_edd": "",
"geo_bios_legacy": "",
"config_status": "",
"partitions": {}
}
for disk in all_disks.split('\n'):
key = disk.split(':')[0]
if key.strip() == "Unique ID":
devlist["hardware_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Parent ID":
devlist["parent_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "SysFS ID":
devlist["sysfs_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "SysFS BusID":
devlist["sysfs_busid"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "SysFS Device Link":
devlist["sysfs_device_link"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Hardware Class":
devlist["hardware_class"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Model":
devlist["model"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Vendor":
devlist["vendor"] = \
disk.split(':')[1].replace(" ", "").replace('"', "")
elif key.strip() == "Device":
devlist["device"] = \
disk.split(':')[1].replace(" ", "").replace('"', "")
elif key.strip() == "Revision":
devlist["rmversion"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Serial ID":
devlist["serial_no"] = \
disk.split(':')[1].replace(" ", "").replace('"', "")
elif key.strip() == "Driver":
devlist["driver"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Driver Modules":
devlist["driver_modules"] = \
disk.split(':')[1].lstrip().replace('"', "")
elif key.strip() == "Device File":
_name = disk.split(':')[1].lstrip()
devlist["disk_name"] = \
"".join(_name.split(" ")[0])
elif key.strip() == "Device Files":
devlist["device_files"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Device Number":
devlist["device_number"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "BIOS id":
devlist["bios_id"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Geometry (Logical)":
devlist["geo_logical"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Capacity":
devlist["size"] = \
disk.split('(')[1].split()[0]
elif key.strip() == "Geometry (BIOS EDD)":
devlist["geo_bios_edd"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Size (BIOS EDD)":
devlist["size_bios_edd"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Geometry (BIOS Legacy)":
devlist["geo_bios_legacy"] = \
disk.split(':')[1].lstrip()
elif key.strip() == "Config Status":
devlist["config_status"] = \
disk.split(':')[1].lstrip()
if ("virtio" in devlist["driver"] and
"by-id/virtio" in devlist['device_files']):
# split from:
# /dev/vdc, /dev/disk/by-id/virtio-0200f64e-5892-40ee-8,
# /dev/disk/by-path/virtio-pci-0000:00:08.0
for entry in devlist['device_files'].split(','):
if "by-id/virtio" in entry:
devlist['disk_id'] = entry.split('/')[-1]
break
elif "VMware" in devlist["vendor"]:
devlist["disk_id"] = \
"{vendor}_{device}_{parent_id}_{hardware_id}".format(**devlist)
elif (devlist["vendor"] != "" and
devlist["device"] != "" and
devlist["serial_no"] != ""):
devlist["disk_id"] = (devlist["vendor"] + "_" +
devlist["device"] + "_" + devlist[
"serial_no"])
else:
devlist['disk_id'] = devlist['disk_name']
if devlist["disk_id"] in disks.keys():
                # Multipath combines multiple I/O paths between server
                # nodes and storage arrays into a single device.
                # If one device is reachable through more than one path,
                # hwinfo and lsblk report the same device details under
                # different device names. To avoid duplicate entries,
                # when multiple devices share the same disk_id we keep
                # the device_name that sorts first alphabetically.
                # This avoids redundant disk entries and ensures the
                # next sync populates the same device details again.
if devlist["disk_name"] < disks[
devlist['disk_id']]['disk_name']:
disks[devlist["disk_id"]] = devlist
disks_map[devlist['hardware_id']] = devlist["disk_id"]
else:
disks[devlist["disk_id"]] = devlist
disks_map[devlist['hardware_id']] = devlist["disk_id"]
return disks, disks_map, err
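# Illustrative disk_id derivation (hypothetical values): a disk reported by
# hwinfo with vendor "ATA", device "ST500DM002" and serial "Z3T1ABCD" gets
# disk_id "ATA_ST500DM002_Z3T1ABCD"; a disk missing any of those fields falls
# back to its device name, e.g. "/dev/sdb".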
def get_node_block_devices(disks_map):
block_devices = dict(all=list(), free=list(), used=list())
columns = 'NAME,KNAME,PKNAME,MAJ:MIN,FSTYPE,MOUNTPOINT,LABEL,' \
'UUID,RA,RO,RM,SIZE,STATE,OWNER,GROUP,MODE,ALIGNMENT,' \
'MIN-IO,OPT-IO,PHY-SEC,LOG-SEC,ROTA,SCHED,RQ-SIZE,' \
'DISC-ALN,DISC-GRAN,DISC-MAX,DISC-ZERO,TYPE'
keys = columns.split(',')
lsblk = (
"lsblk --all --bytes --noheadings --output='%s' --path --raw" %
columns)
cmd = cmd_utils.Command(lsblk)
out, err, rc = cmd.run()
if not err:
out = unicodedata.normalize('NFKD', out).encode('utf8', 'ignore') \
if isinstance(out, unicode) \
else unicode(out, errors="ignore").encode('utf8')
devlist = map(
lambda line: dict(zip(keys, line.split(' '))),
out.splitlines())
all_parents = []
parent_ids = []
multipath = {}
for dev_info in devlist:
device = dict()
device['device_name'] = dev_info['NAME']
device['device_kernel_name'] = dev_info['KNAME']
device['parent_name'] = dev_info['PKNAME']
device['major_to_minor_no'] = dev_info['MAJ:MIN']
device['fstype'] = dev_info['FSTYPE']
device['mount_point'] = dev_info['MOUNTPOINT']
device['label'] = dev_info['LABEL']
device['fsuuid'] = dev_info['UUID']
device['read_ahead'] = dev_info['RA']
if dev_info['RO'] == '0':
device['read_only'] = False
else:
device['read_only'] = True
if dev_info['RM'] == '0':
device['removable_device'] = False
else:
device['removable_device'] = True
device['size'] = dev_info['SIZE']
device['state'] = dev_info['STATE']
device['owner'] = dev_info['OWNER']
device['group'] = dev_info['GROUP']
device['mode'] = dev_info['MODE']
device['alignment'] = dev_info['ALIGNMENT']
device['min_io_size'] = dev_info['MIN-IO']
device['optimal_io_size'] = dev_info['OPT-IO']
device['phy_sector_size'] = dev_info['PHY-SEC']
device['log_sector_size'] = dev_info['LOG-SEC']
device['device_type'] = dev_info['TYPE']
device['scheduler_name'] = dev_info['SCHED']
device['req_queue_size'] = dev_info['RQ-SIZE']
device['discard_align_offset'] = dev_info['DISC-ALN']
device['discard_granularity'] = dev_info['DISC-GRAN']
device['discard_max_bytes'] = dev_info['DISC-MAX']
device['discard_zeros_data'] = dev_info['DISC-ZERO']
device['rotational'] = dev_info['ROTA']
if dev_info['TYPE'] == 'disk':
device['ssd'] = is_ssd(dev_info['ROTA'])
else:
device['ssd'] = False
if dev_info['TYPE'] == 'part':
device['used'] = True
# if partition is under multipath then parent of multipath
# is assigned
if dev_info['PKNAME'] in multipath.keys():
dev_info['PKNAME'] = multipath[dev_info['PKNAME']]
if dev_info['PKNAME'] in disks_map.keys():
device['disk_id'] = disks_map[
dev_info['PKNAME']]['disk_id']
block_devices['all'].append(device)
block_devices['used'].append(device['device_name'])
if dev_info['TYPE'] == 'disk':
if dev_info['NAME'] in disks_map.keys():
device['disk_id'] = disks_map[dev_info['NAME']]['disk_id']
disks_map[dev_info['NAME']]['ssd'] = device['ssd']
all_parents.append(device)
if dev_info['TYPE'] == 'mpath':
multipath[device['device_kernel_name']] = dev_info['PKNAME']
else:
if dev_info['PKNAME'] in multipath.keys():
dev_info['PKNAME'] = multipath[dev_info['PKNAME']]
parent_ids.append(dev_info['PKNAME'])
for parent in all_parents:
if parent['device_name'] in parent_ids:
parent['used'] = True
block_devices['used'].append(parent['device_name'])
else:
parent['used'] = False
block_devices['free'].append(parent['device_name'])
block_devices['all'].append(parent)
else:
logger.log(
"debug",
NS.publisher_id,
{"message": err}
)
return block_devices
def get_raw_reference():
base_path = '/dev/disk/'
paths = os.listdir(base_path)
raw_reference = {}
for path in paths:
raw_reference[path] = []
full_path = base_path + path
cmd = cmd_utils.Command("ls -l %s" % full_path)
out, err, rc = cmd.run()
if not err:
out = unicodedata.normalize('NFKD', out).encode('utf8', 'ignore') \
if isinstance(out, unicode) \
else unicode(out, errors="ignore").encode('utf8')
count = 0
for line in out.split('\n'):
if count == 0:
                    # skip the header line of 'ls -l' output ('total N')
count = count + 1
continue
line = line.replace(" ", " ")
raw_reference[path].append(line.split(' ', 8)[-1])
else:
logger.log(
"debug",
NS.publisher_id,
{"message": err}
)
return raw_reference
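# Illustrative shape of the mapping returned above (hypothetical entries):
#   {'by-id':   ['ata-ST500DM002_Z3T1ABCD -> ../../sda'],
#    'by-uuid': ['6c96114e-... -> ../../sda1']}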
def is_ssd(rotational):
if rotational == '0':
return True
if rotational == '1':
return False
"""Rotational attribute not found for
this device which is not either SSD or HD
"""
return False
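# Example: lsblk reports ROTA='1' for spinning disks and '0' for SSDs, so
# is_ssd('0') -> True and is_ssd('1') -> False.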
|
Tendrl/node_agent
|
tendrl/node_agent/node_sync/disk_sync.py
|
Python
|
lgpl-2.1
| 18,338
|
import codecs
import os
import platform
import re
import subprocess
# Squish's GUI test API (test, waitForObject, clickButton, ...) is injected
# globally by the test runner and therefore not imported here.
# flag that caches the information whether Windows firewall is running or not
fireWallState = None
# this function modifies all necessary run settings to make it possible to hook into
# the application compiled by Creator
def modifyRunSettingsForHookInto(projectName, port):
prepareBuildSettings(1, 0)
# this uses the defaultQtVersion currently
switchViewTo(ViewConstants.PROJECTS)
switchToBuildOrRunSettingsFor(1, 0, ProjectSettings.BUILD)
qtVersion, mkspec, qtBinPath, qtLibPath = getQtInformationForBuildSettings(True)
if None in (qtVersion, mkspec, qtBinPath, qtLibPath):
test.fatal("At least one of the Qt information returned None - leaving...",
"Qt version: %s, mkspec: %s, Qt BinPath: %s, Qt LibPath: %s" %
(qtVersion, mkspec, qtBinPath, qtLibPath))
return False
qtVersion = ".".join(qtVersion.split(".")[:2])
switchToBuildOrRunSettingsFor(1, 0, ProjectSettings.RUN)
result = __configureCustomExecutable__(projectName, port, mkspec, qtVersion)
if result:
clickButton(waitForObject("{window=':Qt Creator_Core::Internal::MainWindow' text='Details' "
"type='Utils::DetailsButton' unnamed='1' visible='1' "
"leftWidget={type='QLabel' text~='Us(e|ing) <b>Build Environment</b>' unnamed='1' visible='1'}}"))
envVarsTableView = waitForObject("{type='QTableView' visible='1' unnamed='1'}")
model = envVarsTableView.model()
changingVars = []
for index in dumpIndices(model):
# get var name
envVarsTableView.scrollTo(index)
varName = str(model.data(index).toString())
            # if it's a special SQUISH var, simply unset it; SQUISH_LIBQTDIR and PATH will be replaced with Qt paths
if varName == "PATH":
test.log("Replacing PATH with '%s'" % qtBinPath)
changingVars.append("PATH=%s" % qtBinPath)
elif varName.find("SQUISH") == 0:
if varName == "SQUISH_LIBQTDIR":
if platform.system() in ('Microsoft', 'Windows'):
replacement = qtBinPath
else:
replacement = qtLibPath
test.log("Replacing SQUISH_LIBQTDIR with '%s'" % replacement)
changingVars.append("SQUISH_LIBQTDIR=%s" % replacement)
else:
changingVars.append(varName)
#test.log("Unsetting %s for run" % varName)
clickButton(waitForObject("{text='Batch Edit...' type='QPushButton' unnamed='1' visible='1' "
"window=':Qt Creator_Core::Internal::MainWindow'}"))
editor = waitForObject("{type='TextEditor::SnippetEditorWidget' unnamed='1' visible='1' "
"window=':Edit Environment_ProjectExplorer::EnvironmentItemsDialog'}")
typeLines(editor, changingVars)
clickButton(waitForObject("{text='OK' type='QPushButton' unnamed='1' visible='1' "
"window=':Edit Environment_ProjectExplorer::EnvironmentItemsDialog'}"))
switchViewTo(ViewConstants.EDIT)
return result
def modifyRunSettingsForHookIntoQtQuickUI(workingDir, projectName, port):
switchViewTo(ViewConstants.PROJECTS)
switchToBuildOrRunSettingsFor(1, 0, ProjectSettings.RUN, True)
qtVersion, mkspec, qtLibPath, qmake = getQtInformationForQmlProject()
if None in (qtVersion, mkspec, qtLibPath, qmake):
test.fatal("At least one of the Qt information returned None - leaving...",
"Qt version: %s, mkspec: %s, Qt LibPath: %s, qmake: '%s'"
% (qtVersion, mkspec, qtLibPath, qmake))
return None
squishPath = getSquishPath(mkspec, qtVersion)
if squishPath == None:
test.warning("Could not determine the Squish path for %s/%s" % (qtVersion, mkspec),
"Using fallback of pushing STOP inside Creator.")
return None
test.log("Using (QtVersion/mkspec) %s/%s with SquishPath %s" % (qtVersion, mkspec, squishPath))
if platform.system() == "Darwin":
qmlViewer = os.path.abspath(os.path.dirname(qmake) + "/QMLViewer.app")
else:
qmlViewer = os.path.abspath(os.path.dirname(qmake) + "/qmlviewer")
if platform.system() in ('Microsoft', 'Windows'):
qmlViewer = qmlViewer + ".exe"
addRunConfig = waitForObject("{container={window=':Qt Creator_Core::Internal::MainWindow' "
"type='ProjectExplorer::Internal::RunSettingsWidget' unnamed='1' "
"visible='1'} occurrence='2' text='Add' type='QPushButton' "
"unnamed='1' visible='1'}")
clickButton(addRunConfig)
activateItem(waitForObject("{type='QMenu' visible='1' unnamed='1'}"), "Custom Executable")
exePathChooser = waitForObject("{buddy={window=':Qt Creator_Core::Internal::MainWindow' "
"text='Command:' type='QLabel' unnamed='1' visible='1'} "
"type='Utils::PathChooser' unnamed='1' visible='1'}")
exeLineEd = getChildByClass(exePathChooser, "Utils::BaseValidatingLineEdit")
argLineEd = waitForObject("{buddy={window=':Qt Creator_Core::Internal::MainWindow' "
"type='QLabel' text='Arguments:' visible='1'} type='QLineEdit' "
"unnamed='1' visible='1'}")
wdPathChooser = waitForObject("{buddy={window=':Qt Creator_Core::Internal::MainWindow' "
"text='Working directory:' type='QLabel'} "
"type='Utils::PathChooser' unnamed='1' visible='1'}")
wdLineEd = getChildByClass(wdPathChooser, "Utils::BaseValidatingLineEdit")
startAUT = os.path.abspath(squishPath + "/bin/startaut")
if platform.system() in ('Microsoft', 'Windows'):
startAUT = startAUT + ".exe"
projectPath = os.path.abspath("%s/%s" % (workingDir, projectName))
replaceEditorContent(exeLineEd, startAUT)
replaceEditorContent(argLineEd, "--verbose --port=%d %s %s.qml"
% (port, qmlViewer, projectName))
replaceEditorContent(wdLineEd, projectPath)
clickButton(waitForObject("{text='Details' type='Utils::DetailsButton' unnamed='1' visible='1' "
"window=':Qt Creator_Core::Internal::MainWindow' "
"leftWidget={type='QLabel' text~='Us(e|ing) <b>Build Environment</b>'"
" unnamed='1' visible='1'}}"))
row = 0
for varName in ("PATH", "SQUISH_LIBQTDIR"):
__addVariableToRunEnvironment__(varName, qtLibPath, row)
row = row + 1
if not platform.system() in ('Microsoft', 'Windows', 'Darwin'):
__addVariableToRunEnvironment__("LD_LIBRARY_PATH", qtLibPath, 0)
if platform.system() == "Darwin":
__addVariableToRunEnvironment__("DYLD_FRAMEWORK_PATH", qtLibPath, 0)
if not platform.system() in ('Microsoft', 'Windows'):
__addVariableToRunEnvironment__("DISPLAY", ":0.0", 0)
result = qmlViewer
switchViewTo(ViewConstants.EDIT)
return result
# this helper method must be called on the run settings page of a Qt Quick UI with DetailsWidget
# for the run settings already opened - it won't work on other views because of a different layout
def __addVariableToRunEnvironment__(name, value, row):
clickButton(waitForObject("{text='Add' type='QPushButton' unnamed='1' visible='1' "
"container={window=':Qt Creator_Core::Internal::MainWindow' "
"type='Utils::DetailsWidget' unnamed='1' visible='1' occurrence='2'}}"))
varNameLineEd = waitForObject("{type='QExpandingLineEdit' visible='1' unnamed='1'}")
replaceEditorContent(varNameLineEd, name)
valueLineEd = __doubleClickQTableView__(":Qt Creator_QTableView", row, 1)
replaceEditorContent(valueLineEd, value)
type(valueLineEd, "<Return>")
def __getMkspecFromQMakeConf__(qmakeConf):
    if qmakeConf == None or not os.path.exists(qmakeConf):
return None
if not platform.system() in ('Microsoft', 'Windows'):
return os.path.basename(os.path.realpath(os.path.dirname(qmakeConf)))
mkspec = None
file = codecs.open(qmakeConf, "r", "utf-8")
for line in file:
if "QMAKESPEC_ORIGINAL" in line:
mkspec = line.split("=")[1]
break
file.close()
if mkspec == None:
test.warning("Could not determine mkspec from '%s'" % qmakeConf)
return None
return os.path.basename(mkspec)
def __getMkspecFromQmake__(qmakeCall):
QmakeConfPath = getOutputFromCmdline("%s -query QMAKE_MKSPECS" % qmakeCall).strip()
for tmpPath in QmakeConfPath.split(os.pathsep):
        tmpPath = tmpPath + os.sep + "default" + os.sep + "qmake.conf"
result = __getMkspecFromQMakeConf__(tmpPath)
if result != None:
return result.strip()
test.warning("Could not find qmake.conf inside provided QMAKE_MKSPECS path",
"QMAKE_MKSPECS returned: '%s'" % QmakeConfPath)
return None
# helper that double clicks the table view at specified row and column
# returns the QExpandingLineEdit (the editable table cell)
def __doubleClickQTableView__(qtableView, row, column):
doubleClick(waitForObject("{container='%s' "
"type='QModelIndex' row='%d' column='%d'}" % (qtableView, row, column)), 5, 5, 0, Qt.LeftButton)
return waitForObject("{type='QExpandingLineEdit' visible='1' unnamed='1'}")
# this function configures the custom executable onto the run settings page (using startaut from Squish)
def __configureCustomExecutable__(projectName, port, mkspec, qmakeVersion):
startAUT = getSquishPath(mkspec, qmakeVersion)
if startAUT == None:
test.warning("Something went wrong determining the right Squish for %s / %s combination - "
"using fallback without hooking into subprocess." % (qmakeVersion, mkspec))
return False
else:
startAUT = os.path.abspath(startAUT + "/bin/startaut")
if platform.system() in ('Microsoft', 'Windows'):
startAUT += ".exe"
if not os.path.exists(startAUT):
test.warning("Configured Squish directory seems to be missing - using fallback without hooking into subprocess.",
"Failed to find '%s'" % startAUT)
return False
addButton = waitForObject("{container={window=':Qt Creator_Core::Internal::MainWindow' "
"type='ProjectExplorer::Internal::RunSettingsWidget' unnamed='1' "
"visible='1'} occurrence='2' text='Add' type='QPushButton' "
"unnamed='1' visible='1'}")
clickButton(addButton)
addMenu = addButton.menu()
activateItem(waitForObjectItem(objectMap.realName(addMenu), 'Custom Executable'))
exePathChooser = waitForObject("{buddy={window=':Qt Creator_Core::Internal::MainWindow' text='Command:' type='QLabel' unnamed='1' visible='1'} "
"type='Utils::PathChooser' unnamed='1' visible='1'}", 2000)
exeLineEd = getChildByClass(exePathChooser, "Utils::BaseValidatingLineEdit")
argLineEd = waitForObject("{buddy={window=':Qt Creator_Core::Internal::MainWindow' "
"type='QLabel' text='Arguments:' visible='1'} type='QLineEdit' "
"unnamed='1' visible='1'}")
wdPathChooser = waitForObject("{buddy={window=':Qt Creator_Core::Internal::MainWindow' text='Working directory:' type='QLabel'} "
"type='Utils::PathChooser' unnamed='1' visible='1'}")
replaceEditorContent(exeLineEd, startAUT)
# the following is currently only configured for release builds (will be enhanced later)
if platform.system() in ('Microsoft', 'Windows'):
debOrRel = "release" + os.sep
else:
debOrRel = ""
replaceEditorContent(argLineEd, "--verbose --port=%d %s%s" % (port, debOrRel, projectName))
return True
# function that retrieves a specific child object by its class
# this is sometimes the best way to avoid using waitForObject() on objects that
# occur more than once - but could easily be found by using a compound object
# (e.g. search for Utils::PathChooser instead of Utils::BaseValidatingLineEdit and get the child)
def getChildByClass(parent, classToSearchFor, occurrence=1):
    children = [child for child in object.children(parent) if className(child) == classToSearchFor]
    if len(children) < occurrence:
        return None
    else:
        return children[occurrence - 1]
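# Illustrative usage (mirrors the calls above): fetch the editable line edit
# inside a compound Utils::PathChooser instead of matching it directly:
#   lineEd = getChildByClass(exePathChooser, "Utils::BaseValidatingLineEdit")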
# get the Squish path that is needed to successfully hook into the compiled app
def getSquishPath(mkspec, qmakev):
qmakev = ".".join(qmakev.split(".")[0:2])
path = None
mapfile = os.environ.get("QT_SQUISH_MAPFILE")
if mapfile and os.path.isfile(mapfile):
file = codecs.open(mapfile, "r", "utf-8")
        pattern = re.compile(r"\s+")
for line in file:
if line[0] == "#":
continue
tmp = pattern.split(line, 2)
if tmp[0].strip("'\"") == qmakev and tmp[1].strip("'\"") == mkspec:
path = os.path.expanduser(tmp[2].strip().strip("'\""))
break
file.close()
else:
if not mapfile:
test.warning("Environment variable QT_SQUISH_MAPFILE isn't set. Using fallback test data.",
"See the README file how to use it.")
else:
test.warning("Environment variable QT_SQUISH_MAPFILE isn't set correctly or map file does not exist. Using fallback test data.",
"See the README file how to use it.")
# try the test data fallback
mapData = testData.dataset(os.getcwd() + "/../../shared_data/qt_squish_mapping.tsv")
for row, record in enumerate(mapData):
if testData.field(record, "qtversion") == qmakev and testData.field(record, "mkspec") == mkspec:
path = os.path.expanduser(testData.field(record, "path"))
break
if path == None or not os.path.exists(path):
test.warning("Path '%s' from fallback test data file does not exist!" % path,
"See the README file how to set up your environment.")
return None
return path
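# Illustrative QT_SQUISH_MAPFILE line (hypothetical path) - three
# whitespace-separated fields: qmake version, mkspec, Squish directory:
#   "4.8" "win32-msvc2010" "C:\Squish\squish-for-qt48"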
# function to add a program to allow communication through the win firewall
# param workingDir this directory is the parent of the project folder
# param projectName this is the name of the project (the folder inside workingDir as well as the name for the executable)
# param isReleaseBuild should currently always be set to True (will later add debug build testing)
def allowAppThroughWinFW(workingDir, projectName, isReleaseBuild=True):
if not __isWinFirewallRunning__():
return
    # WinFirewall seems to be running - hopefully no other firewall interferes
result = __configureFW__(workingDir, projectName, isReleaseBuild)
if result == 0:
test.log("Added %s to firewall" % projectName)
else:
test.fatal("Could not add %s as allowed program to win firewall" % projectName)
# function to delete a (former added) program from the win firewall
# param workingDir this directory is the parent of the project folder
# param projectName this is the name of the project (the folder inside workingDir as well as the name for the executable)
# param isReleaseBuild should currently always be set to True (will later add debug build testing)
def deleteAppFromWinFW(workingDir, projectName, isReleaseBuild=True):
if not __isWinFirewallRunning__():
return
    # WinFirewall seems to be running - hopefully no other firewall interferes
result = __configureFW__(workingDir, projectName, isReleaseBuild, False)
if result == 0:
test.log("Deleted %s from firewall" % projectName)
else:
test.fatal("Could not delete %s as allowed program from win firewall" % (mode, projectName))
# helper that can modify the win firewall to allow a program to communicate through it or delete it
# param addToFW defines whether to add (True) or delete (False) this program to/from the firewall
def __configureFW__(workingDir, projectName, isReleaseBuild, addToFW=True):
if isReleaseBuild == None:
if projectName[-4:] == ".exe":
projectName = projectName[:-4]
path = "%s%s%s" % (workingDir, os.sep, projectName)
elif isReleaseBuild:
path = "%s%s%s%srelease%s%s" % (workingDir, os.sep, projectName, os.sep, os.sep, projectName)
else:
path = "%s%s%s%sdebug%s%s" % (workingDir, os.sep, projectName, os.sep, os.sep, projectName)
if addToFW:
mode = "add"
enable = "ENABLE"
else:
mode = "delete"
enable = ""
return subprocess.call('netsh firewall %s allowedprogram "%s.exe" %s %s' % (mode, path, projectName, enable))
# helper to check whether win firewall is running or not
# this doesn't check for other firewalls!
def __isWinFirewallRunning__():
global fireWallState
if fireWallState != None:
return fireWallState
    if not platform.system() in ('Microsoft', 'Windows'):
fireWallState = False
return False
result = getOutputFromCmdline("netsh firewall show state")
for line in result.splitlines():
if "Operational mode" in line:
fireWallState = not "Disable" in line
return fireWallState
return None
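# Illustrative 'netsh firewall show state' output line matched above
# (wording may vary by Windows version):
#   Operational mode                  = Enable
# which sets fireWallState to True.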
# this function adds the given executable as an attachable AUT
# Bad: executable/port could be empty strings - you should be aware of this
def addExecutableAsAttachableAUT(executable, port, host=None):
if not __checkParamsForAttachableAUT__(executable, port):
return False
if host == None:
host = "localhost"
squishSrv = __getSquishServer__()
if (squishSrv == None):
return False
result = subprocess.call('%s --config addAttachableAUT "%s" %s:%s' % (squishSrv, executable, host, port), shell=True)
if result == 0:
test.passes("Added %s as attachable AUT" % executable)
else:
test.fail("Failed to add %s as attachable AUT" % executable)
return result == 0
# this function removes the given executable as an attachable AUT
# Bad: executable/port could be empty strings - you should be aware of this
def removeExecutableAsAttachableAUT(executable, port, host=None):
if not __checkParamsForAttachableAUT__(executable, port):
return False
if host == None:
host = "localhost"
squishSrv = __getSquishServer__()
if (squishSrv == None):
return False
result = subprocess.call('%s --config removeAttachableAUT "%s" %s:%s' % (squishSrv, executable, host, port), shell=True)
if result == 0:
test.passes("Removed %s as attachable AUT" % executable)
else:
test.fail("Failed to remove %s as attachable AUT" % executable)
return result == 0
def __checkParamsForAttachableAUT__(executable, port):
return port != None and executable != None
def __getSquishServer__():
squishSrv = currentApplicationContext().environmentVariable("SQUISH_PREFIX")
if (squishSrv == ""):
test.fatal("SQUISH_PREFIX isn't set - leaving test")
return None
return os.path.abspath(squishSrv + "/bin/squishserver")
|
mornelon/QtCreator_compliments
|
tests/system/shared/hook_utils.py
|
Python
|
lgpl-2.1
| 19,422
|
"""
Current driven domain-wall motion with constant current and spin accumulation.
"""
# Copyright (C) 2011-2015 Claas Abert
#
# This file is part of magnum.fe.
#
# magnum.fe is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# magnum.fe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with magnum.fe. If not, see <http://www.gnu.org/licenses/>.
#
# Last modified by Claas Abert, 2015-02-16
from magnumfe import *
#######################################
#### DEFINE MESH, STATE AND MATERIAL
#######################################
mesh = BoxMesh(-600.0/2, -100.0/2, -10.0/2, 600.0/2, 100.0/2, 10.0/2, 120, 20, 1)
state = State(mesh, scale = 1e-9,
material = Material(
alpha = 0.1,
ms = 8e5,
Aex = 1.3e-11,
D0 = 1e-3,
beta = 0.9,
beta_prime = 0.8,
lambda_sf = 10e-9,
lambda_j = 4e-9,
c = 3.125e-3
),
m = Expression(('1.0 - 2*(x[0] < 0.0)', 'x[0] > -10.0 && x[0] < 10.0', '0.0')),
s = Constant((0.0, 0.0, 0.0)),
j = Constant((0.0, 0.0, 0.0))
)
# normalize since initial configuration is not normalized
state.m.normalize()
# setup integrators
llg = LLGAlougesProject([
ExchangeField(),
DemagField("FK"),
SpinTorque()
])
spindiff = SpinDiffusion()
# relax
for j in range(200): state.step(llg, 1e-12)
# apply constant current
state.j = Constant((3e12, 0, 0))
state.t = 0.0
# prepare log files
mfile = File("data/m.pvd")
sfile = File("data/s.pvd")
for j in range(1000):
# save fields every 10th step
if j % 10 == 0:
mfile << (state.m, state.t)
sfile << (state.s, state.t)
# calculate next step
state.step([llg, spindiff], 1e-12)
|
micromagnetics/magnum.fe
|
examples/current_wall_motion/run.py
|
Python
|
lgpl-3.0
| 2,142
|
from django import forms
from . import models
class GroupForm(forms.ModelForm):
class Meta:
model = models.Group
fields = '__all__'
|
nerosketch/djing
|
group_app/forms.py
|
Python
|
unlicense
| 154
|
from __future__ import absolute_import
import logging
import os
import shlex
import shutil
import sys
import traceback
from flask import current_app
from subprocess import PIPE, Popen, STDOUT
from uuid import uuid1
from freight.exceptions import CommandError
class Workspace(object):
log = logging.getLogger('workspace')
def __init__(self, path, log=None):
self.path = path
if log is not None:
self.log = log
def whereis(self, program, env):
for path in env.get('PATH', '').split(':'):
if os.path.exists(os.path.join(path, program)) and \
not os.path.isdir(os.path.join(path, program)):
return os.path.join(path, program)
return None
def _get_writer(self, pipe):
if not isinstance(pipe, int):
pipe = pipe.fileno()
return os.fdopen(pipe, 'w')
def _run_process(self, command, *args, **kwargs):
stdout = kwargs.get('stdout', sys.stdout)
stderr = kwargs.get('stderr', sys.stderr)
kwargs.setdefault('cwd', self.path)
if isinstance(command, basestring):
command = shlex.split(command)
command = map(str, command)
env = os.environ.copy()
env['PYTHONUNBUFFERED'] = '1'
if kwargs.get('env'):
for key, value in kwargs['env'].iteritems():
env[key] = value
kwargs['env'] = env
kwargs['bufsize'] = 0
self.log.info('Running {}'.format(command))
try:
proc = Popen(command, *args, **kwargs)
except OSError as exc:
if not self.whereis(command[0], env):
msg = 'ERROR: Command not found: {}'.format(command[0])
else:
msg = traceback.format_exc()
raise CommandError(command, 1, stdout=None, stderr=msg)
return proc
def capture(self, command, *args, **kwargs):
kwargs['stdout'] = PIPE
kwargs['stderr'] = STDOUT
proc = self._run_process(command, *args, **kwargs)
(stdout, stderr) = proc.communicate()
if proc.returncode != 0:
raise CommandError(command, proc.returncode, stdout, stderr)
return stdout
def run(self, command, *args, **kwargs):
proc = self._run_process(command, *args, **kwargs)
proc.wait()
if proc.returncode != 0:
raise CommandError(command, proc.returncode)
def remove(self):
if os.path.exists(self.path):
shutil.rmtree(self.path)
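# Illustrative usage (hypothetical command):
#   ws = Workspace('/tmp/build')
#   sha = ws.capture('git rev-parse HEAD')  # raises CommandError if rc != 0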
class TemporaryWorkspace(Workspace):
def __init__(self, *args, **kwargs):
path = os.path.join(
current_app.config['WORKSPACE_ROOT'],
'freight-workspace-{}'.format(uuid1().hex),
)
super(TemporaryWorkspace, self).__init__(path, *args, **kwargs)
|
klynton/freight
|
freight/utils/workspace.py
|
Python
|
apache-2.0
| 2,839
|
# -*- coding: utf-8 -*-
"""The Virtual File System (VFS) extent."""
class Extent(object):
"""Extent.
Attributes:
extent_type (str): type of the extent, for example EXTENT_TYPE_SPARSE.
offset (int): offset of the extent relative from the start of the file
system in bytes.
size (int): size of the extent in bytes.
"""
def __init__(self, extent_type=None, offset=None, size=None):
"""Initializes an extent.
Args:
extent_type (Optional[str]): type of the extent, for example
EXTENT_TYPE_SPARSE.
offset (Optional[int]): offset of the extent relative from the start of
the file system in bytes.
      size (Optional[int]): size of the extent in bytes.
"""
super(Extent, self).__init__()
self.extent_type = extent_type
self.offset = offset
self.size = size
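# Illustrative usage: a 4096-byte sparse extent at the start of the file
# system:
#   extent = Extent(extent_type='EXTENT_TYPE_SPARSE', offset=0, size=4096)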
|
joachimmetz/dfvfs
|
dfvfs/vfs/extent.py
|
Python
|
apache-2.0
| 844
|
#!/usr/bin/env python
__author__ = 'greghines'
import numpy as np
import os
import pymongo
import sys
import cPickle as pickle
import bisect
import random
import csv
import matplotlib.pyplot as plt
if os.path.exists("/home/ggdhines"):
base_directory = "/home/ggdhines"
else:
base_directory = "/home/greg"
def index(a, x):
'Locate the leftmost value exactly equal to x'
i = bisect.bisect_left(a, x)
if i != len(a) and a[i] == x:
return i
raise ValueError
sys.path.append(base_directory+"/github/reduction/experimental/classifier")
sys.path.append(base_directory+"/github/pyIBCC/python")
import ibcc
from iterativeEM import IterativeEM
client = pymongo.MongoClient()
db = client['condor_2014-11-23']
classification_collection = db["condor_classifications"]
subject_collection = db["condor_subjects"]
gold = pickle.load(open(base_directory+"/condor_gold.pickle","rb"))
gold.sort(key = lambda x:x[1])
to_sample_from = (zip(*gold)[0])[1301:]
sample = random.sample(to_sample_from,100)
big_userList = []
big_subjectList = []
animal_count = 0
f = open(base_directory+"/Databases/condor_ibcc.csv","wb")
f.write("a,b,c\n")
alreadyDone = []
subjectVote = {}
gold_condor = []
only_one = []
vote_list = []
for count,zooniverse_id in enumerate(sample):
subject = subject_collection.find_one({"zooniverse_id":zooniverse_id})
if subject["classification_count"] < 3:
print "**"
only_one.append(zooniverse_id)
continue
print count
#gold standard
gold_classification = classification_collection.find_one({"user_name":"wreness", "subjects.zooniverse_id":zooniverse_id})
assert gold_classification["tutorial"] == False
found_condor = False
try:
mark_index = [ann.keys() for ann in gold_classification["annotations"]].index(["marks",])
markings = gold_classification["annotations"][mark_index].values()[0]
try:
            for animal in markings.values():
                animal_type = animal["animal"]
                if animal_type == "condor":
                    found_condor = True
                    break
except KeyError:
continue
except ValueError:
pass
if found_condor:
gold_condor.append(1)
else:
gold_condor.append(0)
alreadyDone = []
classification_count = 0
for classification in classification_collection.find({"subjects.zooniverse_id":zooniverse_id}):
if "user_name" in classification:
user = classification["user_name"]
else:
user = classification["user_ip"]
#print user
if ("user_name" in classification) and (classification["user_name"] == "wreness"):
continue
if user in alreadyDone:
continue
classification_count += 1
if classification_count == 3:
break
alreadyDone.append(user)
if not(user in big_userList):
big_userList.append(user)
if not(zooniverse_id in big_subjectList):
big_subjectList.append(zooniverse_id)
user_index = big_userList.index(user)
subject_index = big_subjectList.index(zooniverse_id)
try:
mark_index = [ann.keys() for ann in classification["annotations"]].index(["marks",])
markings = classification["annotations"][mark_index].values()[0]
found = False
for animal in markings.values():
animal_type = animal["animal"]
if animal_type in ["condor"]:
found = True
break
if found:
vote_list.append((user_index,subject_index,1))
f.write(str(user_index) + ","+str(subject_index) + ",1\n")
if not(zooniverse_id in subjectVote):
subjectVote[zooniverse_id] = [1]
else:
subjectVote[zooniverse_id].append(1)
else:
vote_list.append((user_index,subject_index,0))
f.write(str(user_index) + ","+str(subject_index) + ",0\n")
if not(zooniverse_id in subjectVote):
subjectVote[zooniverse_id] = [0]
else:
subjectVote[zooniverse_id].append(0)
except (ValueError,KeyError):
f.write(str(user_index) + ","+str(subject_index) + ",0\n")
if not(zooniverse_id in subjectVote):
subjectVote[zooniverse_id] = [0]
else:
subjectVote[zooniverse_id].append(0)
if classification_count == 0:
print subject
assert classification_count > 0
condor_count = 0.
total_count = 0.
false_positives = []
true_positives = []
false_negatives = []
true_negatives = []
confusion = [[0.,0.],[0.,0.]]
for votes in subjectVote.values():
if np.mean(votes) >= 0.5:
condor_count += 1
confusion[1][1] += np.mean(votes)
confusion[1][0] += 1 - np.mean(votes)
true_positives.append(np.mean(votes))
#false_negatives.append(1-np.mean(votes))
else:
#false_positives.append(np.mean(votes))
true_negatives.append(1-np.mean(votes))
confusion[0][0] += 1 - np.mean(votes)
confusion[0][1] += np.mean(votes)
total_count += 1
pp = condor_count / total_count
print confusion
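# Clamp every confusion count to at least 1 so that the alpha0 matrix written
# below stays a strictly positive (and therefore valid) Dirichlet prior for IBCC.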
confusion = [[max(int(confusion[0][0]),1),max(int(confusion[0][1]),1)],[max(int(confusion[1][0]),1),max(int(confusion[1][1]),1)]]
print confusion
print pp
f.close()
with open(base_directory+"/Databases/condor_ibcc.py","wb") as f:
f.write("import numpy as np\n")
f.write("scores = np.array([0,1])\n")
f.write("nScores = len(scores)\n")
f.write("nClasses = 2\n")
f.write("inputFile = \""+base_directory+"/Databases/condor_ibcc.csv\"\n")
f.write("outputFile = \""+base_directory+"/Databases/condor_ibcc.out\"\n")
f.write("confMatFile = \""+base_directory+"/Databases/condor_ibcc.mat\"\n")
f.write("nu0 = np.array(["+str(int((1-pp)*100))+","+str(int(pp*100))+"])\n")
f.write("alpha0 = np.array("+str(confusion)+")\n")
#f.write("alpha0 = np.array([[185,1],[6,52]])\n")
#f.write("alpha0 = np.array([[3,1],[1,3]])\n")
#start by removing all temp files
try:
os.remove(base_directory+"/Databases/condor_ibcc.out")
except OSError:
pass
try:
os.remove(base_directory+"/Databases/condor_ibcc.mat")
except OSError:
pass
try:
os.remove(base_directory+"/Databases/condor_ibcc.csv.dat")
except OSError:
pass
#pickle.dump((big_subjectList,big_userList),open(base_directory+"/Databases/tempOut.pickle","wb"))
ibcc.runIbcc(base_directory+"/Databases/condor_ibcc.py")
values = []
errors = 0
low = 0
X_positive = []
X_negative = []
with open(base_directory+"/Databases/condor_ibcc.out","rb") as f:
ibcc_results = csv.reader(f, delimiter=' ')
for ii,row in enumerate(ibcc_results):
if ii == 20000:
break
wreness_condor = gold_condor[ii]
ibcc_condor = float(row[2])
if wreness_condor == 0:
X_negative.append(ibcc_condor)
else:
X_positive.append(ibcc_condor)
#print X_negative
# print X_positive
# plt.hist([X_positive,X_negative],10)
# plt.show()
alpha_list = X_negative[:]
alpha_list.extend(X_positive)
alpha_list.sort()
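# The loop below sweeps the decision threshold alpha over every observed
# score: for each alpha it records the fraction of positives scoring >= alpha
# (true positive rate) against the fraction of negatives scoring >= alpha
# (false positive rate), i.e. one ROC point per threshold.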
roc_X = []
roc_Y = []
for alpha in alpha_list:
positive_count = sum([1 for x in X_positive if x >= alpha])
positive_rate = positive_count/float(len(X_positive))
negative_count = sum([1 for x in X_negative if x >= alpha])
negative_rate = negative_count/float(len(X_negative))
roc_X.append(negative_rate)
roc_Y.append(positive_rate)
#print roc_X
plt.plot(roc_X,roc_Y,color="red")
X_positive = []
X_negative = []
#repeat with MV
for subject_index,zooniverse_id in enumerate(big_subjectList):
votes = subjectVote[zooniverse_id]
wreness_condor = gold_condor[subject_index]
if wreness_condor == 0:
X_negative.append(np.mean(votes))
else:
X_positive.append(np.mean(votes))
alpha_list = X_negative[:]
alpha_list.extend(X_positive)
alpha_list.sort()
roc_X = []
roc_Y = []
for alpha in alpha_list:
positive_count = sum([1 for x in X_positive if x >= alpha])
positive_rate = positive_count/float(len(X_positive))
negative_count = sum([1 for x in X_negative if x >= alpha])
negative_rate = negative_count/float(len(X_negative))
roc_X.append(negative_rate)
roc_Y.append(positive_rate)
#print roc_X
plt.plot(roc_X,roc_Y,color="green")
classify = IterativeEM()
classify.__classify__(vote_list,2)
estimates = classify.__getEstimates__()
X_positive = []
X_negative = []
for subject_index,zooniverse_id in enumerate(big_subjectList):
probability = estimates[subject_index]
wreness_condor = gold_condor[subject_index]
if wreness_condor == 0:
X_negative.append(probability)
else:
X_positive.append(probability)
alpha_list = X_negative[:]
alpha_list.extend(X_positive)
alpha_list.sort()
roc_X = []
roc_Y = []
for alpha in alpha_list:
positive_count = sum([1 for x in X_positive if x >= alpha])
positive_rate = positive_count/float(len(X_positive))
negative_count = sum([1 for x in X_negative if x >= alpha])
negative_rate = negative_count/float(len(X_negative))
roc_X.append(negative_rate)
roc_Y.append(positive_rate)
#print roc_X
plt.plot(roc_X,roc_Y,color="blue")
#plt.xlim((0,1.05))
plt.plot((0,1),(0,1),'--')
plt.xlabel("False Positive Rate")
plt.ylabel("True Positive Rate")
#plt.plot([0.058],[0.875],'o')
plt.show()
|
camallen/aggregation
|
experimental/condor/presentation/condor_IBCC.py
|
Python
|
apache-2.0
| 9,662
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple datastore view and interactive console, for use in dev_appserver."""
import cgi
import csv
import cStringIO
import datetime
import logging
import math
import mimetypes
import os
import os.path
import pickle
import pprint
import random
import sys
import time
import traceback
import types
import urllib
import urlparse
import wsgiref.handlers
from google.appengine.api import datastore
from google.appengine.api import datastore_admin
from google.appengine.api import datastore_types
from google.appengine.api import datastore_errors
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
_DEBUG = True
class ImageHandler(webapp.RequestHandler):
"""Serves a static image.
This exists because we don't want to burden the user with specifying
a static file handler for the image resources used by the admin tool.
"""
PATH = '/images/.*'
def get(self):
image_name = os.path.basename(self.request.path)
content_type, encoding = mimetypes.guess_type(image_name)
if not content_type or not content_type.startswith('image/'):
logging.debug('image_name=%r, content_type=%r, encoding=%r',
image_name, content_type, encoding)
self.error(404)
return
directory = os.path.dirname(__file__)
path = os.path.join(directory, 'templates', 'images', image_name)
try:
image_stream = open(path, 'rb')
except IOError, e:
logging.error('Cannot open image %s: %s', image_name, e)
self.error(404)
return
try:
image_data = image_stream.read()
finally:
image_stream.close()
self.response.headers['Content-Type'] = content_type
self.response.out.write(image_data)
class BaseRequestHandler(webapp.RequestHandler):
"""Supplies a common template generation function.
When you call generate(), we augment the template variables supplied with
the current user in the 'user' variable and the current webapp request
in the 'request' variable.
"""
def generate(self, template_name, template_values={}):
base_path = self.base_path()
values = {
'application_name': self.request.environ['APPLICATION_ID'],
'user': users.get_current_user(),
'request': self.request,
'home_path': base_path + DefaultPageHandler.PATH,
'datastore_path': base_path + DatastoreQueryHandler.PATH,
'datastore_edit_path': base_path + DatastoreEditHandler.PATH,
'datastore_batch_edit_path': base_path + DatastoreBatchEditHandler.PATH,
'interactive_path': base_path + InteractivePageHandler.PATH,
'interactive_execute_path': base_path + InteractiveExecuteHandler.PATH,
'memcache_path': base_path + MemcachePageHandler.PATH,
}
values.update(template_values)
directory = os.path.dirname(__file__)
path = os.path.join(directory, os.path.join('templates', template_name))
self.response.out.write(template.render(path, values, debug=_DEBUG))
def base_path(self):
"""Returns the base path of this admin app, which is chosen by the user.
The user specifies which paths map to this application in their app.cfg.
You can get that base path with this method. Combine with the constant
paths specified by the classes to construct URLs.
"""
path = self.__class__.PATH
return self.request.path[:-len(path)]
def filter_url(self, args):
"""Filters the current URL to only have the given list of arguments.
For example, if your URL is /search?q=foo&num=100&start=10, then
self.filter_url(['start', 'num']) => /search?num=100&start=10
    self.filter_url(['q']) => /search?q=foo
self.filter_url(['random']) => /search?
"""
queries = []
for arg in args:
value = self.request.get(arg)
if value:
queries.append(arg + '=' + urllib.quote_plus(self.request.get(arg)))
return self.request.path + '?' + '&'.join(queries)
def in_production(self):
"""Detects if app is running in production.
Returns a boolean.
"""
server_software = os.environ['SERVER_SOFTWARE']
return not server_software.startswith('Development')
class DefaultPageHandler(BaseRequestHandler):
"""Redirects to the Datastore application by default."""
PATH = '/'
def get(self):
if self.request.path.endswith('/'):
base = self.request.path[:-1]
else:
base = self.request.path
self.redirect(base + DatastoreQueryHandler.PATH)
class InteractivePageHandler(BaseRequestHandler):
"""Shows our interactive console HTML."""
PATH = '/interactive'
def get(self):
self.generate('interactive.html')
class InteractiveExecuteHandler(BaseRequestHandler):
"""Executes the Python code submitted in a POST within this context.
For obvious reasons, this should only be available to administrators
of the applications.
"""
PATH = InteractivePageHandler.PATH + '/execute'
def post(self):
save_stdout = sys.stdout
results_io = cStringIO.StringIO()
try:
sys.stdout = results_io
code = self.request.get('code')
code = code.replace("\r\n", "\n")
try:
compiled_code = compile(code, '<string>', 'exec')
exec(compiled_code, globals())
except Exception, e:
traceback.print_exc(file=results_io)
finally:
sys.stdout = save_stdout
results = results_io.getvalue()
self.generate('interactive-output.html', {'output': results})
class MemcachePageHandler(BaseRequestHandler):
"""Shows stats about memcache and query form to get values."""
PATH = '/memcache'
TYPES = ((str, str, 'String'),
(unicode, unicode, 'Unicode String'),
(bool, lambda value: MemcachePageHandler._ToBool(value), 'Boolean'),
(int, int, 'Integer'),
(long, long, 'Long Integer'),
(float, float, 'Float'))
DEFAULT_TYPESTR_FOR_NEW = 'String'
@staticmethod
def _ToBool(string_value):
"""Convert string to boolean value.
Args:
string_value: A string.
Returns:
Boolean. True if string_value is "true", False if string_value is
"false". This is case-insensitive.
Raises:
ValueError: string_value not "true" or "false".
"""
string_value_low = string_value.lower()
if string_value_low not in ('false', 'true'):
raise ValueError('invalid literal for boolean: %s' % string_value)
return string_value_low == 'true'
def _GetValueAndType(self, key):
"""Fetch value from memcache and detect its type.
Args:
key: String
Returns:
(value, type), value is a Python object or None if the key was not set in
the cache, type is a string describing the type of the value.
"""
try:
value = memcache.get(key)
except (pickle.UnpicklingError, AttributeError, EOFError, ImportError,
IndexError), e:
msg = 'Failed to retrieve value from cache: %s' % e
return msg, 'error'
if value is None:
return None, self.DEFAULT_TYPESTR_FOR_NEW
for typeobj, _, typestr in self.TYPES:
if isinstance(value, typeobj):
break
else:
typestr = 'pickled'
value = pprint.pformat(value, indent=2)
return value, typestr
def _SetValue(self, key, type_, value):
"""Convert a string value and store the result in memcache.
Args:
key: String
type_: String, describing what type the value should have in the cache.
value: String, will be converted according to type_.
Returns:
      Result of memcache.set(key, converted_value). True if value was set.
Raises:
ValueError: Value can't be converted according to type_.
"""
for _, converter, typestr in self.TYPES:
if typestr == type_:
value = converter(value)
break
else:
raise ValueError('Type %s not supported.' % type_)
return memcache.set(key, value)
def get(self):
"""Show template and prepare stats and/or key+value to display/edit."""
values = {'request': self.request,
'message': self.request.get('message')}
edit = self.request.get('edit')
key = self.request.get('key')
if edit:
key = edit
values['show_stats'] = False
values['show_value'] = False
values['show_valueform'] = True
values['types'] = [typestr for _, _, typestr in self.TYPES]
elif key:
values['show_stats'] = True
values['show_value'] = True
values['show_valueform'] = False
else:
values['show_stats'] = True
values['show_valueform'] = False
values['show_value'] = False
if key:
values['key'] = key
values['value'], values['type'] = self._GetValueAndType(key)
values['key_exists'] = values['value'] is not None
if values['type'] in ('pickled', 'error'):
values['writable'] = False
else:
values['writable'] = True
if values['show_stats']:
memcache_stats = memcache.get_stats()
if not memcache_stats:
memcache_stats = {'hits': 0, 'misses': 0, 'byte_hits': 0, 'items': 0,
'bytes': 0, 'oldest_item_age': 0}
values['stats'] = memcache_stats
try:
hitratio = memcache_stats['hits'] * 100 / (memcache_stats['hits']
+ memcache_stats['misses'])
except ZeroDivisionError:
hitratio = 0
values['hitratio'] = hitratio
delta_t = datetime.timedelta(seconds=memcache_stats['oldest_item_age'])
values['oldest_item_age'] = datetime.datetime.now() - delta_t
self.generate('memcache.html', values)
def _urlencode(self, query):
"""Encode a dictionary into a URL query string.
In contrast to urllib this encodes unicode characters as UTF8.
Args:
query: Dictionary of key/value pairs.
Returns:
String.
"""
return '&'.join('%s=%s' % (urllib.quote_plus(k.encode('utf8')),
urllib.quote_plus(v.encode('utf8')))
for k, v in query.iteritems())
def post(self):
"""Handle modifying actions and/or redirect to GET page."""
next_param = {}
if self.request.get('action:flush'):
if memcache.flush_all():
next_param['message'] = 'Cache flushed, all keys dropped.'
else:
next_param['message'] = 'Flushing the cache failed. Please try again.'
elif self.request.get('action:display'):
next_param['key'] = self.request.get('key')
elif self.request.get('action:edit'):
next_param['edit'] = self.request.get('key')
elif self.request.get('action:delete'):
key = self.request.get('key')
result = memcache.delete(key)
if result == memcache.DELETE_NETWORK_FAILURE:
next_param['message'] = ('ERROR: Network failure, key "%s" not deleted.'
% key)
elif result == memcache.DELETE_ITEM_MISSING:
next_param['message'] = 'Key "%s" not in cache.' % key
elif result == memcache.DELETE_SUCCESSFUL:
next_param['message'] = 'Key "%s" deleted.' % key
else:
next_param['message'] = ('Unknown return value. Key "%s" might still '
'exist.' % key)
elif self.request.get('action:save'):
key = self.request.get('key')
value = self.request.get('value')
type_ = self.request.get('type')
next_param['key'] = key
try:
if self._SetValue(key, type_, value):
next_param['message'] = 'Key "%s" saved.' % key
else:
next_param['message'] = 'ERROR: Failed to save key "%s".' % key
except ValueError, e:
next_param['message'] = 'ERROR: Unable to encode value: %s' % e
elif self.request.get('action:cancel'):
next_param['key'] = self.request.get('key')
else:
next_param['message'] = 'Unknown action.'
next = self.request.path_url
if next_param:
next = '%s?%s' % (next, self._urlencode(next_param))
self.redirect(next)
class DatastoreRequestHandler(BaseRequestHandler):
"""The base request handler for our datastore admin pages.
  We provide utility functions for querying the datastore and inferring the
types of entity properties.
"""
def start(self):
"""Returns the santized "start" argument from the URL."""
return self.request.get_range('start', min_value=0, default=0)
def num(self):
"""Returns the sanitized "num" argument from the URL."""
return self.request.get_range('num', min_value=1, max_value=100,
default=10)
def execute_query(self, start=0, num=0, no_order=False):
"""Parses the URL arguments and executes the query.
We return a tuple (list of entities, total entity count).
If the appropriate URL arguments are not given, we return an empty
set of results and 0 for the entity count.
"""
kind = self.request.get('kind')
if not kind:
return ([], 0)
query = datastore.Query(kind)
order = self.request.get('order')
order_type = self.request.get('order_type')
if order and order_type:
order_type = DataType.get_by_name(order_type).python_type()
if order.startswith('-'):
direction = datastore.Query.DESCENDING
order = order[1:]
else:
direction = datastore.Query.ASCENDING
try:
query.Order((order, order_type, direction))
except datastore_errors.BadArgumentError:
pass
if not start:
start = self.start()
if not num:
num = self.num()
total = query.Count()
entities = query.Get(start + num)[start:]
return (entities, total)
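  # A hedged sketch of the URL contract execute_query() assumes (parameter
  # names come from the code above; the values are hypothetical):
  #   /datastore?kind=Greeting&order=-date&order_type=datetime&start=10&num=10
  # fetches entities 10..19 of kind Greeting, sorted by date descending.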
def get_key_values(self, entities):
"""Returns the union of key names used by the given list of entities.
We return the union as a dictionary mapping the key names to a sample
value from one of the entities for the key name.
"""
key_dict = {}
for entity in entities:
for key, value in entity.iteritems():
if key_dict.has_key(key):
key_dict[key].append(value)
else:
key_dict[key] = [value]
return key_dict
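  # Illustrative example (hypothetical entities): for
  #   [{'name': 'a', 'age': 1}, {'name': 'b'}]
  # this returns {'name': ['a', 'b'], 'age': [1]}.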
class DatastoreQueryHandler(DatastoreRequestHandler):
"""Our main request handler that executes queries and lists entities.
We use execute_query() in our base request handler to parse URL arguments
and execute the datastore query.
"""
PATH = '/datastore'
def get_kinds(self):
"""Get sorted list of kind names the datastore knows about.
This should only be called in the development environment as GetSchema is
expensive and no caching is done.
"""
schema = datastore_admin.GetSchema()
kinds = []
for entity_proto in schema:
kinds.append(entity_proto.key().path().element_list()[-1].type())
kinds.sort()
return kinds
def get(self):
"""Formats the results from execute_query() for datastore.html.
The only complex part of that process is calculating the pager variables
to generate the Gooooogle pager at the bottom of the page.
"""
result_set, total = self.execute_query()
key_values = self.get_key_values(result_set)
keys = key_values.keys()
keys.sort()
headers = []
for key in keys:
sample_value = key_values[key][0]
headers.append({
'name': key,
'type': DataType.get(sample_value).name(),
})
entities = []
edit_path = self.base_path() + DatastoreEditHandler.PATH
for entity in result_set:
attributes = []
for key in keys:
if entity.has_key(key):
raw_value = entity[key]
value = DataType.get(raw_value).format(raw_value)
short_value = DataType.get(raw_value).short_format(raw_value)
else:
value = ''
short_value = ''
attributes.append({
'name': key,
'value': value,
'short_value': short_value,
})
entities.append({
'key': str(entity.key()),
'key_name': entity.key().name(),
'key_id': entity.key().id(),
'shortened_key': str(entity.key())[:8] + '...',
'attributes': attributes,
        'edit_uri': (edit_path + '?key=' + str(entity.key()) +
                     '&kind=' + urllib.quote(self.request.get('kind')) +
                     '&next=' + urllib.quote(self.request.uri)),
})
start = self.start()
num = self.num()
max_pager_links = 8
current_page = start / num
num_pages = int(math.ceil(total * 1.0 / num))
page_start = max(math.floor(current_page - max_pager_links / 2), 0)
page_end = min(page_start + max_pager_links, num_pages)
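    # Worked example (hypothetical numbers): start=40, num=10 makes
    # current_page 4 here (zero-based; bumped to 5 for display below); with
    # total=123, num_pages = ceil(123 / 10) = 13, so the pager shows pages
    # 1..8 around the current page.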
pages = []
for page in range(page_start + 1, page_end + 1):
pages.append({
'number': page,
'start': (page - 1) * num,
})
current_page += 1
in_production = self.in_production()
if in_production:
kinds = None
else:
kinds = self.get_kinds()
values = {
'request': self.request,
'in_production': in_production,
'kinds': kinds,
'kind': self.request.get('kind'),
'order': self.request.get('order'),
'headers': headers,
'entities': entities,
'message': self.request.get('msg'),
'pages': pages,
'current_page': current_page,
'num': num,
'next_start': -1,
'prev_start': -1,
'start': start,
'total': total,
'start_base_url': self.filter_url(['kind', 'order', 'order_type',
'num']),
'order_base_url': self.filter_url(['kind', 'num']),
}
if current_page > 1:
values['prev_start'] = int((current_page - 2) * num)
if current_page < num_pages:
values['next_start'] = int(current_page * num)
self.generate('datastore.html', values)
class DatastoreBatchEditHandler(DatastoreRequestHandler):
"""Request handler for a batch operation on entities.
Supports deleting multiple entities by key, then redirecting to another url.
"""
PATH = DatastoreQueryHandler.PATH + '/batchedit'
def post(self):
kind = self.request.get('kind')
keys = []
num_keys = int(self.request.get('numkeys'))
for i in xrange(1, num_keys+1):
key = self.request.get('key%d' % i)
if key:
keys.append(key)
if self.request.get('action') == 'Delete':
num_deleted = 0
for key in keys:
datastore.Delete(datastore.Key(key))
num_deleted = num_deleted + 1
message = '%d entit%s deleted.' % (
num_deleted, ('ies', 'y')[num_deleted == 1])
self.redirect(
'%s&msg=%s' % (self.request.get('next'), urllib.quote_plus(message)))
return
self.error(404)
class DatastoreEditHandler(DatastoreRequestHandler):
"""Request handler for the entity create/edit form.
We determine how to generate a form to edit an entity by doing a query
on the entity kind and looking at the set of keys and their types in
the result set. We use the DataType subclasses for those introspected types
to generate the form and parse the form results.
"""
PATH = DatastoreQueryHandler.PATH + '/edit'
def get(self):
kind = self.request.get('kind')
sample_entities = self.execute_query()[0]
if len(sample_entities) < 1:
next_uri = self.request.get('next')
kind_param = 'kind=%s' % kind
if not kind_param in next_uri:
if '?' in next_uri:
next_uri += '&' + kind_param
else:
next_uri += '?' + kind_param
self.redirect(next_uri)
return
entity_key = self.request.get('key')
if entity_key:
key_instance = datastore.Key(entity_key)
entity_key_name = key_instance.name()
entity_key_id = key_instance.id()
parent_key = key_instance.parent()
entity = datastore.Get(key_instance)
else:
key_instance = None
entity_key_name = None
entity_key_id = None
parent_key = None
entity = None
if parent_key:
parent_kind = parent_key.kind()
else:
parent_kind = None
fields = []
key_values = self.get_key_values(sample_entities)
for key, sample_values in key_values.iteritems():
if entity and entity.has_key(key):
data_type = DataType.get(entity[key])
else:
data_type = DataType.get(sample_values[0])
name = data_type.name() + "|" + key
if entity and entity.has_key(key):
value = entity[key]
else:
value = None
field = data_type.input_field(name, value, sample_values)
fields.append((key, data_type.name(), field))
self.generate('datastore_edit.html', {
'kind': kind,
'key': entity_key,
'key_name': entity_key_name,
'key_id': entity_key_id,
'fields': fields,
'focus': self.request.get('focus'),
'next': self.request.get('next'),
'parent_key': parent_key,
'parent_kind': parent_kind,
})
def post(self):
kind = self.request.get('kind')
entity_key = self.request.get('key')
if entity_key:
if self.request.get('action') == 'Delete':
datastore.Delete(datastore.Key(entity_key))
self.redirect(self.request.get('next'))
return
entity = datastore.Get(datastore.Key(entity_key))
else:
entity = datastore.Entity(kind)
args = self.request.arguments()
for arg in args:
bar = arg.find('|')
if bar > 0:
data_type_name = arg[:bar]
field_name = arg[bar + 1:]
form_value = self.request.get(arg)
data_type = DataType.get_by_name(data_type_name)
if entity and entity.has_key(field_name):
old_formatted_value = data_type.format(entity[field_name])
if old_formatted_value == form_value:
continue
if len(form_value) > 0:
value = data_type.parse(form_value)
entity[field_name] = value
elif entity.has_key(field_name):
del entity[field_name]
datastore.Put(entity)
self.redirect(self.request.get('next'))
class DataType(object):
"""A DataType represents a data type in the datastore.
  Each DataType subtype defines five methods:
format: returns a formatted string for a datastore value
input_field: returns a string HTML <input> element for this DataType
name: the friendly string name of this DataType
parse: parses the formatted string representation of this DataType
python_type: the canonical Python type for this datastore type
We use DataType instances to display formatted values in our result lists,
  and we use input_field/format/parse to generate forms and parse the results
from those forms to allow editing of entities.
"""
@staticmethod
def get(value):
return _DATA_TYPES[value.__class__]
@staticmethod
def get_by_name(name):
return _NAMED_DATA_TYPES[name]
def format(self, value):
return str(value)
def short_format(self, value):
return self.format(value)
def input_field(self, name, value, sample_values):
if value is not None:
string_value = self.format(value)
else:
string_value = ''
    return '<input class="%s" name="%s" type="text" size="%d" value="%s"/>' % (
        cgi.escape(self.name()), cgi.escape(name), self.input_field_size(),
        cgi.escape(string_value, True))
def input_field_size(self):
return 30
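# Minimal usage sketch (illustrative only; relies on the _DATA_TYPES and
# _NAMED_DATA_TYPES registries defined at the bottom of this module):
#   dt = DataType.get(42)              # -> the shared IntType() instance
#   dt.format(42)                      # -> '42'
#   dt.parse('42')                     # -> 42
#   DataType.get_by_name('int') is dt  # -> True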
class StringType(DataType):
def format(self, value):
return value
def input_field(self, name, value, sample_values):
multiline = False
if value:
multiline = len(value) > 255 or value.find('\n') >= 0
if not multiline:
for sample_value in sample_values:
if len(sample_value) > 255 or sample_value.find('\n') >= 0:
multiline = True
break
if multiline:
if not value:
value = ''
      return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (
          cgi.escape(name), cgi.escape(value))
else:
return DataType.input_field(self, name, value, sample_values)
def name(self):
return 'string'
def parse(self, value):
return value
def python_type(self):
return str
def input_field_size(self):
return 50
class TextType(StringType):
def name(self):
return 'Text'
def input_field(self, name, value, sample_values):
    return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (
        cgi.escape(name), cgi.escape(str(value)))
def parse(self, value):
return datastore_types.Text(value)
def python_type(self):
return datastore_types.Text
class BlobType(StringType):
def name(self):
return 'Blob'
def input_field(self, name, value, sample_values):
return '<binary>'
def format(self, value):
return '<binary>'
def python_type(self):
return datastore_types.Blob
class TimeType(DataType):
_FORMAT = '%Y-%m-%d %H:%M:%S'
def format(self, value):
return value.strftime(TimeType._FORMAT)
def name(self):
return 'datetime'
def parse(self, value):
return datetime.datetime(*(time.strptime(value, TimeType._FORMAT)[0:6]))
def python_type(self):
return datetime.datetime
class ListType(DataType):
def format(self, value):
value_file = cStringIO.StringIO()
try:
writer = csv.writer(value_file)
writer.writerow(value)
return value_file.getvalue()
finally:
value_file.close()
def name(self):
return 'list'
def parse(self, value):
value_file = cStringIO.StringIO(value)
try:
reader = csv.reader(value_file)
return reader.next()
finally:
value_file.close()
def python_type(self):
return list
class BoolType(DataType):
def name(self):
return 'bool'
  def input_field(self, name, value, sample_values):
    selected = {None: '', False: '', True: ''}
    selected[value] = "selected"
    return """<select class="%s" name="%s">
    <option %s value=''></option>
    <option %s value='0'>False</option>
    <option %s value='1'>True</option></select>""" % (
        cgi.escape(self.name()), cgi.escape(name), selected[None],
        selected[False], selected[True])
def parse(self, value):
    if value.lower() == 'true':
      return True
    if value.lower() == 'false':
      return False
return bool(int(value))
def python_type(self):
return bool
class NumberType(DataType):
def input_field_size(self):
return 10
class IntType(NumberType):
def name(self):
return 'int'
def parse(self, value):
return int(value)
def python_type(self):
return int
class LongType(NumberType):
def name(self):
return 'long'
def parse(self, value):
return long(value)
def python_type(self):
return long
class FloatType(NumberType):
def name(self):
return 'float'
def parse(self, value):
return float(value)
def python_type(self):
return float
class UserType(DataType):
def name(self):
return 'User'
def parse(self, value):
return users.User(value)
def python_type(self):
return users.User
def input_field_size(self):
return 15
class ReferenceType(DataType):
def name(self):
return 'Key'
def short_format(self, value):
return str(value)[:8] + '...'
def parse(self, value):
return datastore_types.Key(value)
def python_type(self):
return datastore_types.Key
def input_field_size(self):
return 85
class EmailType(StringType):
def name(self):
return 'Email'
def parse(self, value):
return datastore_types.Email(value)
def python_type(self):
return datastore_types.Email
class CategoryType(StringType):
def name(self):
return 'Category'
def parse(self, value):
return datastore_types.Category(value)
def python_type(self):
return datastore_types.Category
class LinkType(StringType):
def name(self):
return 'Link'
def parse(self, value):
return datastore_types.Link(value)
def python_type(self):
return datastore_types.Link
class GeoPtType(DataType):
def name(self):
return 'GeoPt'
def parse(self, value):
return datastore_types.GeoPt(value)
def python_type(self):
return datastore_types.GeoPt
class ImType(DataType):
def name(self):
return 'IM'
def parse(self, value):
return datastore_types.IM(value)
def python_type(self):
return datastore_types.IM
class PhoneNumberType(StringType):
def name(self):
return 'PhoneNumber'
def parse(self, value):
return datastore_types.PhoneNumber(value)
def python_type(self):
return datastore_types.PhoneNumber
class PostalAddressType(StringType):
def name(self):
return 'PostalAddress'
def parse(self, value):
return datastore_types.PostalAddress(value)
def python_type(self):
return datastore_types.PostalAddress
class RatingType(NumberType):
def name(self):
return 'Rating'
def parse(self, value):
return datastore_types.Rating(value)
def python_type(self):
return datastore_types.Rating
class NoneType(DataType):
def name(self):
return 'None'
def parse(self, value):
return None
def format(self, value):
return 'None'
_DATA_TYPES = {
types.NoneType: NoneType(),
types.StringType: StringType(),
types.UnicodeType: StringType(),
datastore_types.Text: TextType(),
datastore_types.Blob: BlobType(),
types.BooleanType: BoolType(),
types.IntType: IntType(),
types.LongType: LongType(),
types.FloatType: FloatType(),
datetime.datetime: TimeType(),
users.User: UserType(),
datastore_types.Key: ReferenceType(),
types.ListType: ListType(),
datastore_types.Email: EmailType(),
datastore_types.Category: CategoryType(),
datastore_types.Link: LinkType(),
datastore_types.GeoPt: GeoPtType(),
datastore_types.IM: ImType(),
datastore_types.PhoneNumber: PhoneNumberType(),
datastore_types.PostalAddress: PostalAddressType(),
datastore_types.Rating: RatingType(),
}
_NAMED_DATA_TYPES = {}
for data_type in _DATA_TYPES.values():
_NAMED_DATA_TYPES[data_type.name()] = data_type
def main():
application = webapp.WSGIApplication([
('.*' + DatastoreQueryHandler.PATH, DatastoreQueryHandler),
('.*' + DatastoreEditHandler.PATH, DatastoreEditHandler),
('.*' + DatastoreBatchEditHandler.PATH, DatastoreBatchEditHandler),
('.*' + InteractivePageHandler.PATH, InteractivePageHandler),
('.*' + InteractiveExecuteHandler.PATH, InteractiveExecuteHandler),
('.*' + MemcachePageHandler.PATH, MemcachePageHandler),
('.*' + ImageHandler.PATH, ImageHandler),
('.*', DefaultPageHandler),
], debug=_DEBUG)
wsgiref.handlers.CGIHandler().run(application)
import django
if django.VERSION[:2] < (0, 97):
from django.template import defaultfilters
def safe(text, dummy=None):
return text
defaultfilters.register.filter("safe", safe)
if __name__ == '__main__':
main()
|
wgrose/leanto
|
google/appengine/ext/admin/__init__.py
|
Python
|
apache-2.0
| 31,441
|
def authorized_to_manage_request(_, request, current_user, pushmaster=False):
if pushmaster or \
request['user'] == current_user or \
(request['watchers'] and current_user in request['watchers'].split(',')):
return True
return False
def sort_pickmes(_, requests, tags_order):
"""Sort pickmes based on tags_order
Args:
    _ - request handler object (unused)
    requests - a list of requests
    tags_order - order in which to sort requests
    Returns: sorted requests list
"""
def compare_requests(request1, request2):
tags1_list = request1['tags'].split(',')
tags2_list = request2['tags'].split(',')
for tag in tags_order:
tag_in_tags1 = tag in tags1_list
tag_in_tags2 = tag in tags2_list
if tag_in_tags1 == tag_in_tags2:
continue
elif tag_in_tags1:
return -1
else:
return 1
return cmp(request1['user'], request2['user'])
sorted_requests = sorted(requests, cmp=compare_requests)
return sorted_requests
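# Illustrative example (hypothetical data): with tags_order = ['urgent', 'qa'],
# a pickme tagged 'urgent' sorts before one tagged only 'qa', which in turn
# sorts before untagged pickmes; ties fall back to comparing the 'user' field.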
|
YelpArchive/pushmanager
|
pushmanager/ui_methods.py
|
Python
|
apache-2.0
| 1,099
|
#!/usr/bin/env python
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import sys, re
PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)
from androguard.core.androgen import AndroguardS
from androguard.core.analysis import analysis
TESTS_CASES = [ #'examples/android/TC/bin/classes.dex',
'examples/android/TestsAndroguard/bin/classes.dex',
]
VALUES = {
'examples/android/TestsAndroguard/bin/classes.dex' : {
"Ltests/androguard/TestInvoke; <init> ()V" : {
0x0 : ("invoke-direct" , [['v',1] , ['meth@', 4, 'Ljava/lang/Object;', '()', 'V', '<init>']]),
0xa : ("invoke-virtual", [['v',1], ['v',0] , ['meth@', 49, 'Ltests/androguard/TestInvoke;', '(I)', 'I', 'TestInvoke1']]),
},
"Ltests/androguard/TestInvoke; TestInvoke1 (I)I" : {
0x4 : ("invoke-virtual", [['v',1] , ['v',2] , ['v',0] , ['meth@', 50,'Ltests/androguard/TestInvoke;' ,'(I I)', 'I', 'TestInvoke2']]),
},
"Ltests/androguard/TestInvoke; TestInvoke2 (I I)I" : {
0x4 : ("invoke-virtual", [['v',1] , ['v',2] , ['v',3] , ['v',0] , ['meth@', 51, 'Ltests/androguard/TestInvoke;', '(I I I)', 'I', 'TestInvoke3']]),
},
"Ltests/androguard/TestInvoke; TestInvoke3 (I I I)I" : {
0x4 : ("invoke-virtual", [['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 0], ['meth@', 52, 'Ltests/androguard/TestInvoke;', '(I I I I)', 'I', 'TestInvoke4']]),
},
"Ltests/androguard/TestInvoke; TestInvoke4 (I I I I)I" : {
0xe : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['meth@', 53, 'Ltests/androguard/TestInvoke;', '(I I I I I)', 'I', 'TestInvoke5']]),
},
"Ltests/androguard/TestInvoke; TestInvoke5 (I I I I I)I" : {
0x10 : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['v', 6], ['meth@', 54, 'Ltests/androguard/TestInvoke;', '(I I I I I I)', 'I', 'TestInvoke6']]),
},
"Ltests/androguard/TestInvoke; TestInvoke6 (I I I I I I)I" : {
0x12 : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['v', 6], ['v', 7], ['meth@', 55, 'Ltests/androguard/TestInvoke;', '(I I I I I I I)', 'I', 'TestInvoke7']]),
},
"Ltests/androguard/TestInvoke; TestInvoke7 (I I I I I I I)I" : {
0x16 : ("invoke-virtual/range", [['v', 0], ['v', 1], ['v', 2], ['v', 3], ['v', 4], ['v', 5], ['v', 6], ['v', 7], ['v', 8], ['meth@', 56, 'Ltests/androguard/TestInvoke;', '(I I I I I I I I)', 'I', 'TestInvoke8']]),
},
"Ltests/androguard/TestInvoke; TestInvoke8 (I I I I I I I I)I" : {
0x0 : ("mul-int", [['v', 0], ['v', 2], ['v', 3]]),
0x4 : ("mul-int/2addr", [['v', 0], ['v', 4]]),
0x10 : ("return", [['v', 0]]),
}
},
}
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print('\t%s got: %s expected: %s' % (prefix, repr(got), repr(expected)))
def getVal(i):
op = i.get_operands()
if isinstance(op, int):
return [ op ]
elif i.get_name() == "lookupswitch":
x = []
x.append( i.get_operands().default )
for idx in range(0, i.get_operands().npairs):
off = getattr(i.get_operands(), "offset%d" % idx)
x.append( off )
return x
return [-1]
def check(a, values):
for method in a.get_methods():
key = method.get_class_name() + " " + method.get_name() + " " + method.get_descriptor()
if key not in values:
continue
print("CHECKING ...", method.get_class_name(), method.get_name(), method.get_descriptor())
code = method.get_code()
bc = code.get_bc()
idx = 0
for i in bc.get():
# print "\t", "%x(%d)" % (idx, idx), i.get_name(), i.get_operands()
if idx in values[key]:
elem = values[key][idx]
val1 = i.get_name() + "%s" % i.get_operands()
val2 = elem[0] + "%s" % elem[1]
test(val1, val2)
del values[key][idx]
idx += i.get_length()
for i in TESTS_CASES:
a = AndroguardS( i )
check( a, VALUES[i] )
x = analysis.VMAnalysis( a.get_vm() )
print(x)
|
subho007/androguard
|
tests/test_ins.py
|
Python
|
apache-2.0
| 5,377
|
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dsl_parser.interfaces import utils
INTERFACES = 'interfaces'
SOURCE_INTERFACES = 'source_interfaces'
TARGET_INTERFACES = 'target_interfaces'
NO_OP = utils.no_op()
|
codilime/cloudify-dsl-parser
|
dsl_parser/interfaces/constants.py
|
Python
|
apache-2.0
| 813
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A helper library for the runtime_configs command group."""
|
KaranToor/MA450
|
google-cloud-sdk/lib/googlecloudsdk/api_lib/deployment_manager/runtime_configs/__init__.py
|
Python
|
apache-2.0
| 657
|
#!/usr/bin/env python3
# (C) Copyright 2014, Google Inc.
# (C) Copyright 2018, James R Barlow
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script provides an easy way to execute various phases of training
# Tesseract. For a detailed description of the phases, see
# https://github.com/tesseract-ocr/tesseract/wiki/TrainingTesseract
import logging
import os
import sys
if sys.version_info < (3, 6):
    raise Exception("Python 3.6 or newer is required!")
sys.path.insert(0, os.path.dirname(__file__))
from tesstrain_utils import (
parse_flags,
initialize_fontconfig,
phase_I_generate_image,
phase_UP_generate_unicharset,
phase_E_extract_features,
make_lstmdata,
cleanup,
)
import language_specific
log = logging.getLogger()
def setup_logging_console():
log.setLevel(logging.DEBUG)
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console_formatter = logging.Formatter(
"[%(asctime)s] %(levelname)s - %(message)s", datefmt="%H:%M:%S"
)
console.setFormatter(console_formatter)
log.addHandler(console)
def setup_logging_logfile(logfile):
logfile = logging.FileHandler(logfile)
logfile.setLevel(logging.DEBUG)
logfile_formatter = logging.Formatter(
"[%(asctime)s] - %(levelname)s - %(name)s - %(message)s"
)
logfile.setFormatter(logfile_formatter)
log.addHandler(logfile)
def main():
setup_logging_console()
ctx = parse_flags()
setup_logging_logfile(ctx.log_file)
if not ctx.linedata:
log.error("--linedata_only is required since only LSTM is supported")
sys.exit(1)
log.info(f"=== Starting training for language {ctx.lang_code}")
ctx = language_specific.set_lang_specific_parameters(ctx, ctx.lang_code)
initialize_fontconfig(ctx)
phase_I_generate_image(ctx, par_factor=8)
phase_UP_generate_unicharset(ctx)
if ctx.linedata:
phase_E_extract_features(ctx, ["--psm", "6", "lstm.train"], "lstmf")
make_lstmdata(ctx)
cleanup(ctx)
log.info("All done!")
return 0
if __name__ == "__main__":
main()
# _rc0 = subprocess.call(["tlog","\n=== Starting training for language '"+str(LANG_CODE.val)+"'"],shell=True)
# _rc0 = subprocess.call(["source",os.popen("dirname "+__file__).read().rstrip("\n")+"/language-specific.sh"],shell=True)
# _rc0 = subprocess.call(["set_lang_specific_parameters",str(LANG_CODE.val)],shell=True)
# _rc0 = subprocess.call(["initialize_fontconfig"],shell=True)
# _rc0 = subprocess.call(["phase_I_generate_image","8"],shell=True)
# _rc0 = subprocess.call(["phase_UP_generate_unicharset"],shell=True)
# if (LINEDATA ):
# subprocess.call(["phase_E_extract_features"," --psm 6 lstm.train ","8","lstmf"],shell=True)
# subprocess.call(["make__lstmdata"],shell=True)
# subprocess.call(["tlog","\nCreated starter traineddata for language '"+str(LANG_CODE.val)+"'\n"],shell=True)
# subprocess.call(["tlog","\nRun lstmtraining to do the LSTM training for language '"+str(LANG_CODE.val)+"'\n"],shell=True)
# else:
# subprocess.call(["phase_D_generate_dawg"],shell=True)
# subprocess.call(["phase_E_extract_features","box.train","8","tr"],shell=True)
# subprocess.call(["phase_C_cluster_prototypes",str(TRAINING_DIR.val)+"/"+str(LANG_CODE.val)+".normproto"],shell=True)
# if (str(ENABLE_SHAPE_CLUSTERING.val) == "y" ):
# subprocess.call(["phase_S_cluster_shapes"],shell=True)
# subprocess.call(["phase_M_cluster_microfeatures"],shell=True)
# subprocess.call(["phase_B_generate_ambiguities"],shell=True)
# subprocess.call(["make__traineddata"],shell=True)
# subprocess.call(["tlog","\nCompleted training for language '"+str(LANG_CODE.val)+"'\n"],shell=True)
|
jbarlow83/tesseract
|
src/training/tesstrain.py
|
Python
|
apache-2.0
| 4,301
|
#! /usr/bin/python
import re
import os
import sys
import time
import pydas.communicator as apiMidas
import pydas.exceptions as pydasException
import uuid
import json
import shutil
from zipfile import ZipFile, ZIP_DEFLATED
from subprocess import Popen, PIPE, STDOUT
from contextlib import closing
# Load configuration file
def loadConfig(filename):
try: configfile = open(filename, "r")
except Exception, e: raise
try: configtext = configfile.read()
except Exception, e: raise
pattern = re.compile("\\n([\w_]+)[\t ]*([\w: \\\/~.-]+)")
# Find all matches to this pattern in the text of the config file
tuples = re.findall(pattern, configtext)
# Create a new dictionary and fill it: for every tuple (key, value) in
# the 'tuples' list, set ret[key] to value
ret = dict()
for x in tuples: ret[x[0]] = x[1]
# Return the fully-loaded dictionary object
return ret
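# Illustrative example (hypothetical file contents): a config file holding
#   "\nurl http://example.org/midas\nos linux"
# loads as {'url': 'http://example.org/midas', 'os': 'linux'}.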
# Set internal configuration
def setInternalConfig(email, apikey, token):
try: configfile = open('config.internal.cfg', "w")
except Exception, e: raise
configfile.write("\nemail "+email)
configfile.write("\napikey "+apikey)
configfile.write("\ntoken "+token)
configfile.close()
return
# Register a server to Midas
def registerServer():
"""
Register Server
"""
cfg = loadConfig('config.cfg')
if os.path.exists('config.internal.cfg') == False:
setInternalConfig('undefined', 'undefined', 'undefined')
cfginternal = loadConfig('config.internal.cfg')
url = cfg['url']
interfaceMidas = apiMidas.Communicator (url)
parameters = dict()
parameters['email'] = cfginternal['email']+'@example.org'
parameters['securitykey'] = cfg['securityKey']
parameters['apikey'] = cfginternal['apikey']
try: response = interfaceMidas.makeRequest('midas.remoteprocessing.registerserver', parameters)
except pydasException.PydasException, e:
parameters = dict()
parameters['securitykey'] = cfg['securityKey']
parameters['os'] = cfg['os']
try: response = interfaceMidas.makeRequest('midas.remoteprocessing.registerserver', parameters)
except pydasException.PydasException, e:
print "Unable to Register. Please check the configuration."
return False
setInternalConfig(response['email'], response['apikey'], response['token'])
print "Registered"
return True
# Register a server to Midas
def keepAliveServer():
"""
Keep Alive
"""
cfg = loadConfig('config.cfg')
cfginternal = loadConfig('config.internal.cfg')
url = cfg['url']
interfaceMidas = apiMidas.Communicator (url)
parameters = dict()
parameters['token'] = cfginternal['token']
parameters['os'] = cfg['os']
try: response = interfaceMidas.makeRequest('midas.remoteprocessing.keepaliveserver', parameters)
except pydasException.PydasException, e:
print "Keep aline failed"
print e
return False
return response
# Send results to Midas
def sendResults(file):
"""
Send Results
"""
cfg = loadConfig('config.cfg')
cfginternal = loadConfig('config.internal.cfg')
url = cfg['url']
interfaceMidas = apiMidas.Communicator (url)
parameters = dict()
parameters['token'] = cfginternal['token']
try: response = interfaceMidas.makeRequest('midas.remoteprocessing.resultsserver', parameters, file)
except pydasException.PydasException, e:
print "Unable to send results"
print e
return False
return response
# Handle Midas command
def handleMidasResponse(response):
"""
Handle response
"""
if response['action'] == 'wait':
print "Wait"
time.sleep(120)
elif response['action'] == 'process':
params = json.loads(response['params'])
script = response['script']
#Create processing folder
unique_name = str(uuid.uuid4())
pathProcessingFolder = sys.path[0]+'/tmp/'+unique_name
os.mkdir(pathProcessingFolder)
os.mkdir(pathProcessingFolder+'/script')
os.mkdir(pathProcessingFolder+'/results')
#Create Script file
try: scriptFile = open(pathProcessingFolder+'/script/script.py', "w")
except Exception, e: raise
scriptFile.write(script)
scriptFile.close()
#Create Params file
try: scriptFile = open(pathProcessingFolder+'/results/parameters.txt', "w")
except Exception, e: raise
scriptFile.write(response['params'])
scriptFile.close()
inputFiles = params['input']
cfg = loadConfig('config.cfg')
cfginternal = loadConfig('config.internal.cfg')
url = cfg['url']
interfaceMidas = apiMidas.Communicator (url)
if inputFiles:
print "Download Data"
for file in inputFiles:
interfaceMidas.downloadItem(file, pathProcessingFolder+'/script', cfginternal['token'])
print "Run script"
os.chdir(pathProcessingFolder+'/script/')
cmd = sys.executable+" "+pathProcessingFolder+'/script/script.py'
p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=False)
p.wait()
stdout = p.stdout.read()
os.chdir(sys.path[0])
#Create Log files
try: scriptFile = open(pathProcessingFolder+'/results/log.txt', "w")
except Exception, e: raise
scriptFile.write(stdout)
scriptFile.close()
outputFiles = params['output']
if outputFiles:
for file in outputFiles:
if os.path.exists(pathProcessingFolder+'/script/'+file):
os.rename(pathProcessingFolder+'/script/'+file, pathProcessingFolder+'/results/'+file)
zipdir(pathProcessingFolder+'/results', pathProcessingFolder+'/results.zip')
print "Sending results"
sendResults(pathProcessingFolder+'/results.zip')
shutil.rmtree(pathProcessingFolder)
else:
print "Error, Unable to find command"
return False
return True
def zipdir(basedir, archivename):
assert os.path.isdir(basedir)
with closing(ZipFile(archivename, "w", ZIP_DEFLATED)) as z:
for root, dirs, files in os.walk(basedir):
#NOTE: ignore empty directories
for fn in files:
absfn = os.path.join(root, fn)
zfn = absfn[len(basedir)+len(os.sep):] #XXX: relative path
z.write(absfn, zfn)
# ------ Main --------
if __name__ == "__main__":
#Set directory location
while True:
os.chdir(sys.path[0])
registered = registerServer()
# Create tmp directory
if os.path.exists(sys.path[0]+'/tmp') == False:
os.mkdir('tmp')
if registered == True:
response = keepAliveServer()
if response != False:
handleMidasResponse(response)
else:
time.sleep(120)
else:
time.sleep(120)
|
jcfr/Midas
|
modules/remoteprocessing/remotescript/main.py
|
Python
|
apache-2.0
| 6,635
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from zerver.decorator import \
REQ, has_request_variables, RequestVariableMissingError, \
RequestVariableConversionError, JsonableError
from zerver.lib.validator import (
check_string, check_dict, check_bool, check_int, check_list
)
import ujson
class DecoratorTestCase(TestCase):
def test_REQ_converter(self):
def my_converter(data):
lst = ujson.loads(data)
if not isinstance(lst, list):
raise ValueError('not a list')
if 13 in lst:
raise JsonableError('13 is an unlucky number!')
return lst
@has_request_variables
def get_total(request, numbers=REQ(converter=my_converter)):
return sum(numbers)
class Request(object):
REQUEST = {} # type: Dict[str, str]
request = Request()
with self.assertRaises(RequestVariableMissingError):
get_total(request)
request.REQUEST['numbers'] = 'bad_value'
with self.assertRaises(RequestVariableConversionError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), "Bad value for 'numbers': bad_value")
request.REQUEST['numbers'] = ujson.dumps([2, 3, 5, 8, 13, 21])
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), "13 is an unlucky number!")
request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
result = get_total(request)
self.assertEqual(result, 21)
def test_REQ_validator(self):
@has_request_variables
def get_total(request, numbers=REQ(validator=check_list(check_int))):
return sum(numbers)
class Request(object):
REQUEST = {} # type: Dict[str, str]
request = Request()
with self.assertRaises(RequestVariableMissingError):
get_total(request)
request.REQUEST['numbers'] = 'bad_value'
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), 'argument "numbers" is not valid json.')
request.REQUEST['numbers'] = ujson.dumps([1, 2, "what?", 4, 5, 6])
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), 'numbers[2] is not an integer')
request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
result = get_total(request)
self.assertEqual(result, 21)
class ValidatorTestCase(TestCase):
def test_check_string(self):
x = "hello"
self.assertEqual(check_string('x', x), None)
x = 4
self.assertEqual(check_string('x', x), 'x is not a string')
def test_check_bool(self):
x = True
self.assertEqual(check_bool('x', x), None)
x = 4
self.assertEqual(check_bool('x', x), 'x is not a boolean')
def test_check_int(self):
x = 5
self.assertEqual(check_int('x', x), None)
x = [{}]
self.assertEqual(check_int('x', x), 'x is not an integer')
def test_check_list(self):
x = 999
error = check_list(check_string)('x', x)
self.assertEqual(error, 'x is not a list')
x = ["hello", 5]
error = check_list(check_string)('x', x)
self.assertEqual(error, 'x[1] is not a string')
x = [["yo"], ["hello", "goodbye", 5]]
error = check_list(check_list(check_string))('x', x)
self.assertEqual(error, 'x[1][2] is not a string')
x = ["hello", "goodbye", "hello again"]
error = check_list(check_string, length=2)('x', x)
self.assertEqual(error, 'x should have exactly 2 items')
def test_check_dict(self):
keys = [
('names', check_list(check_string)),
('city', check_string),
]
x = {
'names': ['alice', 'bob'],
'city': 'Boston',
}
error = check_dict(keys)('x', x)
self.assertEqual(error, None)
x = 999
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x is not a dict')
x = {}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'names key is missing from x')
x = {
'names': ['alice', 'bob', {}]
}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x["names"][2] is not a string')
x = {
'names': ['alice', 'bob'],
'city': 5
}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x["city"] is not a string')
def test_encapsulation(self):
# There might be situations where we want deep
# validation, but the error message should be customized.
# This is an example.
def check_person(val):
error = check_dict([
['name', check_string],
['age', check_int],
])('_', val)
if error:
return 'This is not a valid person'
person = {'name': 'King Lear', 'age': 42}
self.assertEqual(check_person(person), None)
person = 'misconfigured data'
self.assertEqual(check_person(person), 'This is not a valid person')
|
dwrpayne/zulip
|
zerver/tests/test_decorators.py
|
Python
|
apache-2.0
| 5,333
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
from tornado import netutil
from tornado.escape import json_decode, json_encode, utf8, _unicode, recursive_unicode, native_str
from tornado import gen
from tornado.http1connection import HTTP1Connection
from tornado.httpserver import HTTPServer
from tornado.httputil import HTTPHeaders, HTTPMessageDelegate, HTTPServerConnectionDelegate, ResponseStartLine
from tornado.iostream import IOStream
from tornado.log import gen_log
from tornado.netutil import ssl_options_to_context
from tornado.simple_httpclient import SimpleAsyncHTTPClient
from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, ExpectLog, gen_test
from tornado.test.util import unittest, skipOnTravis
from tornado.web import Application, RequestHandler, asynchronous, stream_request_body
from contextlib import closing
import datetime
import gzip
import os
import shutil
import socket
import ssl
import sys
import tempfile
from io import BytesIO
def read_stream_body(stream, callback):
"""Reads an HTTP response from `stream` and runs callback with its
headers and body."""
chunks = []
class Delegate(HTTPMessageDelegate):
def headers_received(self, start_line, headers):
self.headers = headers
def data_received(self, chunk):
chunks.append(chunk)
def finish(self):
callback((self.headers, b''.join(chunks)))
conn = HTTP1Connection(stream, True)
conn.read_response(Delegate())
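# Usage sketch (mirrors the tests below): write a raw request to an IOStream,
# call read_stream_body(stream, self.stop), and self.wait() then yields a
# (headers, body) tuple once the response has been fully parsed.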
class HandlerBaseTestCase(AsyncHTTPTestCase):
def get_app(self):
return Application([('/', self.__class__.Handler)])
def fetch_json(self, *args, **kwargs):
response = self.fetch(*args, **kwargs)
response.rethrow()
return json_decode(response.body)
class HelloWorldRequestHandler(RequestHandler):
def initialize(self, protocol="http"):
self.expected_protocol = protocol
def get(self):
if self.request.protocol != self.expected_protocol:
raise Exception("unexpected protocol")
self.finish("Hello world")
def post(self):
self.finish("Got %d bytes in POST" % len(self.request.body))
# In pre-1.0 versions of openssl, SSLv23 clients always send SSLv2
# ClientHello messages, which are rejected by SSLv3 and TLSv1
# servers. Note that while the OPENSSL_VERSION_INFO was formally
# introduced in python3.2, it was present but undocumented in
# python 2.7
skipIfOldSSL = unittest.skipIf(
getattr(ssl, 'OPENSSL_VERSION_INFO', (0, 0)) < (1, 0),
"old version of ssl module and/or openssl")
class BaseSSLTest(AsyncHTTPSTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler,
dict(protocol="https"))])
class SSLTestMixin(object):
def get_ssl_options(self):
return dict(ssl_version=self.get_ssl_version(), # type: ignore
**AsyncHTTPSTestCase.get_ssl_options())
def get_ssl_version(self):
raise NotImplementedError()
def test_ssl(self):
response = self.fetch('/')
self.assertEqual(response.body, b"Hello world")
def test_large_post(self):
response = self.fetch('/',
method='POST',
body='A' * 5000)
self.assertEqual(response.body, b"Got 5000 bytes in POST")
def test_non_ssl_request(self):
# Make sure the server closes the connection when it gets a non-ssl
# connection, rather than waiting for a timeout or otherwise
# misbehaving.
with ExpectLog(gen_log, '(SSL Error|uncaught exception)'):
with ExpectLog(gen_log, 'Uncaught exception', required=False):
self.http_client.fetch(
self.get_url("/").replace('https:', 'http:'),
self.stop,
request_timeout=3600,
connect_timeout=3600)
response = self.wait()
self.assertEqual(response.code, 599)
def test_error_logging(self):
# No stack traces are logged for SSL errors.
with ExpectLog(gen_log, 'SSL Error') as expect_log:
self.http_client.fetch(
self.get_url("/").replace("https:", "http:"),
self.stop)
response = self.wait()
self.assertEqual(response.code, 599)
self.assertFalse(expect_log.logged_stack)
# Python's SSL implementation differs significantly between versions.
# For example, SSLv3 and TLSv1 throw an exception if you try to read
# from the socket before the handshake is complete, but the default
# of SSLv23 allows it.
class SSLv23Test(BaseSSLTest, SSLTestMixin):
def get_ssl_version(self):
return ssl.PROTOCOL_SSLv23
@skipIfOldSSL
class SSLv3Test(BaseSSLTest, SSLTestMixin):
def get_ssl_version(self):
return ssl.PROTOCOL_SSLv3
@skipIfOldSSL
class TLSv1Test(BaseSSLTest, SSLTestMixin):
def get_ssl_version(self):
return ssl.PROTOCOL_TLSv1
@unittest.skipIf(not hasattr(ssl, 'SSLContext'), 'ssl.SSLContext not present')
class SSLContextTest(BaseSSLTest, SSLTestMixin):
def get_ssl_options(self):
context = ssl_options_to_context(
AsyncHTTPSTestCase.get_ssl_options(self))
assert isinstance(context, ssl.SSLContext)
return context
class BadSSLOptionsTest(unittest.TestCase):
def test_missing_arguments(self):
application = Application()
self.assertRaises(KeyError, HTTPServer, application, ssl_options={
"keyfile": "/__missing__.crt",
})
def test_missing_key(self):
"""A missing SSL key should cause an immediate exception."""
application = Application()
module_dir = os.path.dirname(__file__)
existing_certificate = os.path.join(module_dir, 'test.crt')
existing_key = os.path.join(module_dir, 'test.key')
self.assertRaises((ValueError, IOError),
HTTPServer, application, ssl_options={
"certfile": "/__mising__.crt",
})
self.assertRaises((ValueError, IOError),
HTTPServer, application, ssl_options={
"certfile": existing_certificate,
"keyfile": "/__missing__.key"
})
# This actually works because both files exist
HTTPServer(application, ssl_options={
"certfile": existing_certificate,
"keyfile": existing_key,
})
class MultipartTestHandler(RequestHandler):
def post(self):
self.finish({"header": self.request.headers["X-Header-Encoding-Test"],
"argument": self.get_argument("argument"),
"filename": self.request.files["files"][0].filename,
"filebody": _unicode(self.request.files["files"][0]["body"]),
})
# This test is also called from wsgi_test
class HTTPConnectionTest(AsyncHTTPTestCase):
def get_handlers(self):
return [("/multipart", MultipartTestHandler),
("/hello", HelloWorldRequestHandler)]
def get_app(self):
return Application(self.get_handlers())
def raw_fetch(self, headers, body, newline=b"\r\n"):
with closing(IOStream(socket.socket())) as stream:
stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
self.wait()
stream.write(
newline.join(headers +
[utf8("Content-Length: %d" % len(body))]) +
newline + newline + body)
read_stream_body(stream, self.stop)
headers, body = self.wait()
return body
def test_multipart_form(self):
# Encodings here are tricky: Headers are latin1, bodies can be
# anything (we use utf8 by default).
response = self.raw_fetch([
b"POST /multipart HTTP/1.0",
b"Content-Type: multipart/form-data; boundary=1234567890",
b"X-Header-encoding-test: \xe9",
],
b"\r\n".join([
b"Content-Disposition: form-data; name=argument",
b"",
u"\u00e1".encode("utf-8"),
b"--1234567890",
u'Content-Disposition: form-data; name="files"; filename="\u00f3"'.encode("utf8"),
b"",
u"\u00fa".encode("utf-8"),
b"--1234567890--",
b"",
]))
data = json_decode(response)
self.assertEqual(u"\u00e9", data["header"])
self.assertEqual(u"\u00e1", data["argument"])
self.assertEqual(u"\u00f3", data["filename"])
self.assertEqual(u"\u00fa", data["filebody"])
def test_newlines(self):
# We support both CRLF and bare LF as line separators.
for newline in (b"\r\n", b"\n"):
response = self.raw_fetch([b"GET /hello HTTP/1.0"], b"",
newline=newline)
self.assertEqual(response, b'Hello world')
def test_100_continue(self):
# Run through a 100-continue interaction by hand:
# When given Expect: 100-continue, we get a 100 response after the
# headers, and then the real response after the body.
stream = IOStream(socket.socket(), io_loop=self.io_loop)
stream.connect(("127.0.0.1", self.get_http_port()), callback=self.stop)
self.wait()
stream.write(b"\r\n".join([b"POST /hello HTTP/1.1",
b"Content-Length: 1024",
b"Expect: 100-continue",
b"Connection: close",
b"\r\n"]), callback=self.stop)
self.wait()
stream.read_until(b"\r\n\r\n", self.stop)
data = self.wait()
self.assertTrue(data.startswith(b"HTTP/1.1 100 "), data)
stream.write(b"a" * 1024)
stream.read_until(b"\r\n", self.stop)
first_line = self.wait()
self.assertTrue(first_line.startswith(b"HTTP/1.1 200"), first_line)
stream.read_until(b"\r\n\r\n", self.stop)
header_data = self.wait()
headers = HTTPHeaders.parse(native_str(header_data.decode('latin1')))
stream.read_bytes(int(headers["Content-Length"]), self.stop)
body = self.wait()
self.assertEqual(body, b"Got 1024 bytes in POST")
stream.close()
class EchoHandler(RequestHandler):
def get(self):
self.write(recursive_unicode(self.request.arguments))
def post(self):
self.write(recursive_unicode(self.request.arguments))
class TypeCheckHandler(RequestHandler):
def prepare(self):
self.errors = {}
fields = [
('method', str),
('uri', str),
('version', str),
('remote_ip', str),
('protocol', str),
('host', str),
('path', str),
('query', str),
]
for field, expected_type in fields:
self.check_type(field, getattr(self.request, field), expected_type)
self.check_type('header_key', list(self.request.headers.keys())[0], str)
self.check_type('header_value', list(self.request.headers.values())[0], str)
self.check_type('cookie_key', list(self.request.cookies.keys())[0], str)
self.check_type('cookie_value', list(self.request.cookies.values())[0].value, str)
# secure cookies
self.check_type('arg_key', list(self.request.arguments.keys())[0], str)
self.check_type('arg_value', list(self.request.arguments.values())[0][0], bytes)
def post(self):
self.check_type('body', self.request.body, bytes)
self.write(self.errors)
def get(self):
self.write(self.errors)
def check_type(self, name, obj, expected_type):
actual_type = type(obj)
if expected_type != actual_type:
self.errors[name] = "expected %s, got %s" % (expected_type,
actual_type)
class HTTPServerTest(AsyncHTTPTestCase):
def get_app(self):
return Application([("/echo", EchoHandler),
("/typecheck", TypeCheckHandler),
("//doubleslash", EchoHandler),
])
def test_query_string_encoding(self):
response = self.fetch("/echo?foo=%C3%A9")
data = json_decode(response.body)
self.assertEqual(data, {u"foo": [u"\u00e9"]})
def test_empty_query_string(self):
response = self.fetch("/echo?foo=&foo=")
data = json_decode(response.body)
self.assertEqual(data, {u"foo": [u"", u""]})
def test_empty_post_parameters(self):
response = self.fetch("/echo", method="POST", body="foo=&bar=")
data = json_decode(response.body)
self.assertEqual(data, {u"foo": [u""], u"bar": [u""]})
def test_types(self):
headers = {"Cookie": "foo=bar"}
response = self.fetch("/typecheck?foo=bar", headers=headers)
data = json_decode(response.body)
self.assertEqual(data, {})
response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers)
data = json_decode(response.body)
self.assertEqual(data, {})
def test_double_slash(self):
# urlparse.urlsplit (which tornado.httpserver used to use
# incorrectly) would parse paths beginning with "//" as
# protocol-relative urls.
response = self.fetch("//doubleslash")
self.assertEqual(200, response.code)
self.assertEqual(json_decode(response.body), {})
def test_malformed_body(self):
# parse_qs is pretty forgiving, but it will fail on python 3
# if the data is not utf8. On python 2 parse_qs will work,
# but then the recursive_unicode call in EchoHandler will
# fail.
if str is bytes:
return
with ExpectLog(gen_log, 'Invalid x-www-form-urlencoded body'):
response = self.fetch(
'/echo', method="POST",
headers={'Content-Type': 'application/x-www-form-urlencoded'},
body=b'\xe9')
self.assertEqual(200, response.code)
self.assertEqual(b'{}', response.body)
class HTTPServerRawTest(AsyncHTTPTestCase):
def get_app(self):
return Application([
('/echo', EchoHandler),
])
def setUp(self):
super(HTTPServerRawTest, self).setUp()
self.stream = IOStream(socket.socket())
self.stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
self.wait()
def tearDown(self):
self.stream.close()
super(HTTPServerRawTest, self).tearDown()
def test_empty_request(self):
self.stream.close()
self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
self.wait()
def test_malformed_first_line(self):
with ExpectLog(gen_log, '.*Malformed HTTP request line'):
self.stream.write(b'asdf\r\n\r\n')
# TODO: need an async version of ExpectLog so we don't need
# hard-coded timeouts here.
self.io_loop.add_timeout(datetime.timedelta(seconds=0.05),
self.stop)
self.wait()
def test_malformed_headers(self):
with ExpectLog(gen_log, '.*Malformed HTTP headers'):
self.stream.write(b'GET / HTTP/1.0\r\nasdf\r\n\r\n')
self.io_loop.add_timeout(datetime.timedelta(seconds=0.05),
self.stop)
self.wait()
def test_chunked_request_body(self):
# Chunked requests are not widely supported and we don't have a way
# to generate them in AsyncHTTPClient, but HTTPServer will read them.
self.stream.write(b"""\
POST /echo HTTP/1.1
Transfer-Encoding: chunked
Content-Type: application/x-www-form-urlencoded
4
foo=
3
bar
0
""".replace(b"\n", b"\r\n"))
read_stream_body(self.stream, self.stop)
headers, response = self.wait()
self.assertEqual(json_decode(response), {u'foo': [u'bar']})
def test_chunked_request_uppercase(self):
# As per RFC 2616 section 3.6, "Transfer-Encoding" header's value is
# case-insensitive.
self.stream.write(b"""\
POST /echo HTTP/1.1
Transfer-Encoding: Chunked
Content-Type: application/x-www-form-urlencoded
4
foo=
3
bar
0
""".replace(b"\n", b"\r\n"))
read_stream_body(self.stream, self.stop)
headers, response = self.wait()
self.assertEqual(json_decode(response), {u'foo': [u'bar']})
def test_invalid_content_length(self):
with ExpectLog(gen_log, '.*Only integer Content-Length is allowed'):
self.stream.write(b"""\
POST /echo HTTP/1.1
Content-Length: foo
bar
""".replace(b"\n", b"\r\n"))
self.stream.read_until_close(self.stop)
self.wait()
class XHeaderTest(HandlerBaseTestCase):
class Handler(RequestHandler):
def get(self):
self.write(dict(remote_ip=self.request.remote_ip,
remote_protocol=self.request.protocol))
def get_httpserver_options(self):
return dict(xheaders=True)
def test_ip_headers(self):
self.assertEqual(self.fetch_json("/")["remote_ip"], "127.0.0.1")
valid_ipv4 = {"X-Real-IP": "4.4.4.4"}
self.assertEqual(
self.fetch_json("/", headers=valid_ipv4)["remote_ip"],
"4.4.4.4")
valid_ipv4_list = {"X-Forwarded-For": "127.0.0.1, 4.4.4.4"}
self.assertEqual(
self.fetch_json("/", headers=valid_ipv4_list)["remote_ip"],
"4.4.4.4")
valid_ipv6 = {"X-Real-IP": "2620:0:1cfe:face:b00c::3"}
self.assertEqual(
self.fetch_json("/", headers=valid_ipv6)["remote_ip"],
"2620:0:1cfe:face:b00c::3")
valid_ipv6_list = {"X-Forwarded-For": "::1, 2620:0:1cfe:face:b00c::3"}
self.assertEqual(
self.fetch_json("/", headers=valid_ipv6_list)["remote_ip"],
"2620:0:1cfe:face:b00c::3")
invalid_chars = {"X-Real-IP": "4.4.4.4<script>"}
self.assertEqual(
self.fetch_json("/", headers=invalid_chars)["remote_ip"],
"127.0.0.1")
invalid_chars_list = {"X-Forwarded-For": "4.4.4.4, 5.5.5.5<script>"}
self.assertEqual(
self.fetch_json("/", headers=invalid_chars_list)["remote_ip"],
"127.0.0.1")
invalid_host = {"X-Real-IP": "www.google.com"}
self.assertEqual(
self.fetch_json("/", headers=invalid_host)["remote_ip"],
"127.0.0.1")
def test_scheme_headers(self):
self.assertEqual(self.fetch_json("/")["remote_protocol"], "http")
https_scheme = {"X-Scheme": "https"}
self.assertEqual(
self.fetch_json("/", headers=https_scheme)["remote_protocol"],
"https")
https_forwarded = {"X-Forwarded-Proto": "https"}
self.assertEqual(
self.fetch_json("/", headers=https_forwarded)["remote_protocol"],
"https")
bad_forwarded = {"X-Forwarded-Proto": "unknown"}
self.assertEqual(
self.fetch_json("/", headers=bad_forwarded)["remote_protocol"],
"http")
class SSLXHeaderTest(AsyncHTTPSTestCase, HandlerBaseTestCase):
def get_app(self):
return Application([('/', XHeaderTest.Handler)])
def get_httpserver_options(self):
output = super(SSLXHeaderTest, self).get_httpserver_options()
output['xheaders'] = True
return output
def test_request_without_xprotocol(self):
self.assertEqual(self.fetch_json("/")["remote_protocol"], "https")
http_scheme = {"X-Scheme": "http"}
self.assertEqual(
self.fetch_json("/", headers=http_scheme)["remote_protocol"], "http")
bad_scheme = {"X-Scheme": "unknown"}
self.assertEqual(
self.fetch_json("/", headers=bad_scheme)["remote_protocol"], "https")
class ManualProtocolTest(HandlerBaseTestCase):
class Handler(RequestHandler):
def get(self):
self.write(dict(protocol=self.request.protocol))
def get_httpserver_options(self):
return dict(protocol='https')
def test_manual_protocol(self):
self.assertEqual(self.fetch_json('/')['protocol'], 'https')
@unittest.skipIf(not hasattr(socket, 'AF_UNIX') or sys.platform == 'cygwin',
"unix sockets not supported on this platform")
class UnixSocketTest(AsyncTestCase):
"""HTTPServers can listen on Unix sockets too.
Why would you want to do this? Nginx can proxy to backends listening
on unix sockets, for one thing (and managing a namespace for unix
sockets can be easier than managing a bunch of TCP port numbers).
Unfortunately, there's no way to specify a unix socket in a url for
an HTTP client, so we have to test this by hand.
"""
def setUp(self):
super(UnixSocketTest, self).setUp()
self.tmpdir = tempfile.mkdtemp()
self.sockfile = os.path.join(self.tmpdir, "test.sock")
sock = netutil.bind_unix_socket(self.sockfile)
app = Application([("/hello", HelloWorldRequestHandler)])
self.server = HTTPServer(app, io_loop=self.io_loop)
self.server.add_socket(sock)
self.stream = IOStream(socket.socket(socket.AF_UNIX), io_loop=self.io_loop)
self.stream.connect(self.sockfile, self.stop)
self.wait()
def tearDown(self):
self.stream.close()
self.server.stop()
shutil.rmtree(self.tmpdir)
super(UnixSocketTest, self).tearDown()
def test_unix_socket(self):
self.stream.write(b"GET /hello HTTP/1.0\r\n\r\n")
self.stream.read_until(b"\r\n", self.stop)
response = self.wait()
self.assertEqual(response, b"HTTP/1.1 200 OK\r\n")
self.stream.read_until(b"\r\n\r\n", self.stop)
headers = HTTPHeaders.parse(self.wait().decode('latin1'))
self.stream.read_bytes(int(headers["Content-Length"]), self.stop)
body = self.wait()
self.assertEqual(body, b"Hello world")
def test_unix_socket_bad_request(self):
# Unix sockets don't have remote addresses so they just return an
# empty string.
with ExpectLog(gen_log, "Malformed HTTP message from"):
self.stream.write(b"garbage\r\n\r\n")
self.stream.read_until_close(self.stop)
response = self.wait()
self.assertEqual(response, b"")
class KeepAliveTest(AsyncHTTPTestCase):
"""Tests various scenarios for HTTP 1.1 keep-alive support.
These tests don't use AsyncHTTPClient because we want to control
connection reuse and closing.
"""
def get_app(self):
class HelloHandler(RequestHandler):
def get(self):
self.finish('Hello world')
def post(self):
self.finish('Hello world')
class LargeHandler(RequestHandler):
def get(self):
# 512KB should be bigger than the socket buffers so it will
# be written out in chunks.
self.write(''.join(chr(i % 256) * 1024 for i in range(512)))
class FinishOnCloseHandler(RequestHandler):
@asynchronous
def get(self):
self.flush()
def on_connection_close(self):
# This is not very realistic, but finishing the request
# from the close callback has the right timing to mimic
# some errors seen in the wild.
self.finish('closed')
return Application([('/', HelloHandler),
('/large', LargeHandler),
('/finish_on_close', FinishOnCloseHandler)])
def setUp(self):
super(KeepAliveTest, self).setUp()
self.http_version = b'HTTP/1.1'
def tearDown(self):
# We just closed the client side of the socket; let the IOLoop run
# once to make sure the server side got the message.
self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
self.wait()
if hasattr(self, 'stream'):
self.stream.close()
super(KeepAliveTest, self).tearDown()
# The next few methods are a crude manual http client
def connect(self):
self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
self.stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
self.wait()
def read_headers(self):
self.stream.read_until(b'\r\n', self.stop)
first_line = self.wait()
self.assertTrue(first_line.startswith(b'HTTP/1.1 200'), first_line)
self.stream.read_until(b'\r\n\r\n', self.stop)
header_bytes = self.wait()
headers = HTTPHeaders.parse(header_bytes.decode('latin1'))
return headers
def read_response(self):
self.headers = self.read_headers()
self.stream.read_bytes(int(self.headers['Content-Length']), self.stop)
body = self.wait()
self.assertEqual(b'Hello world', body)
def close(self):
self.stream.close()
del self.stream
def test_two_requests(self):
self.connect()
self.stream.write(b'GET / HTTP/1.1\r\n\r\n')
self.read_response()
self.stream.write(b'GET / HTTP/1.1\r\n\r\n')
self.read_response()
self.close()
def test_request_close(self):
self.connect()
self.stream.write(b'GET / HTTP/1.1\r\nConnection: close\r\n\r\n')
self.read_response()
self.stream.read_until_close(callback=self.stop)
data = self.wait()
self.assertTrue(not data)
self.close()
# keepalive is supported for http 1.0 too, but it's opt-in
def test_http10(self):
self.http_version = b'HTTP/1.0'
self.connect()
self.stream.write(b'GET / HTTP/1.0\r\n\r\n')
self.read_response()
self.stream.read_until_close(callback=self.stop)
data = self.wait()
self.assertTrue(not data)
self.assertTrue('Connection' not in self.headers)
self.close()
def test_http10_keepalive(self):
self.http_version = b'HTTP/1.0'
self.connect()
self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
self.read_response()
self.assertEqual(self.headers['Connection'], 'Keep-Alive')
self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
self.read_response()
self.assertEqual(self.headers['Connection'], 'Keep-Alive')
self.close()
def test_http10_keepalive_extra_crlf(self):
self.http_version = b'HTTP/1.0'
self.connect()
self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n\r\n')
self.read_response()
self.assertEqual(self.headers['Connection'], 'Keep-Alive')
self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
self.read_response()
self.assertEqual(self.headers['Connection'], 'Keep-Alive')
self.close()
def test_pipelined_requests(self):
self.connect()
self.stream.write(b'GET / HTTP/1.1\r\n\r\nGET / HTTP/1.1\r\n\r\n')
self.read_response()
self.read_response()
self.close()
def test_pipelined_cancel(self):
self.connect()
self.stream.write(b'GET / HTTP/1.1\r\n\r\nGET / HTTP/1.1\r\n\r\n')
# only read once
self.read_response()
self.close()
def test_cancel_during_download(self):
self.connect()
self.stream.write(b'GET /large HTTP/1.1\r\n\r\n')
self.read_headers()
self.stream.read_bytes(1024, self.stop)
self.wait()
self.close()
def test_finish_while_closed(self):
self.connect()
self.stream.write(b'GET /finish_on_close HTTP/1.1\r\n\r\n')
self.read_headers()
self.close()
def test_keepalive_chunked(self):
self.http_version = b'HTTP/1.0'
self.connect()
self.stream.write(b'POST / HTTP/1.0\r\nConnection: keep-alive\r\n'
b'Transfer-Encoding: chunked\r\n'
b'\r\n0\r\n')
self.read_response()
self.assertEqual(self.headers['Connection'], 'Keep-Alive')
self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
self.read_response()
self.assertEqual(self.headers['Connection'], 'Keep-Alive')
self.close()
class GzipBaseTest(object):
def get_app(self):
return Application([('/', EchoHandler)])
def post_gzip(self, body):
bytesio = BytesIO()
gzip_file = gzip.GzipFile(mode='w', fileobj=bytesio)
gzip_file.write(utf8(body))
gzip_file.close()
compressed_body = bytesio.getvalue()
return self.fetch('/', method='POST', body=compressed_body,
headers={'Content-Encoding': 'gzip'})
def test_uncompressed(self):
response = self.fetch('/', method='POST', body='foo=bar')
self.assertEquals(json_decode(response.body), {u'foo': [u'bar']})
class GzipTest(GzipBaseTest, AsyncHTTPTestCase):
def get_httpserver_options(self):
return dict(decompress_request=True)
def test_gzip(self):
response = self.post_gzip('foo=bar')
self.assertEquals(json_decode(response.body), {u'foo': [u'bar']})
class GzipUnsupportedTest(GzipBaseTest, AsyncHTTPTestCase):
def test_gzip_unsupported(self):
# Gzip support is opt-in; without it the server fails to parse
# the body (but parsing form bodies is currently just a log message,
# not a fatal error).
with ExpectLog(gen_log, "Unsupported Content-Encoding"):
response = self.post_gzip('foo=bar')
self.assertEquals(json_decode(response.body), {})
class StreamingChunkSizeTest(AsyncHTTPTestCase):
# 50 characters long, and repetitive so it can be compressed.
BODY = b'01234567890123456789012345678901234567890123456789'
CHUNK_SIZE = 16
def get_http_client(self):
# body_producer doesn't work on curl_httpclient, so override the
# configured AsyncHTTPClient implementation.
return SimpleAsyncHTTPClient(io_loop=self.io_loop)
def get_httpserver_options(self):
return dict(chunk_size=self.CHUNK_SIZE, decompress_request=True)
class MessageDelegate(HTTPMessageDelegate):
def __init__(self, connection):
self.connection = connection
def headers_received(self, start_line, headers):
self.chunk_lengths = []
def data_received(self, chunk):
self.chunk_lengths.append(len(chunk))
def finish(self):
response_body = utf8(json_encode(self.chunk_lengths))
self.connection.write_headers(
ResponseStartLine('HTTP/1.1', 200, 'OK'),
HTTPHeaders({'Content-Length': str(len(response_body))}))
self.connection.write(response_body)
self.connection.finish()
def get_app(self):
class App(HTTPServerConnectionDelegate):
def start_request(self, server_conn, request_conn):
return StreamingChunkSizeTest.MessageDelegate(request_conn)
return App()
def fetch_chunk_sizes(self, **kwargs):
response = self.fetch('/', method='POST', **kwargs)
response.rethrow()
chunks = json_decode(response.body)
self.assertEqual(len(self.BODY), sum(chunks))
for chunk_size in chunks:
self.assertLessEqual(chunk_size, self.CHUNK_SIZE,
'oversized chunk: ' + str(chunks))
self.assertGreater(chunk_size, 0,
'empty chunk: ' + str(chunks))
return chunks
def compress(self, body):
bytesio = BytesIO()
gzfile = gzip.GzipFile(mode='w', fileobj=bytesio)
gzfile.write(body)
gzfile.close()
compressed = bytesio.getvalue()
if len(compressed) >= len(body):
raise Exception("body did not shrink when compressed")
return compressed
def test_regular_body(self):
chunks = self.fetch_chunk_sizes(body=self.BODY)
# Without compression we know exactly what to expect.
self.assertEqual([16, 16, 16, 2], chunks)
def test_compressed_body(self):
self.fetch_chunk_sizes(body=self.compress(self.BODY),
headers={'Content-Encoding': 'gzip'})
# Compression creates irregular boundaries so the assertions
# in fetch_chunk_sizes are as specific as we can get.
def test_chunked_body(self):
def body_producer(write):
write(self.BODY[:20])
write(self.BODY[20:])
chunks = self.fetch_chunk_sizes(body_producer=body_producer)
# HTTP chunk boundaries translate to application-visible breaks
self.assertEqual([16, 4, 16, 14], chunks)
def test_chunked_compressed(self):
compressed = self.compress(self.BODY)
self.assertGreater(len(compressed), 20)
def body_producer(write):
write(compressed[:20])
write(compressed[20:])
self.fetch_chunk_sizes(body_producer=body_producer,
headers={'Content-Encoding': 'gzip'})
class MaxHeaderSizeTest(AsyncHTTPTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
return dict(max_header_size=1024)
def test_small_headers(self):
response = self.fetch("/", headers={'X-Filler': 'a' * 100})
response.rethrow()
self.assertEqual(response.body, b"Hello world")
def test_large_headers(self):
with ExpectLog(gen_log, "Unsatisfiable read", required=False):
response = self.fetch("/", headers={'X-Filler': 'a' * 1000})
# 431 is "Request Header Fields Too Large", defined in RFC
# 6585. However, many implementations just close the
# connection in this case, resulting in a 599.
self.assertIn(response.code, (431, 599))
@skipOnTravis
class IdleTimeoutTest(AsyncHTTPTestCase):
def get_app(self):
return Application([('/', HelloWorldRequestHandler)])
def get_httpserver_options(self):
return dict(idle_connection_timeout=0.1)
def setUp(self):
super(IdleTimeoutTest, self).setUp()
self.streams = []
def tearDown(self):
super(IdleTimeoutTest, self).tearDown()
for stream in self.streams:
stream.close()
def connect(self):
stream = IOStream(socket.socket())
stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
self.wait()
self.streams.append(stream)
return stream
def test_unused_connection(self):
stream = self.connect()
stream.set_close_callback(self.stop)
self.wait()
def test_idle_after_use(self):
stream = self.connect()
stream.set_close_callback(lambda: self.stop("closed"))
# Use the connection twice to make sure keep-alives are working
for i in range(2):
stream.write(b"GET / HTTP/1.1\r\n\r\n")
stream.read_until(b"\r\n\r\n", self.stop)
self.wait()
stream.read_bytes(11, self.stop)
data = self.wait()
self.assertEqual(data, b"Hello world")
# Now let the timeout trigger and close the connection.
data = self.wait()
self.assertEqual(data, "closed")
class BodyLimitsTest(AsyncHTTPTestCase):
def get_app(self):
class BufferedHandler(RequestHandler):
def put(self):
self.write(str(len(self.request.body)))
@stream_request_body
class StreamingHandler(RequestHandler):
def initialize(self):
self.bytes_read = 0
def prepare(self):
if 'expected_size' in self.request.arguments:
self.request.connection.set_max_body_size(
int(self.get_argument('expected_size')))
if 'body_timeout' in self.request.arguments:
self.request.connection.set_body_timeout(
float(self.get_argument('body_timeout')))
def data_received(self, data):
self.bytes_read += len(data)
def put(self):
self.write(str(self.bytes_read))
return Application([('/buffered', BufferedHandler),
('/streaming', StreamingHandler)])
def get_httpserver_options(self):
return dict(body_timeout=3600, max_body_size=4096)
def get_http_client(self):
# body_producer doesn't work on curl_httpclient, so override the
# configured AsyncHTTPClient implementation.
return SimpleAsyncHTTPClient(io_loop=self.io_loop)
def test_small_body(self):
response = self.fetch('/buffered', method='PUT', body=b'a' * 4096)
self.assertEqual(response.body, b'4096')
response = self.fetch('/streaming', method='PUT', body=b'a' * 4096)
self.assertEqual(response.body, b'4096')
def test_large_body_buffered(self):
with ExpectLog(gen_log, '.*Content-Length too long'):
response = self.fetch('/buffered', method='PUT', body=b'a' * 10240)
self.assertEqual(response.code, 599)
def test_large_body_buffered_chunked(self):
with ExpectLog(gen_log, '.*chunked body too large'):
response = self.fetch('/buffered', method='PUT',
body_producer=lambda write: write(b'a' * 10240))
self.assertEqual(response.code, 599)
def test_large_body_streaming(self):
with ExpectLog(gen_log, '.*Content-Length too long'):
response = self.fetch('/streaming', method='PUT', body=b'a' * 10240)
self.assertEqual(response.code, 599)
def test_large_body_streaming_chunked(self):
with ExpectLog(gen_log, '.*chunked body too large'):
response = self.fetch('/streaming', method='PUT',
body_producer=lambda write: write(b'a' * 10240))
self.assertEqual(response.code, 599)
def test_large_body_streaming_override(self):
response = self.fetch('/streaming?expected_size=10240', method='PUT',
body=b'a' * 10240)
self.assertEqual(response.body, b'10240')
def test_large_body_streaming_chunked_override(self):
response = self.fetch('/streaming?expected_size=10240', method='PUT',
body_producer=lambda write: write(b'a' * 10240))
self.assertEqual(response.body, b'10240')
@gen_test
def test_timeout(self):
stream = IOStream(socket.socket())
try:
yield stream.connect(('127.0.0.1', self.get_http_port()))
# Use a raw stream because AsyncHTTPClient won't let us read a
# response without finishing a body.
stream.write(b'PUT /streaming?body_timeout=0.1 HTTP/1.0\r\n'
b'Content-Length: 42\r\n\r\n')
with ExpectLog(gen_log, 'Timeout reading body'):
response = yield stream.read_until_close()
self.assertEqual(response, b'')
finally:
stream.close()
@gen_test
def test_body_size_override_reset(self):
# The max_body_size override is reset between requests.
stream = IOStream(socket.socket())
try:
yield stream.connect(('127.0.0.1', self.get_http_port()))
# Use a raw stream so we can make sure it's all on one connection.
stream.write(b'PUT /streaming?expected_size=10240 HTTP/1.1\r\n'
b'Content-Length: 10240\r\n\r\n')
stream.write(b'a' * 10240)
headers, response = yield gen.Task(read_stream_body, stream)
self.assertEqual(response, b'10240')
# Without the ?expected_size parameter, we get the old default value
stream.write(b'PUT /streaming HTTP/1.1\r\n'
b'Content-Length: 10240\r\n\r\n')
with ExpectLog(gen_log, '.*Content-Length too long'):
data = yield stream.read_until_close()
self.assertEqual(data, b'')
finally:
stream.close()
class LegacyInterfaceTest(AsyncHTTPTestCase):
def get_app(self):
# The old request_callback interface does not implement the
# delegate interface, and writes its response via request.write
# instead of request.connection.write_headers.
def handle_request(request):
self.http1 = request.version.startswith("HTTP/1.")
if not self.http1:
# This test will be skipped if we're using HTTP/2,
# so just close it out cleanly using the modern interface.
request.connection.write_headers(
ResponseStartLine('', 200, 'OK'),
HTTPHeaders())
request.connection.finish()
return
message = b"Hello world"
request.write(utf8("HTTP/1.1 200 OK\r\n"
"Content-Length: %d\r\n\r\n" % len(message)))
request.write(message)
request.finish()
return handle_request
def test_legacy_interface(self):
response = self.fetch('/')
if not self.http1:
self.skipTest("requires HTTP/1.x")
self.assertEqual(response.body, b"Hello world")
|
mr-ping/tornado
|
tornado/test/httpserver_test.py
|
Python
|
apache-2.0
| 41,934
|
#!/usr/bin/env python
# Load common imports and system envs to build the core object
import sys, os
# Load the Environment:
os.environ["ENV_DEPLOYMENT_TYPE"] = "JustRedis"
from src.common.inits_for_python import *
#####################################################################
#
# Start Arg Processing:
#
action = "Extract and Upload IRIS Models to S3"
parser = argparse.ArgumentParser(description="Parser for Action: " + str(action))
parser.add_argument('-u', '--url', help='URL to Download', dest='url')
parser.add_argument('-b', '--s3bucket', help='S3 Bucket (Optional)', dest='s_bucket')
parser.add_argument('-k', '--s3key', help='S3 Key (Optional)', dest='s_key')
parser.add_argument("-d", "--debug", help="Debug Flag", dest='debug', action='store_true')
args = parser.parse_args()
if args.debug:
debug = True
core.enable_debug()
data_dir = str(os.getenv("ENV_DATA_DST_DIR", "/opt/work/data/dst"))
if not os.path.exists(data_dir):
os.mkdir(data_dir, 0777)
ds_name = "iris_classifier"
cur_date_str = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
s3_bucket = "unique-bucket-name-for-datasets"
s3_key = "dataset_" + core.to_upper(ds_name) + ".cache.pickle.zlib"
s3_loc = ""
if args.s_bucket:
s3_bucket = str(args.s_bucket)
if args.s_key:
s3_key = str(args.s_key)
#
# End Arg Processing
#
#####################################################################
s3_loc = str(s3_bucket) + ":" + str(s3_key)
lg("-------------------------------------------------", 6)
lg("Extracting and Uploading Models from CACHE to S3Loc(" + str(s3_loc) + ")", 6)
lg("", 6)
cache_req = {
"RAName" : "CACHE", # Redis instance name holding the models
"DSName" : str(ds_name), # Dataset name for pulling out of the cache
"S3Loc" : str(s3_loc), # S3 location to store the model file
"DeleteAfter" : False, # Optional delete after upload
"SaveDir" : data_dir, # Optional dir to save the model file - default is ENV_DATA_DST_DIR
"TrackingID" : "" # Future support for using the tracking id
}
upload_results = core.ml_upload_cached_dataset_to_s3(cache_req, core.get_rds(), core.get_dbs(), debug)
if upload_results["Status"] == "SUCCESS":
lg("Done Uploading Model and Analysis DSName(" + str(ds_name) + ") S3Loc(" + str(cache_req["S3Loc"]) + ")", 6)
else:
lg("", 6)
lg("ERROR: Failed Upload Model and Analysis Caches as file for DSName(" + str(ds_name) + ")", 6)
lg(upload_results["Error"], 6)
lg("", 6)
sys.exit(1)
# end of if extract + upload worked
lg("", 6)
lg("Extract and Upload Completed", 5)
lg("", 6)
sys.exit(0)
|
jay-johnson/sci-pype
|
bins/ml/extractors/extract_and_upload_iris_classifier.py
|
Python
|
apache-2.0
| 2,955
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Log entries within the Google Stackdriver Logging API."""
import json
import re
from google.protobuf import any_pb2
from google.protobuf.json_format import Parse
from google.cloud.logging.resource import Resource
from google.cloud._helpers import _name_from_project_path
from google.cloud._helpers import _rfc3339_nanos_to_datetime
_LOGGER_TEMPLATE = re.compile(r"""
    projects/            # static prefix
    (?P<project>[^/]+)   # one or more non-slash characters (project ID)
    /logs/               # static midfix
    (?P<name>[^/]+)      # one or more non-slash characters (logger name)
""", re.VERBOSE)
def logger_name_from_path(path):
"""Validate a logger URI path and get the logger name.
:type path: str
:param path: URI path for a logger API request.
:rtype: str
:returns: Logger name parsed from ``path``.
:raises: :class:`ValueError` if the ``path`` is ill-formed or if
the project from the ``path`` does not agree with the
``project`` passed in.
"""
return _name_from_project_path(path, None, _LOGGER_TEMPLATE)
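# Illustrative usage sketch (not part of the original module; the project and
# logger names are hypothetical). Given _LOGGER_TEMPLATE above, a well-formed
# path parses to its trailing logger name:
#
#     logger_name_from_path('projects/my-project/logs/my-logger')
#     # -> 'my-logger'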
class _BaseEntry(object):
"""Base class for TextEntry, StructEntry, ProtobufEntry.
:type payload: text or dict
:param payload: The payload passed as ``textPayload``, ``jsonPayload``,
or ``protoPayload``.
:type logger: :class:`google.cloud.logging.logger.Logger`
:param logger: the logger used to write the entry.
:type insert_id: text
:param insert_id: (optional) the ID used to identify an entry uniquely.
:type timestamp: :class:`datetime.datetime`
:param timestamp: (optional) timestamp for the entry
:type labels: dict
:param labels: (optional) mapping of labels for the entry
:type severity: str
:param severity: (optional) severity of event being logged.
:type http_request: dict
:param http_request: (optional) info about HTTP request associated with
the entry.
:type resource: :class:`~google.cloud.logging.resource.Resource`
:param resource: (Optional) Monitored resource of the entry
"""
def __init__(self, payload, logger, insert_id=None, timestamp=None,
labels=None, severity=None, http_request=None, resource=None):
self.payload = payload
self.logger = logger
self.insert_id = insert_id
self.timestamp = timestamp
self.labels = labels
self.severity = severity
self.http_request = http_request
self.resource = resource
@classmethod
def from_api_repr(cls, resource, client, loggers=None):
"""Factory: construct an entry given its API representation
:type resource: dict
:param resource: text entry resource representation returned from
the API
:type client: :class:`google.cloud.logging.client.Client`
:param client: Client which holds credentials and project
configuration.
:type loggers: dict
:param loggers:
(Optional) A mapping of logger fullnames -> loggers. If not
passed, the entry will have a newly-created logger.
:rtype: :class:`google.cloud.logging.entries._BaseEntry`
:returns: Text entry parsed from ``resource``.
"""
if loggers is None:
loggers = {}
logger_fullname = resource['logName']
logger = loggers.get(logger_fullname)
if logger is None:
logger_name = logger_name_from_path(logger_fullname)
logger = loggers[logger_fullname] = client.logger(logger_name)
payload = resource[cls._PAYLOAD_KEY]
insert_id = resource.get('insertId')
timestamp = resource.get('timestamp')
if timestamp is not None:
timestamp = _rfc3339_nanos_to_datetime(timestamp)
labels = resource.get('labels')
severity = resource.get('severity')
http_request = resource.get('httpRequest')
monitored_resource_dict = resource.get('resource')
monitored_resource = None
if monitored_resource_dict is not None:
monitored_resource = Resource._from_dict(monitored_resource_dict)
return cls(payload, logger, insert_id=insert_id, timestamp=timestamp,
labels=labels, severity=severity, http_request=http_request,
resource=monitored_resource)
class TextEntry(_BaseEntry):
"""Entry created with ``textPayload``.
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry
"""
_PAYLOAD_KEY = 'textPayload'
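# Hedged sketch of building a TextEntry from a minimal API resource dict
# (all values are hypothetical; ``client`` is assumed to be a configured
# ``google.cloud.logging.client.Client``):
#
#     resource = {
#         'logName': 'projects/my-project/logs/my-logger',
#         'textPayload': 'hello world',   # matches TextEntry._PAYLOAD_KEY
#         'insertId': 'abc-123',          # optional
#     }
#     entry = TextEntry.from_api_repr(resource, client)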
class StructEntry(_BaseEntry):
"""Entry created with ``jsonPayload``.
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry
"""
_PAYLOAD_KEY = 'jsonPayload'
class ProtobufEntry(_BaseEntry):
"""Entry created with ``protoPayload``.
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry
:type payload: str, dict or any_pb2.Any
:param payload: The payload passed as ``textPayload``, ``jsonPayload``,
or ``protoPayload``. This also may be passed as a raw
:class:`.any_pb2.Any` if the ``protoPayload`` could
not be deserialized.
:type logger: :class:`~google.cloud.logging.logger.Logger`
:param logger: the logger used to write the entry.
:type insert_id: str
:param insert_id: (optional) the ID used to identify an entry uniquely.
:type timestamp: :class:`datetime.datetime`
:param timestamp: (optional) timestamp for the entry
:type labels: dict
:param labels: (optional) mapping of labels for the entry
:type severity: str
:param severity: (optional) severity of event being logged.
:type http_request: dict
:param http_request: (optional) info about HTTP request associated with
the entry
:type resource: :class:`~google.cloud.logging.resource.Resource`
:param resource: (Optional) Monitored resource of the entry
"""
_PAYLOAD_KEY = 'protoPayload'
def __init__(self, payload, logger, insert_id=None, timestamp=None,
labels=None, severity=None, http_request=None, resource=None):
super(ProtobufEntry, self).__init__(
payload, logger, insert_id=insert_id, timestamp=timestamp,
labels=labels, severity=severity, http_request=http_request,
resource=resource)
if isinstance(self.payload, any_pb2.Any):
self.payload_pb = self.payload
self.payload = None
else:
self.payload_pb = None
def parse_message(self, message):
"""Parse payload into a protobuf message.
Mutates the passed-in ``message`` in place.
:type message: Protobuf message
:param message: the message to be logged
"""
# NOTE: This assumes that ``payload`` is already a deserialized
# ``Any`` field and ``message`` has come from an imported
# ``pb2`` module with the relevant protobuf message type.
Parse(json.dumps(self.payload), message)
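    # Hedged usage sketch for ``parse_message`` (``entry`` and ``MyMessage``
    # are hypothetical; ``MyMessage`` stands in for an imported ``pb2``
    # message type matching the entry's deserialized protoPayload):
    #
    #     msg = MyMessage()
    #     entry.parse_message(msg)  # fills ``msg`` in place from entry.payload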
|
calpeyser/google-cloud-python
|
logging/google/cloud/logging/entries.py
|
Python
|
apache-2.0
| 7,721
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack LLC.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
import webob
from nova.api.openstack.compute import consoles
from nova.compute import vm_states
from nova import console
from nova import db
from nova import exception
from nova import flags
from nova import test
from nova.tests.api.openstack import fakes
from nova import utils
FLAGS = flags.FLAGS
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
class FakeInstanceDB(object):
def __init__(self):
self.instances_by_id = {}
self.ids_by_uuid = {}
self.max_id = 0
def return_server_by_id(self, context, id):
if id not in self.instances_by_id:
self._add_server(id=id)
return dict(self.instances_by_id[id])
def return_server_by_uuid(self, context, uuid):
if uuid not in self.ids_by_uuid:
self._add_server(uuid=uuid)
return dict(self.instances_by_id[self.ids_by_uuid[uuid]])
def _add_server(self, id=None, uuid=None):
if id is None:
id = self.max_id + 1
if uuid is None:
uuid = str(utils.gen_uuid())
instance = stub_instance(id, uuid=uuid)
self.instances_by_id[id] = instance
self.ids_by_uuid[uuid] = id
if id > self.max_id:
self.max_id = id
def stub_instance(id, user_id='fake', project_id='fake', host=None,
vm_state=None, task_state=None,
reservation_id="", uuid=FAKE_UUID, image_ref="10",
flavor_id="1", name=None, key_name='',
access_ipv4=None, access_ipv6=None, progress=0):
if host is not None:
host = str(host)
if key_name:
key_data = 'FAKE'
else:
key_data = ''
# ReservationID isn't sent back, hack it in there.
server_name = name or "server%s" % id
if reservation_id != "":
server_name = "reservation_%s" % (reservation_id, )
instance = {
"id": int(id),
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
"admin_pass": "",
"user_id": user_id,
"project_id": project_id,
"image_ref": image_ref,
"kernel_id": "",
"ramdisk_id": "",
"launch_index": 0,
"key_name": key_name,
"key_data": key_data,
"vm_state": vm_state or vm_states.BUILDING,
"task_state": task_state,
"memory_mb": 0,
"vcpus": 0,
"root_gb": 0,
"hostname": "",
"host": host,
"instance_type": {},
"user_data": "",
"reservation_id": reservation_id,
"mac_address": "",
"scheduled_at": utils.utcnow(),
"launched_at": utils.utcnow(),
"terminated_at": utils.utcnow(),
"availability_zone": "",
"display_name": server_name,
"display_description": "",
"locked": False,
"metadata": [],
"access_ip_v4": access_ipv4,
"access_ip_v6": access_ipv6,
"uuid": uuid,
"progress": progress}
return instance
class ConsolesControllerTest(test.TestCase):
def setUp(self):
super(ConsolesControllerTest, self).setUp()
self.flags(verbose=True)
self.instance_db = FakeInstanceDB()
self.stubs.Set(db, 'instance_get',
self.instance_db.return_server_by_id)
self.stubs.Set(db, 'instance_get_by_uuid',
self.instance_db.return_server_by_uuid)
self.uuid = str(utils.gen_uuid())
self.url = '/v2/fake/servers/%s/consoles' % self.uuid
self.controller = consoles.Controller()
def test_create_console(self):
def fake_create_console(cons_self, context, instance_id):
self.assertEqual(instance_id, self.uuid)
return {}
self.stubs.Set(console.api.API, 'create_console', fake_create_console)
req = fakes.HTTPRequest.blank(self.url)
self.controller.create(req, self.uuid)
def test_show_console(self):
def fake_get_console(cons_self, context, instance_id, console_id):
self.assertEqual(instance_id, self.uuid)
self.assertEqual(console_id, 20)
pool = dict(console_type='fake_type',
public_hostname='fake_hostname')
return dict(id=console_id, password='fake_password',
port='fake_port', pool=pool, instance_name='inst-0001')
expected = {'console': {'id': 20,
'port': 'fake_port',
'host': 'fake_hostname',
'password': 'fake_password',
'instance_name': 'inst-0001',
'console_type': 'fake_type'}}
self.stubs.Set(console.api.API, 'get_console', fake_get_console)
req = fakes.HTTPRequest.blank(self.url + '/20')
res_dict = self.controller.show(req, self.uuid, '20')
self.assertDictMatch(res_dict, expected)
def test_show_console_unknown_console(self):
def fake_get_console(cons_self, context, instance_id, console_id):
raise exception.ConsoleNotFound(console_id=console_id)
self.stubs.Set(console.api.API, 'get_console', fake_get_console)
req = fakes.HTTPRequest.blank(self.url + '/20')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, self.uuid, '20')
def test_show_console_unknown_instance(self):
def fake_get_console(cons_self, context, instance_id, console_id):
raise exception.InstanceNotFound(instance_id=instance_id)
self.stubs.Set(console.api.API, 'get_console', fake_get_console)
req = fakes.HTTPRequest.blank(self.url + '/20')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, self.uuid, '20')
def test_list_consoles(self):
def fake_get_consoles(cons_self, context, instance_id):
self.assertEqual(instance_id, self.uuid)
pool1 = dict(console_type='fake_type',
public_hostname='fake_hostname')
cons1 = dict(id=10, password='fake_password',
port='fake_port', pool=pool1)
pool2 = dict(console_type='fake_type2',
public_hostname='fake_hostname2')
cons2 = dict(id=11, password='fake_password2',
port='fake_port2', pool=pool2)
return [cons1, cons2]
expected = {'consoles':
[{'console': {'id': 10, 'console_type': 'fake_type'}},
{'console': {'id': 11, 'console_type': 'fake_type2'}}]}
self.stubs.Set(console.api.API, 'get_consoles', fake_get_consoles)
req = fakes.HTTPRequest.blank(self.url)
res_dict = self.controller.index(req, self.uuid)
self.assertDictMatch(res_dict, expected)
def test_delete_console(self):
def fake_get_console(cons_self, context, instance_id, console_id):
self.assertEqual(instance_id, self.uuid)
self.assertEqual(console_id, 20)
pool = dict(console_type='fake_type',
public_hostname='fake_hostname')
return dict(id=console_id, password='fake_password',
port='fake_port', pool=pool)
def fake_delete_console(cons_self, context, instance_id, console_id):
self.assertEqual(instance_id, self.uuid)
self.assertEqual(console_id, 20)
self.stubs.Set(console.api.API, 'get_console', fake_get_console)
self.stubs.Set(console.api.API, 'delete_console', fake_delete_console)
req = fakes.HTTPRequest.blank(self.url + '/20')
self.controller.delete(req, self.uuid, '20')
def test_delete_console_unknown_console(self):
def fake_delete_console(cons_self, context, instance_id, console_id):
raise exception.ConsoleNotFound(console_id=console_id)
self.stubs.Set(console.api.API, 'delete_console', fake_delete_console)
req = fakes.HTTPRequest.blank(self.url + '/20')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
req, self.uuid, '20')
def test_delete_console_unknown_instance(self):
def fake_delete_console(cons_self, context, instance_id, console_id):
raise exception.InstanceNotFound(instance_id=instance_id)
self.stubs.Set(console.api.API, 'delete_console', fake_delete_console)
req = fakes.HTTPRequest.blank(self.url + '/20')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
req, self.uuid, '20')
class TestConsolesXMLSerializer(test.TestCase):
def test_show(self):
fixture = {'console': {'id': 20,
'password': 'fake_password',
'port': 'fake_port',
'host': 'fake_hostname',
'console_type': 'fake_type'}}
output = consoles.ConsoleTemplate().serialize(fixture)
res_tree = etree.XML(output)
self.assertEqual(res_tree.tag, 'console')
self.assertEqual(res_tree.xpath('id')[0].text, '20')
self.assertEqual(res_tree.xpath('port')[0].text, 'fake_port')
self.assertEqual(res_tree.xpath('host')[0].text, 'fake_hostname')
self.assertEqual(res_tree.xpath('password')[0].text, 'fake_password')
self.assertEqual(res_tree.xpath('console_type')[0].text, 'fake_type')
def test_index(self):
fixture = {'consoles': [{'console': {'id': 10,
'console_type': 'fake_type'}},
{'console': {'id': 11,
'console_type': 'fake_type2'}}]}
output = consoles.ConsolesTemplate().serialize(fixture)
res_tree = etree.XML(output)
self.assertEqual(res_tree.tag, 'consoles')
self.assertEqual(len(res_tree), 2)
self.assertEqual(res_tree[0].tag, 'console')
self.assertEqual(res_tree[1].tag, 'console')
self.assertEqual(len(res_tree[0]), 1)
self.assertEqual(res_tree[0][0].tag, 'console')
self.assertEqual(len(res_tree[1]), 1)
self.assertEqual(res_tree[1][0].tag, 'console')
self.assertEqual(res_tree[0][0].xpath('id')[0].text, '10')
self.assertEqual(res_tree[1][0].xpath('id')[0].text, '11')
self.assertEqual(res_tree[0][0].xpath('console_type')[0].text,
'fake_type')
self.assertEqual(res_tree[1][0].xpath('console_type')[0].text,
'fake_type2')
|
josephsuh/extra-specs
|
nova/tests/api/openstack/compute/test_consoles.py
|
Python
|
apache-2.0
| 11,446
|
#!/usr/bin/env python
#
# Copyright 2013 CSIR Meraka HLT and Multilingual Speech Technologies (MuST) North-West University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = "Marelie Davel"
__email__ = "mdavel@csir.co.za"
"""
Display the dictionary pronunciations of the most frequent words occurring in a speech corpus
@param in_trans_list: List of transcription filenames
@param in_dict: Pronunciation dictionary
@param top_n: Number of words to verify
@param out_name: Name of output file for results
"""
import sys, operator, codecs
#------------------------------------------------------------------------------
def display_top_prons(trans_list_name, dict_name, top_n, out_name):
"""Display the dictionary pronunciations of the most frequent words occuring in a speech corpus"""
#Read dictionary
pron_dict = {}
try:
dict_file = codecs.open(dict_name,"r","utf8")
except IOError:
print "Error: Error reading from file " + dict_name
sys.exit(1)
    for ln in dict_file:
        ln = ln.strip()
        parts = ln.split("\t")
        if len(parts) != 2:
            print "Error: dictionary format error line %s" % ln
            continue
        word = parts[0]
        pron = parts[1]
        if word in pron_dict:
            pron_dict[word].append(pron)
        else:
            pron_dict[word] = [pron]
    dict_file.close()
    #Read and count words in the transcriptions
counts = {}
try:
list_file = codecs.open(trans_list_name,"r","utf8")
except IOError:
print "Error: Error reading from file " + trans_list_name
sys.exit(1)
for trans_name in list_file:
trans_name = trans_name.strip()
try:
trans_file = codecs.open(trans_name,"r","utf8")
except IOError:
print "Error: Error reading from file " + trans_name
sys.exit(1)
for ln in trans_file:
ln = ln.strip()
parts = ln.split(" ")
for word in parts:
                if word in counts:
                    counts[word] += 1
                else:
                    counts[word] = 1
trans_file.close()
list_file.close()
#Now write top pronunciations to file
try:
out_file = codecs.open(out_name,"w","utf8")
except IOError:
print "Error: Error writing to file " + out_name
sys.exit(1)
    top_words = sorted(counts.items(), key=operator.itemgetter(1), reverse=True)
    n = 0
    for (w, c) in top_words:
        if n < top_n:
            if w in pron_dict:
                for var_pron in pron_dict[w]:
                    out_file.write("%d\t%-20s\t%s\n" % (c, w, var_pron))
                n += 1
            else:
                print "Error: unknown word %s" % w
        else:
            break
out_file.close()
#------------------------------------------------------------------------------
if __name__ == "__main__":
if len(sys.argv) == 5:
trans_list_name = str(sys.argv[1])
dict_name = str(sys.argv[2])
top_n = int(sys.argv[3])
out_name = str(sys.argv[4])
print "Displaying the %d most frequent words" % top_n
display_top_prons(trans_list_name, dict_name, top_n, out_name)
else:
print "\nDisplay the dictionary pronunciations of the most frequent words in a speech corpus."
print "Usage: display_top_prons.py <in:trans_list> <in:dict> <n> <out:results>"
print " <in:trans_list> list of transcription filenames"
print " <in:dict> pronunciation dictionary"
print " <n> number of words to verify"
print " <out:results> name of output file for results"
#------------------------------------------------------------------------------
|
Mphaya/heasy
|
heasy/utility_scripts/display_top_prons.py
|
Python
|
apache-2.0
| 4,406
|