repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
robertsj/poropy | pyqtgraph/examples/ViewBox.py | Python | mit | 2,670 | 0.010861 | #!/usr/bin/python
# -*- coding: utf-8 -*-
## Add path | to lib | rary (just for examples; you do not need this)
import sys, os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
## This example uses a ViewBox to create a PlotWidget-like interface
#from scipy import random
import numpy as np
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
app = QtGui.QApplication([])
mw = QtGui.QMainWindow()
#cw = QtGui.QWidget()
#vl = QtGui.QVBoxLayout()
#cw.setLayout(vl)
#mw.setCentralWidget(cw)
mw.show()
mw.resize(800, 600)
gv = pg.GraphicsView()
mw.setCentralWidget(gv)
#gv.enableMouse(False) ## Mouse interaction will be handled by the ViewBox
l = QtGui.QGraphicsGridLayout()
l.setHorizontalSpacing(0)
l.setVerticalSpacing(0)
#vl.addWidget(gv)
vb = pg.ViewBox()
#grid = pg.GridItem()
#vb.addItem(grid)
p1 = pg.PlotDataItem()
vb.addItem(p1)
class movableRect(QtGui.QGraphicsRectItem):
def __init__(self, *args):
QtGui.QGraphicsRectItem.__init__(self, *args)
self.setAcceptHoverEvents(True)
def hoverEnterEvent(self, ev):
self.savedPen = self.pen()
self.setPen(QtGui.QPen(QtGui.QColor(255, 255, 255)))
ev.ignore()
def hoverLeaveEvent(self, ev):
self.setPen(self.savedPen)
ev.ignore()
def mousePressEvent(self, ev):
if ev.button() == QtCore.Qt.LeftButton:
ev.accept()
self.pressDelta = self.mapToParent(ev.pos()) - self.pos()
else:
ev.ignore()
def mouseMoveEvent(self, ev):
self.setPos(self.mapToParent(ev.pos()) - self.pressDelta)
#rect = QtGui.QGraphicsRectItem(QtCore.QRectF(0, 0, 1, 1))
rect = movableRect(QtCore.QRectF(0, 0, 1, 1))
rect.setPen(QtGui.QPen(QtGui.QColor(100, 200, 100)))
vb.addItem(rect)
l.addItem(vb, 0, 1)
gv.centralWidget.setLayout(l)
xScale = pg.AxisItem(orientation='bottom', linkView=vb)
l.addItem(xScale, 1, 1)
yScale = pg.AxisItem(orientation='left', linkView=vb)
l.addItem(yScale, 0, 0)
xScale.setLabel(text=u"<span style='color: #ff0000; font-weight: bold'>X</span> <i>Axis</i>", units="s")
yScale.setLabel('Y Axis', units='V')
def rand(n):
data = np.random.random(n)
data[int(n*0.1):int(n*0.13)] += .5
data[int(n*0.18)] += 2
data[int(n*0.1):int(n*0.13)] *= 5
data[int(n*0.18)] *= 20
return data, np.arange(n, n+len(data)) / float(n)
def updateData():
yd, xd = rand(10000)
p1.setData(y=yd, x=xd)
yd, xd = rand(10000)
updateData()
vb.autoRange()
t = QtCore.QTimer()
t.timeout.connect(updateData)
t.start(50)
## Start Qt event loop unless running in interactive mode.
if sys.flags.interactive != 1:
app.exec_()
|
zerocoordinate/deployer | deployer/tasks/databases/postgis.py | Python | bsd-3-clause | 1,222 | 0.006547 | import os
from fabric.api import *
from fabric.context_managers import cd
from .postgresql import remove_db, remove_db_user, create_db_user, backup_db
from .postgresql import configure_db as configure_postgresql
def install_db():
packages = (
'postgresql',
'binutils',
'gdal-bin',
'postgresql-9.1-postgis',
'postgresql-server-dev-9.1',
'libgeoip1',
'python-gdal',
)
sudo('apt-get install -y %s;' % " ".join(packages))
def configure_db():
configure_post | gresql()
create_spatialdb_template()
def create_spatialdb_template():
''' Runs the PostGIS spatial DB template script. '''
put(os.path.join(env.deploy_dir, 'create_template_postgis-debian.sh | '),
'/tmp/', mirror_local_mode=True)
try:
sudo('/tmp/create_template_postgis-debian.sh', user='postgres')
except Exception, exc:
print "There was an error creating the spatialdb template: %s" % exc
finally:
run('rm -f /tmp/create_template_postgis-debian.sh')
def create_db(db_user, db_name):
''' Creates a PostgreSQL database. '''
sudo('psql -c "CREATE DATABASE %s WITH OWNER %s TEMPLATE template_postgis"' % (db_name, db_user), user='postgres')
|
pschmitt/home-assistant | homeassistant/components/zha/device_trigger.py | Python | apache-2.0 | 2,852 | 0.000701 | """Provides device automations for ZHA devices that emit events."""
import voluptuous as vol
import homeassistant.components.automation.event as event
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.device_automation.exceptions import (
InvalidDeviceAutomationConfig,
)
from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE
from . import DOMAIN
from .core.helpers import async_get_zha_device
CONF_SUBTYPE = "subtype"
DEVICE = "device"
DEVICE_IEEE = "device_ieee"
ZHA_EVENT = "zha_event"
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{vol.Required(CONF_TYPE): str, vol.Required(CONF_SUBTYPE): str}
)
async def async_validate_trigger_config(hass, config):
"""Validate config."""
config = TRIGGER_SCHEMA(config)
if "zha" in hass.config.components:
trigger = (config[CONF_TYPE], config[CONF_SUBTYPE])
try:
zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
except (KeyError, AttributeError):
raise InvalidDeviceAutomationConfig
if (
zha_device.device_automation_triggers is None
or trigger not in zha_device.device_automation_triggers
):
raise InvalidDeviceAutomationCon | fig
return config
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for state changes based on configuration."""
trigger = (config[CONF_TYPE], config[CONF_SUBTYPE])
try:
| zha_device = await async_get_zha_device(hass, config[CONF_DEVICE_ID])
except (KeyError, AttributeError):
return None
if trigger not in zha_device.device_automation_triggers:
return None
trigger = zha_device.device_automation_triggers[trigger]
event_config = {
event.CONF_PLATFORM: "event",
event.CONF_EVENT_TYPE: ZHA_EVENT,
event.CONF_EVENT_DATA: {DEVICE_IEEE: str(zha_device.ieee), **trigger},
}
event_config = event.TRIGGER_SCHEMA(event_config)
return await event.async_attach_trigger(
hass, event_config, action, automation_info, platform_type="device"
)
async def async_get_triggers(hass, device_id):
"""List device triggers.
Make sure the device supports device automations and
if it does return the trigger list.
"""
zha_device = await async_get_zha_device(hass, device_id)
if not zha_device.device_automation_triggers:
return
triggers = []
for trigger, subtype in zha_device.device_automation_triggers.keys():
triggers.append(
{
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_PLATFORM: DEVICE,
CONF_TYPE: trigger,
CONF_SUBTYPE: subtype,
}
)
return triggers
|
paopao74cn/noworkflow | tests/t1.py | Python | mit | 66 | 0.030303 | def a | ():
def b():
print ' | b'
print 'a'
print a.b() |
CruiseDevice/coala | coalib/parsing/DefaultArgParser.py | Python | agpl-3.0 | 9,780 | 0 | import argparse
from coalib.misc import Constants
from coalib.collecting.Collectors import get_all_bears_names
from coalib.parsing.filters import available_filters
class CustomFormatter(argparse.RawDescriptionHelpFormatter):
"""
A Custom Formatter that will keep the metavars in the usage but remove them
in the more detailed arguments section.
"""
def _format_action_invocation(self, action):
if not action.option_strings:
# For arguments that don't have options strings
metavar, = self._metavar_formatter(action, action.dest)(1)
return metavar
else:
# Option string arguments (like "-f, --files")
parts = action.option_strings
return ', '.join(parts)
def default_arg_parser(formatter_class=None):
"""
This function creates an ArgParser to parse command line arguments.
:param formatter_class: Formatting the arg_parser output into a specific
form. For example: In the manpage format.
"""
formatter_class = (CustomFormatter if formatter_class is None
else formatter_class)
description = """
coala provides a common command-line interface for linting and fixing all your
code, regardless of the programming languages you use.
To find out what kind of analysis coala offers for the languages you use, visit
http://coala.io/languages, or run::
$ coala --show-bears --filter-by language C Python
To perform code analysis, simply specify the analysis routines (bears) and the
files you want it to run on, for example:
spaceBear::
$ coala --bears SpaceConsistencyBear --files **.py
coala can also automatically fix your code:
spacePatchBear::
$ coala --bears SpaceConsistencyBear --files **.py --apply-patches
To run coala without user interaction, run the `coala --non-interactive`,
`coala --json` and `coala --format` commands.
"""
arg_parser = argparse.ArgumentParser(
formatter_class=formatter_class,
prog='coala',
description= | description,
# Use our own help so that we can put it in the | group we want
add_help=False)
arg_parser.add_argument('TARGETS',
nargs='*',
help='sections to be executed exclusively')
info_group = arg_parser.add_argument_group('Info')
info_group.add_argument('-h',
'--help',
action='help',
help='show this help message and exit')
info_group.add_argument('-v',
'--version',
action='version',
version=Constants.VERSION)
mode_group = arg_parser.add_argument_group('Mode')
mode_group.add_argument(
'-C', '--non-interactive', const=True, action='store_const',
help='run coala in non interactive mode')
mode_group.add_argument(
'--ci', action='store_const', dest='non_interactive', const=True,
help='continuous integration run, alias for `--non-interactive`')
mode_group.add_argument(
'--json', const=True, action='store_const',
help='mode in which coala will display output as json')
mode_group.add_argument(
'--format', const=True, nargs='?', metavar='STR',
help='output results with a custom format string, e.g. '
'"Message: {message}"; possible placeholders: '
'id, origin, file, line, end_line, column, end_column, '
'severity, severity_str, message, message_base, '
'message_arguments, affected_code, source_lines')
config_group = arg_parser.add_argument_group('Configuration')
config_group.add_argument(
'-c', '--config', nargs=1, metavar='FILE',
help='configuration file to be used, defaults to {}'.format(
Constants.default_coafile))
config_group.add_argument(
'-F', '--find-config', action='store_const', const=True,
help='find {} in ancestors of the working directory'.format(
Constants.default_coafile))
config_group.add_argument(
'-I', '--no-config', const=True, action='store_const',
help='run without using any config file')
config_group.add_argument(
'-s', '--save', nargs='?', const=True, metavar='FILE',
help='save used arguments to a config file to a {}, the given path, '
'or at the value of -c'.format(Constants.default_coafile))
config_group.add_argument(
'--disable-caching', const=True, action='store_const',
help='run on all files even if unchanged')
config_group.add_argument(
'--flush-cache', const=True, action='store_const',
help='rebuild the file cache')
config_group.add_argument(
'--no-autoapply-warn', const=True, action='store_const',
help='turn off warning about patches not being auto applicable')
inputs_group = arg_parser.add_argument_group('Inputs')
inputs_group.add_argument(
'-b', '--bears', nargs='+', metavar='NAME',
help='names of bears to use').completer = (
lambda *args, **kwargs: get_all_bears_names()) # pragma: no cover
inputs_group.add_argument(
'-f', '--files', nargs='+', metavar='FILE',
help='files that should be checked')
inputs_group.add_argument(
'-i', '--ignore', nargs='+', metavar='FILE',
help='files that should be ignored')
inputs_group.add_argument(
'--limit-files', nargs='+', metavar='FILE',
help="filter the `--files` argument's matches further")
inputs_group.add_argument(
'-d', '--bear-dirs', nargs='+', metavar='DIR',
help='additional directories which may contain bears')
outputs_group = arg_parser.add_argument_group('Outputs')
outputs_group.add_argument(
'-V', '--verbose', action='store_const',
dest='log_level', const='DEBUG',
help='alias for `-L DEBUG`')
outputs_group.add_argument(
'-L', '--log-level', nargs=1,
choices=['ERROR', 'INFO', 'WARNING', 'DEBUG'], metavar='ENUM',
help='set log output level to DEBUG/INFO/WARNING/ERROR, '
'defaults to INFO')
outputs_group.add_argument(
'-m', '--min-severity', nargs=1,
choices=('INFO', 'NORMAL', 'MAJOR'), metavar='ENUM',
help='set minimal result severity to INFO/NORMAL/MAJOR')
outputs_group.add_argument(
'-N', '--no-color', const=True, action='store_const',
help='display output without coloring (excluding logs)')
outputs_group.add_argument(
'-B', '--show-bears', const=True, action='store_const',
help='list all bears')
outputs_group.add_argument(
'-l', '--filter-by-language', nargs='+', metavar='LANG',
help='filters `--show-bears` by the given languages')
outputs_group.add_argument(
'--filter-by', action='append', nargs='+',
metavar=('FILTER_NAME FILTER_ARG', 'FILTER_ARG'),
help='filters `--show-bears` by the filter given as argument. '
'Available filters: {}'.format(', '.join(sorted(
available_filters))))
outputs_group.add_argument(
'-p', '--show-capabilities', nargs='+', metavar='LANG',
help='show what coala can fix and detect for the given languages')
outputs_group.add_argument(
'-D', '--show-description', const=True, action='store_const',
help='show bear descriptions for `--show-bears`')
outputs_group.add_argument(
'--show-settings', const=True, action='store_const',
help='show bear settings for `--show-bears`')
outputs_group.add_argument(
'--show-details', const=True, action='store_const',
help='show bear details for `--show-bears`')
outputs_group.add_argument(
'--log-json', const=True, action='store_const',
help='output logs as json along with results'
' (must be called with --json)')
outputs_group.add_argument(
'-o', '--output', nargs=1, metavar='FILE',
help='writ |
lituan/tools | blosum.py | Python | cc0-1.0 | 11,063 | 0.004429 | BLOSUM45 = {
'A': {'A': 1, 'R': -2, 'N': -1, 'D': -2, 'C': -1, 'Q': -1, 'E': -1, 'G': 0, 'H': -2, 'I': -1, 'L': -1, 'K': -1, 'M': -1, 'F': -2, 'P': -1, 'S': 0, 'T': 0, 'W': -2, 'Y': -2, 'V': 0, 'B': -1, 'J': -1, 'Z': -1, 'X': -1, '*': -5},
'R': {'A': -2, 'R': 8, 'N': 0, 'D': -2, 'C': -3, 'Q': 1, 'E': -2, 'G': -2, 'H': 2, 'I': -3, 'L': -2, 'K': 3, 'M': -1, 'F': -2, 'P': -2, 'S': -1, 'T': -1, 'W': -2, 'Y': -1, 'V': -2, 'B': -1, 'J': -3, 'Z': 1, 'X': -1, '*': -5},
'N': {'A': -1, 'R': 0, 'N': 6, 'D': 2, 'C': -2, 'Q': 0, 'E': 0, 'G': 0, 'H': 1, 'I': -2, 'L': -3, 'K': 0, 'M': -2, 'F': -2, 'P': -2, 'S': 1, 'T': 0, 'W': -4, 'Y': -2, 'V': -3, 'B': 5, 'J': -3, 'Z': 0, 'X': -1, '*': -5},
'D': {'A': -2, 'R': -2, 'N': 2, 'D': 7, 'C': -3, 'Q': 0, 'E': 2, 'G': -1, 'H': -2, 'I': -4, 'L': -3, 'K': -2, 'M': -3, 'F': -4, 'P': -1, 'S': 0, 'T': -1, 'W': -4, 'Y': -2, 'V': -3, 'B': 6, 'J': -3, 'Z': 1, 'X': -1, '*': -5},
'C': {'A': -1, 'R': -3, 'N': -2, 'D': -3, 'C': 1, 'Q': -3, 'E': -3, 'G': -3, 'H': -3, 'I': -3, 'L': -2, 'K': -3, 'M': -2, 'F': -2, 'P': -4, 'S': -1, 'T': -1, 'W': -5, 'Y': -3, 'V': -1, 'B': -2, 'J': -2, 'Z': -3, 'X': -1, '*': -5},
'Q': {'A': -1, 'R': 1, 'N': 0, 'D': 0, 'C': -3, 'Q': 6, 'E': 2, 'G': -2, 'H': 1, 'I': -2, 'L': -2, 'K': 1, 'M': 0, 'F': -4, 'P': -1, 'S': 0, 'T': -1, 'W': -2, 'Y': -1, 'V': -3, 'B': 0, 'J': -2, 'Z': 4, 'X': -1, '*': -5},
'E': {'A': -1, 'R': -2, 'N': 0, 'D': 2, 'C': -3, 'Q': 2, 'E': 6, 'G': -2, 'H': -2, 'I': -3, 'L': -2, 'K': -2, 'M': -2, 'F': -3, 'P': 0, 'S': 0, 'T': -1, 'W': -3, 'Y': -2, 'V': -3, 'B': 1, 'J': -3, 'Z': 5, 'X': -1, '*': -5},
'G': {'A': 0, 'R': -2, 'N': 0, 'D': -1, 'C': -3, 'Q': -2, 'E': -2, 'G': 1, 'H': -2, 'I': -4, 'L': -3, 'K': -2, 'M': -2, 'F': -3, 'P': -2, 'S': 0, 'T': -2, 'W': -2, 'Y': -3, 'V': -3, 'B': -1, 'J': -4, 'Z': -2, 'X': -1, '*': -5},
'H': {'A': -2, 'R': 0, 'N': 1, 'D': -2, 'C': -3, 'Q': 1, 'E': -2, 'G': -2, 'H': 10, 'I': -3, 'L': -2, 'K': -1, 'M': 0, 'F': -2, 'P': -2, 'S': -1, 'T': -2, 'W': -3, 'Y': 2, 'V': -3, 'B': 0, 'J': -2, 'Z': 0, 'X': -1, '*': -5},
'I': {'A': -1, 'R': -3, 'N': -2, 'D': -4, 'C': -3, 'Q': -2, 'E': -3, 'G': -4, 'H': -3, 'I': 5, 'L': 2, 'K': -3, 'M': 2, 'F': 0, 'P': -2, 'S': -2, 'T': -1, 'W': -2, 'Y': 0, 'V': 3, 'B': -3, 'J': 4, 'Z': -3, 'X': -1, '*': -5},
'L': {'A': -1, 'R': -2, 'N': -3, 'D': -3, 'C': -2, 'Q': -2, 'E': -2, 'G': -3, 'H': -2, 'I': 2, 'L': 5, 'K': -3, 'M': 2, 'F': 1, 'P': -3, 'S': -3, 'T': -1, 'W': -2, 'Y': 0, 'V': 1, 'B': -3, 'J': 4, 'Z': -2, 'X': -1, '*': -5},
'K': {'A': -1, 'R': 3, 'N': 0, 'D': -2, 'C': -3, 'Q': 1, 'E': -2, 'G': -2, 'H': -2, 'I': -3, 'L': -3, 'K': 5, 'M': -1, 'F': -3, 'P': -1, 'S': -1, 'T': -1, 'W': -2, 'Y': -1, 'V': -2, 'B': 0, 'J': -3, 'Z': 1, 'X': -1, '*': -5},
'M': {'A': -1, 'R': -1, 'N': -2, 'D': -3, 'C': -2, 'Q': 0, 'E': -2, 'G': -2, 'H': 0, 'I': 2, 'L': 2, 'K': -1, 'M': 5, 'F': 0, 'P': -2, 'S': -2, 'T': -1, 'W': -2, 'Y': 0, 'V': 1, 'B': -2, 'J': 2, 'Z': -1, 'X': -1, '*': -5},
'F': {'A': -2, 'R': -2, 'N': -2, 'D': -4, 'C': -2, | 'Q': -4, 'E': -3, 'G': -3, 'H': -2, 'I': 0, 'L': 1, 'K': -3, 'M': 0, 'F': 8, 'P': -3, 'S': -2, 'T': -1, 'W': 1, 'Y': 3, 'V': 0, 'B': -3, 'J': 1, 'Z': -3, 'X': -1, '*': -5},
'P': {'A': -1, 'R': -2, 'N': -2, 'D': -1, 'C': -4, 'Q': -1, 'E': 0, 'G': -2, 'H': -2, 'I': -2, 'L': -3, 'K': -1, 'M': -2, 'F': -3, 'P': 2, 'S': -1, 'T': -1, 'W': -3, 'Y' | : -3, 'V': -3, 'B': -2, 'J': -3, 'Z': -1, 'X': -1, '*': -5},
'S': {'A': 0, 'R': -1, 'N': 1, 'D': 0, 'C': -1, 'Q': 0, 'E': 0, 'G': 0, 'H': -1, 'I': -2, 'L': -3, 'K': -1, 'M': -2, 'F': -2, 'P': -1, 'S': 4, 'T': 2, 'W': -4, 'Y': -2, 'V': -1, 'B': 0, 'J': -2, 'Z': 0, 'X': -1, '*': -5},
'T': {'A': 0, 'R': -1, 'N': 0, 'D': -1, 'C': -1, 'Q': -1, 'E': -1, 'G': -2, 'H': -2, 'I': -1, 'L': -1, 'K': -1, 'M': -1, 'F': -1, 'P': -1, 'S': 2, 'T': 5, 'W': -3, 'Y': -1, 'V': 0, 'B': 0, 'J': -1, 'Z': -1, 'X': -1, '*': -5},
'W': {'A': -2, 'R': -2, 'N': -4, 'D': -4, 'C': -5, 'Q': -2, 'E': -3, 'G': -2, 'H': -3, 'I': -2, 'L': -2, 'K': -2, 'M': -2, 'F': 1, 'P': -3, 'S': -4, 'T': -3, 'W': 10, 'Y': 3, 'V': -3, 'B': -4, 'J': -2, 'Z': -2, 'X': -1, '*': -5},
'Y': {'A': -2, 'R': -1, 'N': -2, 'D': -2, 'C': -3, 'Q': -1, 'E': -2, 'G': -3, 'H': 2, 'I': 0, 'L': 0, 'K': -1, 'M': 0, 'F': 3, 'P': -3, 'S': -2, 'T': -1, 'W': 3, 'Y': 8, 'V': -1, 'B': -2, 'J': 0, 'Z': -2, 'X': -1, '*': -5},
'V': {'A': 0, 'R': -2, 'N': -3, 'D': -3, 'C': -1, 'Q': -3, 'E': -3, 'G': -3, 'H': -3, 'I': 3, 'L': 1, 'K': -2, 'M': 1, 'F': 0, 'P': -3, 'S': -1, 'T': 0, 'W': -3, 'Y': -1, 'V': 4, 'B': -3, 'J': 2, 'Z': -3, 'X': -1, '*': -5},
'B': {'A': -1, 'R': -1, 'N': 5, 'D': 6, 'C': -2, 'Q': 0, 'E': 1, 'G': -1, 'H': 0, 'I': -3, 'L': -3, 'K': 0, 'M': -2, 'F': -3, 'P': -2, 'S': 0, 'T': 0, 'W': -4, 'Y': -2, 'V': -3, 'B': 5, 'J': -3, 'Z': 1, 'X': -1, '*': -5},
'J': {'A': -1, 'R': -3, 'N': -3, 'D': -3, 'C': -2, 'Q': -2, 'E': -3, 'G': -4, 'H': -2, 'I': 4, 'L': 4, 'K': -3, 'M': 2, 'F': 1, 'P': -3, 'S': -2, 'T': -1, 'W': -2, 'Y': 0, 'V': 2, 'B': -3, 'J': 4, 'Z': -2, 'X': -1, '*': -5},
'Z': {'A': -1, 'R': 1, 'N': 0, 'D': 1, 'C': -3, 'Q': 4, 'E': 5, 'G': -2, 'H': 0, 'I': -3, 'L': -2, 'K': 1, 'M': -1, 'F': -3, 'P': -1, 'S': 0, 'T': -1, 'W': -2, 'Y': -2, 'V': -3, 'B': 1, 'J': -2, 'Z': 5, 'X': -1, '*': -5},
'X': {'A': -1, 'R': -1, 'N': -1, 'D': -1, 'C': -1, 'Q': -1, 'E': -1, 'G': -1, 'H': -1, 'I': -1, 'L': -1, 'K': -1, 'M': -1, 'F': -1, 'P': -1, 'S': -1, 'T': -1, 'W': -1, 'Y': -1, 'V': -1, 'B': -1, 'J': -1, 'Z': -1, 'X': -1, '*': -5},
'*': {'A': -5, 'R': -5, 'N': -5, 'D': -5, 'C': -5, 'Q': -5, 'E': -5, 'G': -5, 'H': -5, 'I': -5, 'L': -5, 'K': -5, 'M': -5, 'F': -5, 'P': -5, 'S': -5, 'T': -5, 'W': -5, 'Y': -5, 'V': -5, 'B': -5, 'J': -5, 'Z': -5, 'X': -5, '*': 1}
}
BLOSUM62 = {
'*': {'*': 1, 'A': -4, 'C': -4, 'B': -4, 'E': -4, 'D': -4, 'G': -4, 'F': -4, 'I': -4, 'H': -4, 'K': -4, 'M': -4, 'L': -4, 'N': -4, 'Q': -4, 'P': -4, 'S': -4, 'R': -4, 'T': -4, 'W': -4, 'V': -4, 'Y': -4, 'X': -4, 'Z': -4},
'A': {'*': -4, 'A': 4, 'C': 0, 'B': -2, 'E': -1, 'D': -2, 'G': 0, 'F': -2, 'I': -1, 'H': -2, 'K': -1, 'M': -1, 'L': -1, 'N': -2, 'Q': -1, 'P': -1, 'S': 1, 'R': -1, 'T': 0, 'W': -3, 'V': 0, 'Y': -2, 'X': 0, 'Z': -1},
'C': {'*': -4, 'A': 0, 'C': 9, 'B': -3, 'E': -4, 'D': -3, 'G': -3, 'F': -2, 'I': -1, 'H': -3, 'K': -3, 'M': -1, 'L': -1, 'N': -3, 'Q': -3, 'P': -3, 'S': -1, 'R': -3, 'T': -1, 'W': -2, 'V': -1, 'Y': -2, 'X': -2, 'Z': -3},
'B': {'*': -4, 'A': -2, 'C': -3, 'B': 4, 'E': 1, 'D': 4, 'G': -1, 'F': -3, 'I': -3, 'H': 0, 'K': 0, 'M': -3, 'L': -4, 'N': 3, 'Q': 0, 'P': -2, 'S': 0, 'R': -1, 'T': -1, 'W': -4, 'V': -3, 'Y': -3, 'X': -1, 'Z': 1},
'E': {'*': -4, 'A': -1, 'C': -4, 'B': 1, 'E': 5, 'D': 2, 'G': -2, 'F': -3, 'I': -3, 'H': 0, 'K': 1, 'M': -2, 'L': -3, 'N': 0, 'Q': 2, 'P': -1, 'S': 0, 'R': 0, 'T': -1, 'W': -3, 'V': -2, 'Y': -2, 'X': -1, 'Z': 4},
'D': {'*': -4, 'A': -2, 'C': -3, 'B': 4, 'E': 2, 'D': 6, 'G': -1, 'F': -3, 'I': -3, 'H': -1, 'K': -1, 'M': -3, 'L': -4, 'N': 1, 'Q': 0, 'P': -1, 'S': 0, 'R': -2, 'T': -1, 'W': -4, 'V': -3, 'Y': -3, 'X': -1, 'Z': 1},
'G': {'*': -4, 'A': 0, 'C': -3, 'B': -1, 'E': -2, 'D': -1, 'G': 6, 'F': -3, 'I': -4, 'H': -2, 'K': -2, 'M': -3, 'L': -4, 'N': 0, 'Q': -2, 'P': -2, 'S': 0, 'R': -2, 'T': -2, 'W': -2, 'V': -3, 'Y': -3, 'X': -1, 'Z': -2},
'F': {'*': -4, 'A': -2, 'C': -2, 'B': -3, 'E': -3, 'D': -3, 'G': -3, 'F': 6, 'I': 0, 'H': -1, 'K': -3, 'M': 0, 'L': 0, 'N': -3, 'Q': -3, 'P': -4, 'S': -2, 'R': -3, 'T': -2, 'W': 1, 'V': -1, 'Y': 3, 'X': -1, 'Z': -3},
'I': {'*': -4, 'A': -1, 'C': -1, 'B': -3, 'E': -3, 'D': -3, 'G': -4, 'F': 0, 'I': 4, 'H': -3, 'K': -3, 'M': 1, 'L': 2, 'N': -3, 'Q': -3, 'P': -3, 'S': -2, 'R': -3, 'T': -1, 'W': -3, 'V': 3, 'Y': -1, 'X': -1, 'Z': -3},
'H': {'*': -4, 'A': -2, 'C': -3, 'B': 0, 'E': 0, 'D': -1, 'G': -2, 'F': -1, 'I': -3, 'H': 8, 'K': -1, 'M': -2, 'L': -3, 'N': 1, 'Q': 0, 'P': -2, 'S': -1, 'R': 0, 'T': -2, 'W': -2, 'V': -3, 'Y': 2, 'X': -1, 'Z': 0},
'K': {'*': -4, 'A': -1, 'C': -3, 'B': 0, 'E': 1, 'D': -1, 'G': -2, 'F': -3, 'I': -3, 'H': -1, 'K': 5, 'M': -1, 'L': -2, 'N': 0, 'Q': 1, 'P': -1, 'S': 0, 'R': 2, 'T': -1, 'W': -3, 'V': -2, 'Y': -2, ' |
jboy/nim-pymod | tests/01-numpy_arrays/002-data_attr/test_data_attr.py | Python | mit | 10,370 | 0.005689 | import array_utils
import numpy
import pytest
def test_0_compile_pymod_test_mod(pmgen_py_compile):
pmgen_py_compile(__name__)
def _get_array_data_address(arr):
# It took me a long time to find out how to access the `arr.data` address
# (ie, obtain the actual `arr.data` pointer as an integer) in Python!
# If you simply invoke `arr.data` in Python, it returns you a temporary
# intermediate buffer object, that has a different memory address!
data_addr = arr.__array_interface__["data"][0]
return data_addr
@pytest.mark.parametrize("array_shape", array_utils.all_small_array_shapes)
@pytest.mark.parametrize("input_type", array_utils.all_supported_numpy_types)
def test_returnDataPointerAsInt1(pymod_test_mod, array_shape, input_type):
arg = array_utils.get_random_Nd_array_of_shape_and_type(array_shape, input_type)
res = pymod_test_mod.returnDataPointerAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
@pytest.mark.parametrize("input_type", array_utils.all_supported_numpy_types)
def test_returnDataPointerAsInt2(pymod_test_mod, random_1d_array_size, input_type):
arg = numpy.zeros(random_1d_array_size, dtype=input_type)
res = pymod_test_mod.returnDataPointerAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnBoolDataPtrAsInt_1d1(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, numpy.bool)
res = pymod_test_mod.returnBoolDataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnBool | DataPtrAsInt_1d2(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, | numpy.bool_)
res = pymod_test_mod.returnBoolDataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt8DataPtrAsInt_1d(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, numpy.int8)
res = pymod_test_mod.returnInt8DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt16DataPtrAsInt_1d(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, numpy.int16)
res = pymod_test_mod.returnInt16DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt32DataPtrAsInt_1d(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, numpy.int32)
res = pymod_test_mod.returnInt32DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt64DataPtrAsInt_1d(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, numpy.int64)
res = pymod_test_mod.returnInt64DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnFloat32DataPtrAsInt_1d(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, numpy.float32)
res = pymod_test_mod.returnFloat32DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnFloat64DataPtrAsInt_1d(pymod_test_mod, random_1d_array_size):
arg = array_utils.get_random_1d_array_of_size_and_type(random_1d_array_size, numpy.float64)
res = pymod_test_mod.returnFloat64DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnBoolDataPtrAsInt_2d1(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.bool)
res = pymod_test_mod.returnBoolDataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnBoolDataPtrAsInt_2d2(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.bool_)
res = pymod_test_mod.returnBoolDataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt8DataPtrAsInt_2d(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.int8)
res = pymod_test_mod.returnInt8DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt16DataPtrAsInt_2d(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.int16)
res = pymod_test_mod.returnInt16DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt32DataPtrAsInt_2d(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.int32)
res = pymod_test_mod.returnInt32DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnInt64DataPtrAsInt_2d(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.int64)
res = pymod_test_mod.returnInt64DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnFloat32DataPtrAsInt_2d(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.float32)
res = pymod_test_mod.returnFloat32DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
def test_returnFloat64DataPtrAsInt_2d(pymod_test_mod, random_2d_array_shape):
arg = array_utils.get_random_Nd_array_of_shape_and_type(random_2d_array_shape, numpy.float64)
res = pymod_test_mod.returnFloat64DataPtrAsInt(arg)
data_addr = _get_array_data_address(arg)
assert res == data_addr
# TODO
#def test_returnBoolDataPtrIndex0_1d(pymod_test_mod, random_1d_array_of_bool):
# arg = random_1d_array_of_bool.copy()
# expectedRes = bool(arg[0])
# res = pymod_test_mod.returnBoolDataPtrIndex0(arg)
# assert res == expectedRes
# assert type(res) == type(expectedRes)
# TODO
#def test_returnInt8DataPtrIndex0_1d(pymod_test_mod, random_1d_array_of_integers):
# arg = random_1d_array_of_integers.astype(numpy.int8)
# expectedRes = int(arg[0])
# res = pymod_test_mod.returnInt8DataPtrIndex0(arg)
# assert res == expectedRes
# assert type(res) == type(expectedRes)
def test_returnInt16DataPtrIndex0_1d(pymod_test_mod, random_1d_array_of_integers):
arg = random_1d_array_of_integers.astype(numpy.int16)
expectedRes = int(arg[0])
res = pymod_test_mod.returnInt16DataPtrIndex0(arg)
assert res == expectedRes
assert type(res) == type(expectedRes)
def test_returnInt32DataPtrIndex0_1d(pymod_test_mod, random_1d_array_of_integers):
arg = random_1d_array_of_integers.astype(numpy.int32)
expectedRes = int(arg[0])
res = pymod_test_mod.returnInt32DataPtrIndex0(arg)
assert res == expectedRes
assert type(res) == type(expectedRes)
def test_returnInt64DataPtrIndex0_1d(pymod_test_mod, random_1d_array_of_integers):
arg = random_1d_array_of_integers.astype(numpy.int64)
expectedRes = int(arg[0])
res = pymod_test_mod.returnInt64DataPtrIndex0(arg)
assert res == expectedRes
assert type(res) == type(expectedRes)
def test_returnFloat32DataPtrIndex0_1d(pymod_test_mod, random_1d_array_of_integers):
arg = random_1d_array_of_integers.astype(numpy.float32)
expectedRes = float(arg[0])
res = pymod_test_mod.returnFloat32DataPtrIndex0(arg)
assert res == expectedRes
assert type(res) == type(expectedRes)
def test_returnFloat64DataPtrIndex0_1d(pymod_test_mod, random_1d_array_of_integers):
arg = random_1d_array_of_integers.astype(numpy.float64)
expectedRes = float(arg[0])
res = pymod_test_mod.returnFloat64DataPtrIndex0(arg)
assert res |
Elemnir/presentations | utk_prog_team_2015_04_09/flask2/flask2.py | Python | bsd-2-clause | 233 | 0.008584 | from flask import Flask, request, render_template
app = Flask(__name__)
@app.route("/")
def hello():
return re | nder_template('test.html', r=request)
if __name__ == "__main__":
app.run(host='0.0.0.0', | port=8001, debug=True)
|
taikoa/wevolver-server | wevolve/users/migrations/0005_auto__del_field_profile_modified_user__del_field_profile_data.py | Python | agpl-3.0 | 7,905 | 0.007337 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Drop the obsolete ``modified_user`` and ``data`` columns from Profile."""

    def forwards(self, orm):
        """Apply: remove the two columns from the ``user_profile`` table."""
        # Deleting field 'Profile.modified_user'
        db.delete_column(u'user_profile', 'modified_user_id')

        # Deleting field 'Profile.data'
        db.delete_column(u'user_profile', 'data')

    def backwards(self, orm):
        """Revert: restore both columns as nullable so old rows stay valid."""
        # Adding field 'Profile.modified_user'
        db.add_column(u'user_profile', 'modified_user',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['users.Profile'], null=True, blank=True),
                      keep_default=False)

        # Adding field 'Profile.data'
        db.add_column(u'user_profile', 'data',
                      self.gf('django.db.models.fields.TextField')(null=True),
                      keep_default=False)

    # Frozen ORM snapshot used by South to build the fake ``orm`` object above.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'home.country': {
            'Meta': {'object_name': 'Country', 'db_table': "u'country'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
        },
        'users.profile': {
            'Meta': {'object_name': 'Profile', 'db_table': "u'user_profile'"},
            'bio': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['home.Country']", 'null': 'True', 'db_column': "'country_id'"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'interests': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'picture_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'picture_original_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'skills': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
            'token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True'})
        },
        'users.skill': {
            'Meta': {'object_name': 'Skill', 'db_table': "u'skill'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 11, 0, 0)', 'null': 'True', 'blank': 'True'}),
            'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'skill_created_user'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'skill_modified_user'", 'null': 'True', 'to': "orm['auth.User']"}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
        },
        'users.userskill': {
            'Meta': {'object_name': 'UserSkill', 'db_table': "u'user_skill'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 1, 11, 0, 0)', 'null': 'True', 'blank': 'True'}),
            'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'userskill_created_user'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'modified_user_id1': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'userskill_modified_user'", 'null': 'True', 'db_column': "'modified_user_id1'", 'to': "orm['auth.User']"}),
            'skill': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.Skill']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['users']
SocialNPHS/SocialNPHS | tests/sources.py | Python | mit | 5,885 | 0 | """
Unit tests for the portion of this project which collects text from social
media sources
"""
import unittest
from unittest.mock import MagicMock
from unittest.mock import patch
import tweepy
# Magically manipulate sys.path
from testassets import pathmagic
from SocialNPHS.sources.twitter import discover
from SocialNPHS.sources.twitter import user
from SocialNPHS.sources.twitter import tweets
from SocialNPHS.sources.twitter.auth import api
# TWITTER TESTS
class TestUser(unittest.TestCase):
    """ Test SocialNPHS.sources.twitter.user """
    def test_basics(self):
        # Sanity-check basic attribute types for a real account.
        luke = user.NPUser("1Defenestrator")
        self.assertIsInstance(luke.followers, list)
        self.assertIsInstance(luke.following, list)
        self.assertIn(luke.grade, ["Freshman",
                                   "Sophomore",
                                   "Junior",
                                   "Senior"])
        self.assertIsInstance(luke.has_graduated, bool)
    def test_magicmethod(self):
        # Attribute access is proxied dynamically; unknown names must raise.
        chris = user.NPUser("bravoc9")
        self.assertEqual(chris.fullname, "Chris Bravo")
        self.assertEqual(chris.sex, "M")
        with self.assertRaises(AttributeError):
            chris.this_is_not_a_valid_attribute
    def test_has_graduated(self):
        """ Test has_graduated by manipulating values """
        moshe = user.NPUser("G4_Y5_3X")
        moshe.user_info["grade"] = "3000" # In the future: not yet graduated
        self.assertFalse(moshe.has_graduated)
        moshe.user_info["grade"] = "2000" # In the past: already graduated
        self.assertTrue(moshe.has_graduated)
        moshe.user_info["grade"] = "2017" # Reset
    def test_misc(self):
        # Handles longer than Twitter's screen-name limit are rejected.
        with self.assertRaises(ValueError):
            user.NPUser("this_is_too_long_to_be_a_twitter_handle")
class TestTweets(unittest.TestCase):
    """ Test SocialNPHS.sources.twitter.tweets """
    def test_list(self):
        # NOTE: this will error if nobody on the list has tweeted in the past
        # 24 hours. Should be fixed perhaps.
        t = tweets.get_nphs_tweets()
        self.assertIsInstance(t, list)
        # Spot-check that entries are tweepy Status objects.
        self.assertIsInstance(t[0], tweepy.Status)
class TestDiscovery(unittest.TestCase):
    """ Test SocialNPHS.sources.twitter.discover """
    # Setting up mock Twitter users to do these tests consistently.
    # The follower graph below is hand-built so each discovery strategy has
    # both positive and negative cases (Rock is "too famous" to be a student).
    chris = MagicMock(
        id='c',
        location='New Paltz, NY',
        screen_name='bravoc9',
        followers_count=2
    )
    luke = MagicMock(
        id='l',
        location='New Paltz',
        screen_name='1Defenestrator',
        followers_count=4
    )
    moshe = MagicMock(
        id='m',
        location='New Paltz NY',
        screen_name='G4_Y5_3X',
        followers_count=2
    )
    michael = MagicMock(
        id='b',
        location='New Paltz',
        screen_name='iceberger',
        followers_count=0
    )
    rock = MagicMock(
        id='r',
        location='Da Qiu, China',
        screen_name='TheRock',
        followers_count=10390547
    )
    chris.followers = [luke, moshe]
    chris.following = [luke, rock]
    luke.followers = [chris, moshe, rock, michael]
    luke.following = [chris, moshe, rock]
    moshe.followers = [luke, rock]
    moshe.following = [chris, luke, rock]
    michael.followers = []
    michael.following = [luke]
    rock.followers = [chris, luke, moshe]
    rock.following = [luke, moshe]

    def test_association(self):
        users = discover.discover_by_association(self.luke, 3, 2)
        # Chris isn't famous and has enough people from NP following him
        self.assertTrue('bravoc9' in users)
        # Rock is too famous, Moshe only has 1 NP student following him
        self.assertFalse('G4_Y5_3X' in users)
        self.assertFalse('TheRock' in users)

    def test_location(self):
        users = discover.discover_by_location(self.luke, 6)
        self.assertTrue('G4_Y5_3X' in users)
        self.assertFalse('TheRock' in users)

    def test_geolocation(self):
        # Each fake timeline exercises a different way a tweet can reveal its
        # place: postal code, named place, bounding box, or no place at all.
        with patch('SocialNPHS.sources.twitter.auth.api') as mock_api:
            mock_tweets = {
                'c': [MagicMock(
                    place=MagicMock(
                        attributes={
                            'postal_code': '12561'
                        }
                    )
                )],
                'l': [MagicMock(
                    place=MagicMock(
                        contained_within=[{'name': 'New Paltz'}]
                    )
                )],
                'm': [MagicMock(
                    place=MagicMock(
                        contained_within=[{
                            'bounding_box': MagicMock(
                                coordinates=[[
                                    (-74.205, 41.695), (-74.205, 41.711),
                                    (-73.037, 41.711), (-73.037, 41.695)
                                ]]
                            )
                        }]
                    )
                )],
                'b': [
                    MagicMock(
                        place=MagicMock(
                            contained_within=[{'name': 'New Paltz'}]
                        )
                    )
                ],
                'r': [
                    MagicMock(place=None)
                ]
            }

            def return_method(id):
                return mock_tweets[id]
            mock_api.user_timeline = MagicMock(side_effect=return_method)
            users = discover.discover_by_geolocation(
                self.luke, 7, _api=mock_api
            )
            self.assertTrue('G4_Y5_3X' in users)
            self.assertTrue('iceberger' in users)
            self.assertFalse('TheRock' in users)
# INSTAGRAM TESTS
class TestNotImplemented(unittest.TestCase):
    # The instagram source is a stub; importing it must raise at import time.
    def test_error(self):
        with self.assertRaises(NotImplementedError):
            import SocialNPHS.sources.instagram
|
0xced/youtube-dl | test/test_download.py | Python | unlicense | 4,976 | 0.004421 | #!/usr/bin/env python
import errno
import hashlib
import io
import os
import json
import unittest
import sys
import hashlib
import socket
# Allow direct execution
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import youtube_dl.FileDownloader
import youtube_dl.InfoExtractors
from youtube_dl.utils import *
DEF_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tests.json')
PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json")
RETRIES = 3
# General configuration (from __init__, not very elegant...)
jar = compat_cookiejar.CookieJar()
cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar)
proxy_handler = compat_urllib_request.ProxyHandler()
opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler())
compat_urllib_request.install_opener(opener)
socket.setdefaulttimeout(10)
def _try_rm(filename):
""" Remove a file if it exists """
try:
os.remove(filename)
except OSError as ose:
if ose.errno != errno.ENOENT:
raise
class FileDownloader(youtube_dl.FileDownloader):
    # Test double: records every processed info_dict and redirects stderr
    # output to the screen so the test runner can capture it.
    def __init__(self, *args, **kwargs):
        self.to_stderr = self.to_screen
        self.processed_info_dicts = []
        return youtube_dl.FileDownloader.__init__(self, *args, **kwargs)
    def process_info(self, info_dict):
        # Remember the dict before delegating to the real implementation.
        self.processed_info_dicts.append(info_dict)
        return youtube_dl.FileDownloader.process_info(self, info_dict)
def _file_md5(fn):
with open(fn, 'rb') as f:
return hashlib.md5(f.read()).hexdigest()
with io.open(DEF_FILE, encoding='utf-8') as deff:
defs = json.load(deff)
with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
parameters = json.load(pf)
class TestDownload(unittest.TestCase):
    # Show full diffs on failure; downloaded info dicts can be large.
    maxDiff = None
    def setUp(self):
        # Expose the module-level JSON definitions on the instance.
        self.parameters = parameters
        self.defs = defs
### Dynamically generate tests
def generator(test_case):
    """Build a test method that downloads *test_case* and verifies the result.

    The returned function is attached to TestDownload dynamically, one per
    entry in tests.json.
    """
    def test_template(self):
        ie = youtube_dl.InfoExtractors.get_info_extractor(test_case['name'])
        if not ie._WORKING:
            print('Skipping: IE marked as not _WORKING')
            return
        if 'playlist' not in test_case and not test_case['file']:
            print('Skipping: No output file specified')
            return
        if 'skip' in test_case:
            print('Skipping: {0}'.format(test_case['skip']))
            return

        params = self.parameters.copy()
        params.update(test_case.get('params', {}))

        fd = FileDownloader(params)
        for ie in youtube_dl.InfoExtractors.gen_extractors():
            fd.add_info_extractor(ie)
        # Track which files the downloader reported as finished.
        finished_hook_called = set()
        def _hook(status):
            if status['status'] == 'finished':
                finished_hook_called.add(status['filename'])
        fd.add_progress_hook(_hook)

        test_cases = test_case.get('playlist', [test_case])
        for tc in test_cases:
            _try_rm(tc['file'])
            _try_rm(tc['file'] + '.part')
            _try_rm(tc['file'] + '.info.json')
        try:
            # Retry on transient network errors, up to RETRIES attempts.
            for retry in range(1, RETRIES + 1):
                try:
                    fd.download([test_case['url']])
                except (DownloadError, ExtractorError) as err:
                    if retry == RETRIES: raise
                    # Check if the exception is not a network related one
                    if not err.exc_info[0] in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError):
                        raise
                    print('Retrying: {0} failed tries\n\n##########\n\n'.format(retry))
                else:
                    break

            for tc in test_cases:
                if not test_case.get('params', {}).get('skip_download', False):
                    self.assertTrue(os.path.exists(tc['file']), msg='Missing file ' + tc['file'])
                    self.assertTrue(tc['file'] in finished_hook_called)
                self.assertTrue(os.path.exists(tc['file'] + '.info.json'))
                if 'md5' in tc:
                    md5_for_file = _file_md5(tc['file'])
                    self.assertEqual(md5_for_file, tc['md5'])
                with io.open(tc['file'] + '.info.json', encoding='utf-8') as infof:
                    info_dict = json.load(infof)
                for (info_field, value) in tc.get('info_dict', {}).items():
                    self.assertEqual(value, info_dict.get(info_field))
        finally:
            # Always clean up downloaded artifacts, pass or fail.
            for tc in test_cases:
                _try_rm(tc['file'])
                _try_rm(tc['file'] + '.part')
                _try_rm(tc['file'] + '.info.json')

    return test_template
### And add them to TestDownload
for test_case in defs:
test_method = generator(test_case)
test_method.__name__ = "test_{0}".format(test_case["name"])
setattr(TestDownload, test_method.__name__, test_method)
del test_method
if __name__ == '__main__':
unittest.main()
|
maartenbreddels/vaex | packages/vaex-ui/vaex/ui/rthook_pyqt4.py | Python | mit | 298 | 0 | __author__ = 'breddels'
# from https://github.com/pyinstaller/pyinstaller/wiki/Recipe-PyQt4-API-Version
# PyInstaller runtime hook: select the PyQt4 "API 2" sip bindings before any
# Qt import, so QString/QVariant etc. map to native Python types.
import sip
sip.setapi(u'QDate', 2)
sip.setapi(u'QDateTime', 2)
sip.setapi(u'QString', 2)
sip.setapi(u'QTextStream', 2)
sip.setapi(u'QTime', 2)
sip.setapi(u'QUrl', 2)
sip.setapi(u'QVariant', 2)
|
kaneawk/shadowsocksr | shadowsocks/udprelay.py | Python | apache-2.0 | 25,937 | 0.002275 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# SOCKS5 UDP Request
# +----+------+------+----------+----------+----------+
# |RSV | FRAG | ATYP | DST.ADDR | DST.PORT | DATA |
# +----+------+------+----------+----------+----------+
# | 2 | 1 | 1 | Variable | 2 | Variable |
# +----+------+------+----------+----------+----------+
# SOCKS5 UDP Response
# +----+------+------+----------+----------+----------+
# |RSV | FRAG | ATYP | DST.ADDR | DST.PORT | DATA |
# +----+------+------+----------+----------+----------+
# | 2 | 1 | 1 | Variable | 2 | Variable |
# +----+------+------+----------+----------+----------+
# shadowsocks UDP Request (before encrypted)
# +------+----------+----------+----------+
# | ATYP | DST.ADDR | DST.PORT | DATA |
# +------+----------+----------+----------+
# | 1 | Variable | 2 | Variable |
# +------+----------+----------+----------+
# shadowsocks UDP Response (before encrypted)
# +------+----------+----------+----------+
# | ATYP | DST.ADDR | DST.PORT | DATA |
# +------+----------+----------+----------+
# | 1 | Variable | 2 | Variable |
# +------+----------+----------+----------+
# shadowsocks UDP Request and Response (after encrypted)
# +-------+--------------+
# | IV | PAYLOAD |
# +-------+--------------+
# | Fixed | Variable |
# +-------+--------------+
# HOW TO NAME THINGS
# ------------------
# `dest` means destination server, which is from DST fields in the SOCKS5
# request
# `local` means local server of shadowsocks
# `remote` means remote server of shadowsocks
# `client` means UDP clients that connects to other servers
# `server` means the UDP server that handles user requests
from __future__ import absolute_import, division, print_function, \
with_statement
import time
import socket
import logging
import struct
import errno
import random
import binascii
import traceback
import threading
from shadowsocks import encrypt, obfs, eventloop, lru_cache, common, shell
from shadowsocks.common import pre_parse_header, parse_header, pack_addr
# for each handler, we have 2 stream directions:
# upstream: from client to server direction
# read local and write to remote
# downstream: from server to client direction
# read remote and write to local
STREAM_UP = 0
STREAM_DOWN = 1
# for each stream, it's waiting for reading, or writing, or both
WAIT_STATUS_INIT = 0
WAIT_STATUS_READING = 1
WAIT_STATUS_WRITING = 2
WAIT_STATUS_READWRITING = WAIT_STATUS_READING | WAIT_STATUS_WRITING
BUF_SIZE = 65536
DOUBLE_SEND_BEG_IDS = 16
POST_MTU_MIN = 500
POST_MTU_MAX = 1400
SENDING_WINDOW_SIZE = 8192
STAGE_INIT = 0
STAGE_RSP_ID = 1
STAGE_DNS = 2
STAGE_CONNECTING = 3
STAGE_STREAM = 4
STAGE_DESTROYED = -1
CMD_CONNECT = 0
CMD_RSP_CONNECT = 1
CMD_CONNECT_REMOTE = 2
CMD_RSP_CONNECT_REMOTE = 3
CMD_POST = 4
CMD_SYN_STATUS = 5
CMD_POST_64 = 6
CMD_SYN_STATUS_64 = 7
CMD_DISCONNECT = 8
CMD_VER_STR = b"\x08"
RSP_STATE_EMPTY = b""
RSP_STATE_REJECT = b"\x00"
RSP_STATE_CONNECTED = b"\x01"
RSP_STATE_CONNECTEDREMOTE = b"\x02"
RSP_STATE_ERROR = b"\x03"
RSP_STATE_DISCONNECT = b"\x04"
RSP_STATE_REDIRECT = b"\x05"
def client_key(source_addr, server_af):
    """Cache key for a client pairing: ``'<ip>:<port>:<af>'``.

    Note: the address family is the *server* socket's, not the destination's.
    """
    host, port = source_addr[0], source_addr[1]
    return '{0}:{1}:{2:d}'.format(host, port, server_af)
class UDPRelay(object):
def __init__(self, config, dns_resolver, is_local, stat_callback=None, stat_counter=None):
self._config = config
if config.get('connect_verbose_info', 0) > 0:
common.connect_log = logging.info
if is_local:
self._listen_addr = config['local_address']
self._listen_port = config['local_port']
self._remote_addr = config['server']
self._remote_port = config['server_port']
else:
self._listen_addr = config['server']
self._listen_port = config['server_port']
self._remote_addr = None
self._remote_port = None
self._dns_resolver = dns_resolver
self._password = common.to_bytes(config['password'])
self._method = config['method']
self._timeout = config['timeout']
self._is_local = is_local
self._udp_cache_size = config['udp_cache']
self._cache = lru_cache.LRUCache(timeout=config['udp_timeout'],
close_callback=self._close_client_pair)
self._cache_dns_client = lru_cache.LRUCache(timeout=10,
close_callback=self._close_client_pair)
self._client_fd_to_server_addr = {}
#self._dns_cache = lru_cache.LRUCache(timeout=1800)
self._eventloop = None
self._closed = False
self.server_transfer_ul = 0
self.server_transfer_dl = 0
self.server_users = {}
self.server_user_transfer_ul = {}
self.server_user_transfer_dl = {}
if common.to_str(config['protocol']) in obfs.mu_protocol():
self._update_users(None, None)
self.protocol_data = obfs.obfs(config['protocol']).init_data()
self._protocol = obfs.obfs(config['protocol'])
server_info = obfs.server_info(self.protocol_data)
server_info.host = self._listen_addr
server_info.port = self._listen_port
server_info.users = self.server_users
server_info.protocol_param = config['protocol_param']
server_info.obfs_param = ''
server_info.iv = b''
server_info.recv_iv = b''
server_info.key_str = common.to_bytes(config['password'])
server_info.key = encrypt.encrypt_key(self._password, self._method)
server_info.head_len = 30
server_info.tcp_mss = 1452
server_info.buffer_size = BUF_SIZE
server_info.overhead = 0
self._protocol.set_server_info(server_info)
self._sockets = set()
self._fd_to_handlers = {}
self._reqid_to_hd = {}
self._data_to_write_to_server_socket = []
self._timeout_cache = lru_cache.LRUCache(timeout=self._timeout,
close_callback=self._close_tcp_client)
self._bind = config.get('out_bind', '')
self._bindv6 = config.get('out_bindv6', '')
self._ignore_bind_list = config.get('ignore_bind', [])
if 'forbidden_ip' in config:
self._forbidden_iplist = config['forbidden_ip']
else:
self._forbidden_iplist = None
if 'forbidden_port' in config:
self._forbidden_portset = config['forbidden_port']
else:
self._forbidden_portset = None
addrs = socket.getaddrinfo(self._listen_addr, self._listen_port, 0,
socket.SOCK_DGRAM, socket.SOL_UDP)
if len(addrs) == 0:
raise Exception("can't get addrinfo for %s:%d" %
(self._listen_addr, self._listen_port))
af, socktype, proto, canonname, sa = addrs[0]
server_socket = socket.socket(af, socktype, proto)
server_socket.bind((self._listen_addr, self._listen_port))
server_socket.setblocking(False)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024 * 1024)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1024 * 1024)
self._server_socket = server_socket
self._stat_callback = stat_callback
def _get_a_server(self):
server = self._config['server']
server_port = self._config['server_port']
if type(server_port) == list:
serve |
kamyu104/LeetCode | Python/valid-palindrome.py | Python | mit | 994 | 0.004024 | from __future__ import print_function
# Time: O(n)
# Space: O(1)
#
# Given a string, determine if it is a palindrome, considering only alphanumeric characters and ignoring cases.
#
# For example,
# "A man, a plan, a canal: Panama" is a palindrome.
# "race a car" is not a palindrome.
#
# Note:
# Have you consider that the string might be empty? This is a good question to ask during an interview.
#
# For the purpose of this problem, we define empty string as valid palindrome.
#
class Solution:
    # @param s, a string
    # @return a boolean
    def isPalindrome(self, s):
        """Return True if s is a palindrome, considering only alphanumeric
        characters and ignoring case.  An empty string is a valid palindrome.
        """
        left, right = 0, len(s) - 1
        while left < right:
            if not s[left].isalnum():
                left += 1            # skip punctuation/whitespace on the left
            elif not s[right].isalnum():
                right -= 1           # skip punctuation/whitespace on the right
            elif s[left].lower() == s[right].lower():
                left, right = left + 1, right - 1
            else:
                return False
        return True
if __name__ == "__main__":
print(Solution().isPalindrome("A man, a plan, a canal: Panama"))
|
aonotas/chainer | tests/chainer_tests/links_tests/theano_tests/test_theano_function.py | Python | mit | 5,778 | 0 | import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import gradient_check
from chainer import links
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
@testing.with_requires('theano')
class TheanoFunctionTestBase(object):
    # Shared forward/backward test driver for links.TheanoFunction wrappers.
    # Subclasses define ``inputs``/``outputs`` shape+dtype specs and implement
    # make_func() / expect_forward().

    # Per-subclass tolerance overrides for assert_allclose / check_backward.
    forward_test_options = {}
    backward_test_options = {'atol': 1e-4}

    def setUp(self):
        # Random input arrays and upstream gradients matching the specs.
        self.input_data = [
            numpy.random.uniform(
                -1, 1, d['shape']).astype(getattr(numpy, d['type']))
            for d in self.inputs]
        self.grad_data = [
            numpy.random.uniform(
                -1, 1, d['shape']).astype(getattr(numpy, d['type']))
            for d in self.outputs]

    def make_func(self):
        raise NotImplementedError

    def expect_forward(self):
        raise NotImplementedError

    def check_forward(self, input_data):
        # Run the wrapped function and compare against expect_forward().
        func = self.make_func()
        inputs = [chainer.Variable(data) for data in input_data]
        outputs = func(*inputs)
        if isinstance(outputs, chainer.Variable):
            outputs = (outputs,)
        expect = self.expect_forward()
        self.assertEqual(len(outputs), len(expect))
        for o, e in zip(outputs, expect):
            testing.assert_allclose(
                o.data, e, **self.forward_test_options)

    def test_forward_cpu(self):
        self.check_forward(self.input_data)

    @attr.gpu
    def test_forward_gpu(self):
        inputs = [cuda.to_gpu(x) for x in self.input_data]
        self.check_forward(inputs)

    def check_backward(self, input_data, grad_data):
        # Numerical gradient check through the wrapped Theano function.
        func = self.make_func()
        gradient_check.check_backward(
            func, input_data, grad_data, **self.backward_test_options)

    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.input_data, self.grad_data)

    @attr.gpu
    @condition.retry(3)
    def test_backward_gpu(self):
        inputs = [cuda.to_gpu(x) for x in self.input_data]
        grads = [cuda.to_gpu(x) for x in self.grad_data]
        self.check_backward(inputs, grads)
@testing.parameterize(
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (3, 2), 'type': 'float32'}],
     'outputs': [{'shape': (3, 2), 'type': 'float32'}]},
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (2,), 'type': 'float32'}],
     'outputs': [{'shape': (3, 2), 'type': 'float32'}]},
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (), 'type': 'float32'}],
     'outputs': [{'shape': (3, 2), 'type': 'float32'}]},
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (3, 2), 'type': 'float64'}],
     'outputs': [{'shape': (3, 2), 'type': 'float64'}]},
    {'inputs': [{'shape': (3, 2), 'type': 'float16'},
                {'shape': (3, 2), 'type': 'float32'}],
     'outputs': [{'shape': (3, 2), 'type': 'float32'}],
     'forward_test_options': {'atol': 1e-3, 'rtol': 1e-3},
     'backward_test_options': {'eps': 1, 'atol': 1e-3, 'rtol': 1e-3}},
)
class TestTheanoFunction(TheanoFunctionTestBase, unittest.TestCase):
    # Element-wise addition x + y; parameters cover broadcasting and mixed
    # dtypes (the float16 case uses looser tolerances).
    def make_func(self):
        import theano.tensor as T
        x = T.TensorType(self.inputs[0]['type'],
                         (False,) * len(self.inputs[0]['shape']))('x')
        y = T.TensorType(self.inputs[1]['type'],
                         (False,) * len(self.inputs[1]['shape']))('y')
        z = x + y
        return links.TheanoFunction([x, y], [z])

    def expect_forward(self):
        x, y = self.input_data
        return x + y,
@testing.parameterize(
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (3, 2), 'type': 'float32'}],
     'outputs': [{'shape': (3, 2), 'type': 'float32'},
                 {'shape': (3, 2), 'type': 'float32'}]},
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (2,), 'type': 'float32'}],
     'outputs': [{'shape': (3, 2), 'type': 'float32'},
                 {'shape': (3, 2), 'type': 'float32'}]},
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (), 'type': 'float32'}],
     'outputs': [{'shape': (3, 2), 'type': 'float32'},
                 {'shape': (3, 2), 'type': 'float32'}]},
)
class TestTheanoFunctionTwoOutputs(TheanoFunctionTestBase, unittest.TestCase):
    # A Theano function returning two outputs (x + y, x - y) at once.
    def make_func(self):
        import theano.tensor as T
        x = T.TensorType(self.inputs[0]['type'],
                         (False,) * len(self.inputs[0]['shape']))('x')
        y = T.TensorType(self.inputs[1]['type'],
                         (False,) * len(self.inputs[1]['shape']))('y')
        z = x + y
        w = x - y
        return links.TheanoFunction([x, y], [z, w])

    def expect_forward(self):
        x, y = self.input_data
        return x + y, x - y
@testing.parameterize(
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (2,), 'type': 'int32'}],
     'outputs': [{'shape': (2, 2), 'type': 'float32'}]},
    {'inputs': [{'shape': (3, 2), 'type': 'float32'},
                {'shape': (), 'type': 'int32'}],
     'outputs': [{'shape': (2,), 'type': 'float32'}]},
)
class TestTheanoFunctionNonDifferential(
        TheanoFunctionTestBase, unittest.TestCase):
    # Indexing x[i]: the integer index input is non-differentiable.
    def make_func(self):
        import theano.tensor as T
        x = T.TensorType(self.inputs[0]['type'],
                         (False,) * len(self.inputs[0]['shape']))('x')
        i = T.TensorType(self.inputs[1]['type'],
                         (False,) * len(self.inputs[1]['shape']))('y')
        z = x[i]
        return links.TheanoFunction([x, i], z)

    def expect_forward(self):
        x, i = self.input_data
        return x[i],
testing.run_module(__name__, __file__)
|
unomena/unobase | unobase/blog/templatetags/blog_widgets.py | Python | bsd-3-clause | 407 | 0.007371 | __author__ = 'michael'
from django import template
from unobase import models as unobase_models

register = template.Library()


@register.inclusion_tag('blog/widgets/tag_cloud.html')
def tag_cloud(blog_slug):
    """Context for the tag-cloud widget: distinct 'blogentry' tags plus the slug."""
    tags = unobase_models.TagModel.get_distinct_tags('blogentry')
    return {
        'blog_slug': blog_slug,
        'tags': tags
    }
|
dbbhattacharya/kitsune | vendor/packages/pyparsing/examples/greetingInGreek.py | Python | bsd-3-clause | 440 | 0.021077 | # vim:fileencoding=utf-8
#
# greetingInGreek.py
#
# Demonstration of the parsing module, on the prototypical "Hello, World!" example
#
from pyparsing import Word
# define grammar
alphas = u''.join(unichr(x) for x in xrange(0x386, 0x3ce))
greet = Word(alphas) + u',' + Word(alphas) + u'!'
# input string
hello = "Καλημέρα, κόσμε!".decode('utf-8')
# parse input string
print | greet.parseString( hello )
|
mike820324/microProxy | microproxy/interceptor/plugin_manager.py | Python | mit | 3,707 | 0.00027 | import os
import sys
from copy import copy
from watchdog.events import RegexMatchingEventHandler
if sys.platform == "darwin":
from watchdog.observers.polling import PollingObserver as Observer
else:
from watchdog.observers import Observer
from microproxy.log import ProxyLogger
logger = ProxyLogger.get_logger(__name__)
class PluginEventHandler(RegexMatchingEventHandler):
    # Watchdog handler that fires *callback* whenever the watched plugin
    # file (matched by name suffix) is modified on disk.
    def __init__(self, filename, callback):
        super(PluginEventHandler, self).__init__(ignore_directories=True,
                                                 regexes=['.*' + filename])
        self.callback = callback
    def on_modified(self, event):
        self.callback()
class Plugin(object):
    """A user plugin loaded from a Python file and hot-reloaded on change."""
    # Hook names that __getattr__ is allowed to proxy into the plugin.
    PLUGIN_METHODS = ["on_request", "on_response"]

    def __init__(self, plugin_path):
        self.plugin_path = os.path.abspath(plugin_path)
        self.plugin_name = os.path.basename(self.plugin_path)
        self.plugin_dir = os.path.dirname(self.plugin_path)
        self.namespace = None
        self._load_plugin()
        self._register_watcher()

    def _register_watcher(self):
        # Reload the plugin whenever its file is modified on disk.
        logger.debug("Register File Watcher for {0}".format(self.plugin_name))
        self.event_handler = PluginEventHandler(self.plugin_name,
                                                self._reload_plugin)
        self.observer = Observer()
        self.observer.schedule(self.event_handler, self.plugin_dir)
        self.observer.start()

    def _load_plugin(self):
        # Execute the plugin file in a private namespace; its directory is
        # temporarily appended to sys.path so it can import sibling modules.
        sys.path.append(os.path.dirname(self.plugin_path))
        try:
            with open(self.plugin_path) as fp:
                self.namespace = {"__file__": self.plugin_path}
                code = compile(fp.read(), self.plugin_path, "exec")
                exec (code, self.namespace, self.namespace)
        except Exception as e:
            logger.exception(e)
        sys.path.pop()
        logger.info("Load Plugin : {0}".format(self.plugin_name))

    def _reload_plugin(self):
        logger.info("Reload Plugin : {0}".format(self.plugin_name))
        self._load_plugin()

    def __getattr__(self, attr):
        # Only declared hook names resolve into the plugin namespace;
        # anything else (or a hook the plugin did not define) raises.
        if attr not in self.PLUGIN_METHODS:
            raise AttributeError
        try:
            return self.namespace[attr]
        except KeyError:
            raise AttributeError
class PluginManager(object):
    # Owns the loaded plugins and threads a plugin context through their
    # on_request/on_response hooks in load order.
    def __init__(self, config):
        self.plugins = []
        self.load_plugins(config["plugins"])
    def load_plugins(self, plugin_paths):
        # Each path becomes a hot-reloadable Plugin instance.
        for plugin_path in plugin_paths:
            plugin = Plugin(plugin_path)
            self.plugins.append(plugin)
    def exec_request(self, plugin_context):
        # Pass a copy of the context through every plugin's on_request,
        # feeding each plugin's output to the next.
        # NOTE(review): the except clause also swallows AttributeErrors
        # raised *inside* a hook, not only missing hooks - confirm intended.
        if len(self.plugins) == 0:
            return plugin_context

        current_context = copy(plugin_context)
        for plugin in self.plugins:
            try:
                new_context = plugin.on_request(current_context)
                current_context = copy(new_context)
            except AttributeError:
                logger.debug(
                    "Plugin {0} does not have on_request".format(
                        plugin.namespace["__file__"].split("/")[-1]))
        return current_context
    def exec_response(self, plugin_context):
        # Same pipeline as exec_request, but for the on_response hook.
        if len(self.plugins) == 0:
            return plugin_context

        current_context = copy(plugin_context)
        for plugin in self.plugins:
            try:
                new_context = plugin.on_response(current_context)
                current_context = copy(new_context)
            except AttributeError:
                logger.debug(
                    "Plugin {0} does not have on_response".format(
                        plugin.namespace["__file__"].split("/")[-1]))
        return current_context
|
tehranian/django-url-shortener | shortener/tests.py | Python | mit | 9,097 | 0.00011 | import random
import string
import sys
from django.core.urlresolvers import reverse
from django.template import Context, RequestContext, Template
from django.test import TestCase
from django.test.client import Client, RequestFactory
from shortener.baseconv import base62, DecodingError, EncodingError
from shortener.forms import too_long_error
from shortener.models import Link
# needed for the short_url templatetag
CUSTOM_HTTP_HOST = 'django.testserver'
class TemplateTagTestCase(TestCase):
def setUp(self):
self.HTTP_HOST = CUSTOM_HTTP_HOST
self.factory = RequestFactory(HTTP_HOST=self.HTTP_HOST)
def test_short_url(self):
"""
the short_url templatetag works with auto-generated links
"""
link = Link.objects.create(url='http://www.python.org/')
request = self.factory.get(reverse('index'))
out = Template(
"{% load shortener_helpers %}"
"{% short_url link %}"
).render(RequestContext(request, {'link': link}))
self.assertEqual(
out, 'http://%s/%s' % (self.HTTP_HOST, link.to_base62()))
def test_short_url_with_custom(self):
| """
the short_url templateta | g works with custom links
"""
custom = 'python'
link = Link.objects.create(
url='http://www.python.org/', id=base62.to_decimal(custom))
request = self.factory.get(reverse('index'))
out = Template(
"{% load shortener_helpers %}"
"{% short_url link %}"
).render(RequestContext(request, {'link': link}))
self.assertEqual(
out, 'http://%s/%s' % (self.HTTP_HOST, link.to_base62()))
class ViewTestCase(TestCase):
def setUp(self):
self.client = Client(HTTP_HOST=CUSTOM_HTTP_HOST)
def test_submit(self):
"""
submit view with auto-generated short url
"""
url = u'http://www.python.org/'
response = self.client.post(reverse('submit'), {'url': url})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'shortener/submit_success.html')
self.assertIn('link', response.context)
link = response.context['link']
self.assertIsInstance(link, Link)
self.assertEqual(url, link.url)
self.assertEqual(link.usage_count, 0)
self.assertEqual(base62.from_decimal(link.id), link.to_base62())
def test_submit_with_custom(self):
"""
submit view with a custom short url
"""
url = u'http://www.python.org/'
custom = 'mylink'
response = self.client.post(reverse('submit'), {
'url': url, 'custom': custom})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'shortener/submit_success.html')
self.assertIn('link', response.context)
link = response.context['link']
self.assertIsInstance(link, Link)
self.assertEqual(url, link.url)
self.assertEqual(link.usage_count, 0)
self.assertEqual(link.to_base62(), custom)
def test_submit_with_bad_character_in_custom(self):
"""
submit view with an invalid character in custom
"""
url = u'http://www.python.org/'
custom = 'my_link_bad_chars:##$#$%^$&%^**'
response = self.client.post(reverse('submit'), {
'url': url, 'custom': custom})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'shortener/submit_failed.html')
self.assertFormError(
response, 'link_form', 'custom', u'Invalid character for encoding: _')
self.assertNotIn('link', response.context)
def test_submit_with_custom_no_repeats(self):
"""
submitting a request w/a custom name fails if it is already taken
"""
url = u'http://www.python.org/'
custom = 'mylink'
# first time should succeed
response = self.client.post(reverse('submit'), {
'url': url, 'custom': custom})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'shortener/submit_success.html')
self.assertIn('link', response.context)
link = response.context['link']
self.assertIsInstance(link, Link)
self.assertEqual(url, link.url)
self.assertEqual(link.usage_count, 0)
self.assertEqual(link.to_base62(), custom)
# second time should be an error
response = self.client.post(reverse('submit'), {
'url': url, 'custom': custom})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'shortener/submit_failed.html')
self.assertFormError(
response, 'link_form', 'custom', '"%s" is already taken' % custom)
self.assertNotIn('link', response.context)
def test_submit_long_custom(self):
"""
if a custom shortened url is too long we return an error
"""
url = u'http://www.python.org/'
custom = 'MyLinkCustomLinkThatIsTooLongOoooooooohYea'
response = self.client.post(reverse('submit'), {
'url': url, 'custom': custom})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'shortener/submit_failed.html')
self.assertFormError(response, 'link_form', 'custom', too_long_error)
def test_follow(self):
"""
the follow view on a valid url
"""
url = 'http://www.python.org/'
link = Link.objects.create(url=url)
self.assertEqual(link.usage_count, 0)
# follow the short url and get a redirect
response = self.client.get(reverse('follow', kwargs={
'base62_id': link.to_base62()}))
self.assertRedirects(response, url, 301)
# re-fetch link so that we can make sure that usage_count incremented
link = Link.objects.get(id=link.id)
self.assertEqual(link.usage_count, 1)
def test_follow_404(self):
"""
follow on an unknown url should return 404
"""
url = u'http://www.python.org/'
response = self.client.get(reverse('follow', kwargs={
'base62_id': "fails"}))
self.assertEqual(response.status_code, 404)
def test_info(self):
"""
the info view on a valid url
"""
url = u'http://www.python.org/'
link = Link.objects.create(url=url)
response = self.client.get(reverse('info', kwargs={
'base62_id': link.to_base62()}))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'shortener/link_info.html')
def test_info_404(self):
"""
info on an unknown url should return 404
"""
url = u'http://www.python.org/'
response = self.client.get(reverse('info', kwargs={
'base62_id': "fails"}))
self.assertEqual(response.status_code, 404)
class LinkTestCase(TestCase):
def test_create(self):
"""
Link.base_62() is derived from auto-generated Link.id
"""
link = Link.objects.create(url='http://www.python.org')
self.assertEqual(link.to_base62(), base62.from_decimal(link.id))
def test_create_with_custom_id(self):
"""
Link.base_62() is derived from custom Link.id
"""
id = 5000
link = Link.objects.create(id=id, url='http://www.python.org')
self.assertEqual(link.to_base62(), base62.from_decimal(id))
def test_unicode(self):
"""
unicode test
"""
url = 'http://www.python.org'
link = Link.objects.create(url=url)
self.assertTrue(url in unicode(link))
class BaseconvTestCase(TestCase):
def test_symmetry_positive_int(self):
"""
symmetry for encoding/decoding values
"""
for x in xrange(1000):
random_int = random.randint(0, sys.maxint)
encoded_int = base62.from_decimal(random_int)
self.assertEqual(random_int, base62.to_decimal(encoded_int))
def test_symmetry_negative_int(self):
|
shiminasai/ciat_plataforma | monitoreo/indicador11/migrations/0001_initial.py | Python | mit | 14,973 | 0.007547 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'CultivosVariedad'
db.create_table(u'indicador11_cultivosvariedad', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cultivo', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'indicador11', ['CultivosVariedad'])
# Adding model 'Variedades'
db.create_table(u'indicador11_variedades', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cultivo', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['indicador11.CultivosVariedad'])),
('variedad', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'indicador11', ['Variedades'])
# Adding model 'Semilla'
db.create_table(u'indicador11_semilla', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('cultivo', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['indicador11.Variedades'])),
('origen', self.gf('django.db.models.fields.IntegerField')()),
('encuesta', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['monitoreo.Encuesta'])),
))
db.send_create_signal(u'indicador11', ['Semilla'])
def backwards(self, orm):
# Deleting model 'CultivosVariedad'
db.delete_table(u'indicador11_cultivosvariedad')
# Deleting model 'Variedades'
db.delete_table(u'indicador11_variedades')
# Deleting model 'Semilla'
db.delete_table(u'indicador11_semilla')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'configuracion.areaaccion': {
'Meta': {'object_name': 'AreaAccion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'configuracion.plataforma': {
'Meta': {'object_name': 'Plataforma'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'configuracion.sector': {
'Meta': {'object_name': 'Sector'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'configuracion.sitioaccion': {
'Meta': {'object_name': 'SitioAccion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'indicador11.cultivosvariedad': {
'Meta': {'ordering': "['cultivo']", 'object_name': 'CultivosVariedad'},
'cultivo': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'indicador11.semilla': {
'Meta': {'object_name': 'Semilla'},
'cultivo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['indicador11.Variedades']"}),
'encuesta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['monitoreo.Encuesta']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'origen': ('django.db.models.fields.IntegerField', [], {})
},
u'indicador11.variedades': {
'Meta': {'ordering': "['cultivo']", 'object_name': 'Variedades'},
'cultivo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['indicador11.CultivosVariedad']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'variedad': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'lugar.comunidad': {
'Meta': {'ordering': "['nombre']", 'object_name': 'Comunidad'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nombre': ('django.db.models.fields.CharField', [], {'max_le | ngth': '40'})
},
u'lugar.departamento': {
'Meta': {'ordering': "['nombre']", 'object_name': 'Departamento'},
'extension': ('django.db.models.fields.DecimalField', [], {'null' | : 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fi |
tecnologiaenegocios/tn.plonebehavior.template | src/tn/plonebehavior/template/tests/test_template.py | Python | bsd-3-clause | 14,717 | 0 | from plone.app.dexterity.behaviors.metadata import ICategorization
from plone.behavior.interfaces import IBehavior
from stubydoo import double
from stubydoo import stub
from tn.plonebehavior.template import AssociatedTemplateCompilation
from tn.plonebehavior.template import interfaces
from tn.plonebehavior.template import IHasTemplate
from tn.plonebehavior.template import INullTemplateConfiguration
from tn.plonebehavior.template import ITemplating
from tn.plonebehavior.template import ITemplateConfiguration
from tn.plonebehavior.template import NullTemplate
from tn.plonebehavior.template import NullTemplateConfiguration
from tn.plonebehavior.template import Templating
from tn.plonebehavior.template import TemplateConfiguration
from tn.plonebehavior.template import Template
from tn.plonebehavior.template import TemplatedView
from tn.plonebehavior.template.interfaces import IPossibleTemplate
from tn.plonebehavior.template.tests import base
from zope.annotation.interfaces import IAnnotations
from zope.annotation.interfaces import IAttributeAnnotatable
from zope.app.testing import placelesssetup
from zope.interface import providedBy
import lxml.html
import stubydoo
import unittest
import zope.component
import zope.interface
import zope.publisher.interfaces.browser
@stubydoo.assert_expectations
class TestTemplateConfiguration(unittest.TestCase):
def setUp(self):
placelesssetup.setUp(self)
self.context = double()
zope.interface.alsoProvides(self.context, IAttributeAnnotatable)
self.configuration = TemplateConfiguration(self.context)
@zope.component.adapter(IAttributeAnnotatable)
@zope.interface.implementer(IAnnotations)
def annotations_adapter(context):
if hasattr(context, '_annotations'):
return context._annotations
context._annotations = {}
return context._annotations
zope.component.provideAdapter(annotations_adapter)
self.default_html = u'<html id="selector"></html>'
@zope.component.adapter(None)
@zope.interface.implementer(interfaces.IHTML)
def html_attribute_adapter(context):
return self.default_html
zope.component.provideAdapter(html_attribute_adapter)
def tearDown(self):
placelesssetup.tearDown()
def test_use_adapter_for_html_attribute(self):
self.assertEquals(self.configuration.html, self.default_html)
def test_cannot_set_html_attribute(self):
def set_attribute():
self.configuration.html = u'Other HTML Code'
self.assertRaises(AttributeError, set_attribute)
def test_persists_xpath(self):
self.configuration.xpath = u'//a/xpath/expression'
other_configuration = TemplateConfiguration(self.context)
self.assertEquals(other_configuration.xpath, u'//a/xpath/expression')
def test_persists_css(self):
self.configuration.css = u'a css selector'
other_configuration = TemplateConfiguration(self.context)
self.assertEquals(other_configuration.css, u'a css selector')
def test_css_sets_xpath(self):
self.configuration.css = u'#other-id'
other_configuration = TemplateConfiguration(self.context)
self.assertEquals(other_configuration.xpath,
u"descendant-or-self::*[@id = 'other-id']")
def test_marks_the_content_when_xpath_is_valid(self):
self.configuration.xpath = u"descendant-or-self::*[@id = 'selector']"
self.assertTrue(IPossibleTemplate in providedBy(self.context))
def test_unmarks_the_content_when_xpath_is_emptied(self):
self.configuration.xpath = u"descendant-or-self::*[@id = 'selector']"
self.configuration.xpath = None
self.assertTrue(IPossibleTemplate not in providedBy(self.context))
def test_doesnt_break_if_content_is_unmarked_when_xpath_is_emptied(self):
self.configuration.xpath = u"descendant-or-self::*[@id = 'selector']"
zope.interface.noLongerProvides(self.context, IPossibleTemplate)
self.configuration.xpath = None
self.assertTrue(IPossibleTemplate not in providedBy(self.context))
def test_marks_the_content_when_css_is_set(self):
self.configuration.css = '#selector'
self.assertTrue(IPossibleTemplate in providedBy(self.context))
def test_unmarks_the_content_when_css_is_emptied(self):
self.configuration.css = '#selector'
self.configuration.css = None
self.assertTrue(IPossibleTemplate not in providedBy(self.context))
def test_doesnt_break_if_content_is_unmarked_when_css_is_emptied(self):
self.configuration.css = '#selector'
zope.interface.noLongerProvides(self.context, IPossibleTemplate)
self.configuration.css = None
self.assertTrue(IPossibleTemplate not in providedBy(self.context))
@stubydoo.assert_expectations
class TestNullTemplateConfiguration(unittest.TestCase):
def setUp(self):
placelesssetup.setUp(self)
self.metadata = double(language='klingon')
@zope.component.adapter(None)
@zope.interface.implementer(ICategorization)
def metadata(context):
return self.metadata
zope.component.provideAdapter(metadata)
self.context = double(title=u'Content title')
self.configuration = NullTemplateConfiguration(self.context)
def tearDown(self):
placelesssetup.tearDown()
def test_default_xpath_to_select_the_body_tag(self):
self.assertEquals(self.configuration.xpath, u'//body')
def test_default_css_to_select_the_body_tag(self):
self.assertEquals(self.configuration.css, u'body')
def test_get_language_from_content_if_set(self):
tree = lxml.html.document_fromstring(self.configuration.html)
lang = tree.xpath('/html/@lang')[0]
self.assertEquals(lang, 'klingon')
def test_get_language_from_portal_if_content_has_no_language(self):
self.metadata = None
portal_state_view = double(default_language=lambda self: 'mayan')
@zope.component.adapter(None, None)
@zope.interface.implementer(
zope.publisher.interfaces.browser.IBrowserView
)
def view(context, request):
return portal_state_view
zope.component.provideAdapter(view, name=u'plone_portal_state')
tree = lxml.html.document_fromstring(self.configuration.html)
lang = tree.xpath('/html/@lang')[0]
self.assertEquals(lang, 'mayan')
def test_title_is_retrieved_from_context(self):
tree = lxml.html.document_fromstring(self.configuration.html)
title = tree.xpath('/html/head/title')[0].text
self.assertEquals(title, self.context.title)
@stubydoo.assert_expectations
class TestTemplateAdapter(unittest.TestCase):
def setUp(self):
placelesssetup.setUp(self)
class CompilationStrategy(object):
zope.component.adapts(None, ITemplateConfiguration)
zope.interface.implements(interfaces.ICompilationStrategy)
def __init__(self, content, config):
self.context, self.config = content, config
def compile(self):
return self.config.html % self.context.body
class Configuration(object):
zope.component.adapts(IAttributeAnnotatable)
zope.interface.implements(ITemplateConfiguration)
def __init__(self, context):
self.html = u'html(%s)'
zope.component.provideAdapter(CompilationStrategy)
zope.comp | onent.provideAdapter(Configuration)
def tearDown(self):
placelesssetup.tearDown()
def test_delegates_to_adapters(self):
context = double()
content = double(body=u'body')
zope.interface.alsoProvides(context, IAttributeAnnotatable)
template = Template(context)
self.assertEquals(templat | e.compile(content), u'html(body)')
@stubydoo.assert_expectations
class TestAssociatedTemplateCompilationAdapter(unittest.TestCase):
def setUp(self):
placelesssetup.setUp(self)
self.context = double()
s |
guyinatuxedo/escape | fmt_str/f5_64/exploit.py | Python | gpl-3.0 | 448 | 0.015625 | #First import pwn tools
from pwn import *
#Declare the binary, and run it
elf = ELF("./f5_64")
context(binary=elf)
target = process("./f5_64")
#Grab the buf0 address
buf0_address = p64(elf | .symbols["buf0"])
#Unpack sh and store as a string
sh = str(u64("sh\0\0\0\0\0\0"))
#Finish crafting the exploit and send it
target.sendline("%" + sh + "x%8$n00000" | + str(buf0_address))
#Drop to an interactive prompt to use the shell
target.interactive()
|
ISP-Tetsuro-Kitajima/xchainer | xchainer/manager.py | Python | mit | 6,615 | 0.000966 | # -*- coding: utf_8 -*-
import numpy as np
from sklearn.base import BaseEstimator
from chainer import Variable, cuda
import chainer.functions as F
class NNmanager (BaseEstimator):
def __init__(self, model, optimizer, lossFunction, gpu=True, **params):
# CUDAデバイスの設定
self.gpu = gpu
# 学習器の初期化
# ネットワークの定義
if gpu:
self.model = model.to_gpu()
else:
self.model = model
# オプティマイザの設定
self.optimizer = optimizer
self.optimizer.setup(self.model)
# 損失関数の設定
self.lossFunction = lossFunction
# epochの設定
self.epoch = params['epoch'] if 'epoch' in params else 20
# バッチサイズの設定
self.batchsize = params['batchsize'] if 'batchsize' in params else 100
# ロギングの設定
self.testing_cycle = params['testing_cycle'] if 'testing_cycle' in params else 1
self.logging = params['logging'] if 'logging' in params else False
self.train_logFormat = "[%d epoch] mean loss: %f, mean accuracy: %f"
self.testing_logFormat = "[%d epoch] mean l | oss: %f, mean a | ccuracy: %f, testing loss: %f, testing accuracy: %f"
# テストデータの設定
self.x_test = None
self.y_test = None
self.showTestingMode = None
def fit(self, x_train, y_train):
if self.showTestingMode:
if self.x_test is None or self.y_test is None:
raise RuntimeError("先にテストデータを登録してください")
self.runEpoch(x_train, y_train, self.x_test, self.y_test)
else:
self.runEpoch(x_train, y_train)
return self
def registTestingData(self, x_test, y_test):
self.x_test = x_test
self.y_test = y_test
def showTesting(self, mode):
self.showTestingMode = mode
def predict(self, x_test):
if self.gpu:
# GPU向け実装
x_test = cuda.to_gpu(x_test)
output = self.forward(x_test, train=False)
output.data = cuda.to_cpu(output.data)
else:
# CPU向け実装
output = self.forward(x_test, train=False)
return self.trimOutput(output)
def trimOutput(self, output):
# 結果を整形したいときなど。
return output.data
# 順伝播・逆伝播
def forward(self, x_data, train):
# x = Variable(x_data)
# h1 = F.relu(self.model.l1(x))
# h2 = F.relu(self.model.l2(h1))
# y_predict = self.model.l3(h2)
# return y_predict
raise NotImplementedError("`forward` method is not implemented.")
def backward(self, y_predict, y_data):
y = Variable(y_data)
loss = self.lossFunction(y_predict, y)
accuracy = F.accuracy(y_predict, y)
loss.backward()
return loss, accuracy
def setLogger(self, logging):
self.logging = logging
def setTrainLogFormat(self, logFormat):
self.train_logFormat = logFormat
def setTestingLogFormat(self, logFormat):
self.testing_logFormat = logFormat
def runEpoch(self, x_train, y_train, x_test=None, y_test=None):
if (x_test is None) and (y_test is not None):
raise RuntimeError("x_testとy_testの片方のみの指定は許されません")
if (x_test is not None) and (y_test is None):
raise RuntimeError("x_testとy_testの片方のみの指定は許されません")
testing = (x_test is not None)
for epoch in xrange(self.epoch):
mean_loss, mean_accuracy = self.epochProcess(x_train, y_train)
mode_train_only = not testing or (epoch % self.testing_cycle > 0)
mode_train_test = testing and (epoch % self.testing_cycle == 0)
if mode_train_only and self.logging:
# 訓練データのMean_Loss, Mean_Accuracyを表示
print self.train_logFormat % (epoch, mean_loss, mean_accuracy)
elif mode_train_test and self.logging:
# 訓練データとテストデータのMean_Loss, Mean_Accuracyを表示
if self.gpu:
# GPU向け実装 ToDo: バッチ分割回りがepochProcess()と似ているので、まとめる
testsize = len(y_test)
indexes = np.random.permutation(testsize)
sum_loss = 0.0
sum_accuracy = 0.0
for i in xrange(0, testsize, self.batchsize):
x_batch = x_test[indexes[i: i + self.batchsize]]
y_batch = y_test[indexes[i: i + self.batchsize]]
x_batch = cuda.to_gpu(x_batch)
y_batch = cuda.to_gpu(y_batch)
y_predict = self.forward(x_batch, train=False)
loss, accuracy = self.backward(y_predict, y_batch)
sum_loss += loss.data * self.batchsize
sum_accuracy += accuracy.data * self.batchsize
testing_loss = sum_loss / testsize
testing_accuracy = sum_accuracy / testsize
else:
# CPU向け実装 一括処理
y_predict = self.forward(x_test, train=False)
loss, accuracy = self.backward(y_predict, y_test)
testing_loss = loss.data
testing_accuracy = accuracy.data
print self.testing_logFormat % (epoch, mean_loss, mean_accuracy, testing_loss, testing_accuracy)
def epochProcess(self, x_train, y_train):
trainsize = len(y_train)
indexes = np.random.permutation(trainsize)
sum_loss = 0
sum_accuracy = 0
for i in xrange(0, trainsize, self.batchsize):
x_batch = x_train[indexes[i: i + self.batchsize]]
y_batch = y_train[indexes[i: i + self.batchsize]]
if self.gpu:
x_batch = cuda.to_gpu(x_batch)
y_batch = cuda.to_gpu(y_batch)
self.optimizer.zero_grads()
y_predict = self.forward(x_batch, train=True)
loss, accuracy = self.backward(y_predict, y_batch)
self.optimizer.update()
sum_loss += loss.data * self.batchsize
sum_accuracy += accuracy.data * self.batchsize
mean_loss = sum_loss / trainsize
mean_accuracy = sum_accuracy / trainsize
return mean_loss, mean_accuracy |
goldmedal/spark | sql/gen-sql-api-docs.py | Python | apache-2.0 | 5,943 | 0.001851 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from collections import namedtuple
from pyspark.java_gateway import launch_gateway
ExpressionInfo = namedtuple(
"ExpressionInfo", "className name usage arguments examples note since deprecated")
def _list_function_infos(jvm):
"""
Returns a list of function information via JVM. Sorts wrapped expression infos by name
and returns them.
"""
jinfos = jvm.org.apache.spark.sql.api.python.PythonSQLUtils.listBuiltinFunctionInfos()
infos = []
for jinfo in jinfos:
name = jinfo.getName()
usage = jinfo.getUsage()
usage = usage.replace("_FUNC_", name) if usage is not None else usage
infos.append(ExpressionInfo(
className=jinfo.getClassName(),
name=name,
usage=usage,
arguments=jinfo.getArguments().replace("_FUNC_", name),
examples=jinfo.getExamples().replace("_FUNC_", name),
note=jinfo.getNote(),
since=jinfo.getSince(),
deprecated=jinfo.getDeprecated()))
return sorted(infos, key=lambda i: i.name)
def _make_pretty_usage(usage):
"""
Makes the usage description pretty and returns a formatted string if `usage`
is not an empty string. Otherwise, returns None.
"""
if usage is not None and usage.strip() != "":
usage = "\n".join(map(lambda u: u.strip(), usage.split("\n")))
return "%s\n\n" % usage
def _make_pretty_arguments(arguments):
"""
Makes the arguments description pretty and returns a formatted string if `arguments`
starts with the argument prefix. Otherwise, returns None.
Expected input:
Arguments:
* arg0 - ...
...
* arg0 - ...
...
Expected output:
**Arguments:**
* arg0 - ...
...
* arg0 - ...
...
"""
if arguments.startswith("\n Arguments:"):
arguments = "\n".join(map(lambda u: u[6:], arguments.strip().split("\n")[1:]))
return "**Arguments:**\n\n%s\n\n" % arguments
def _make_pretty_examples(examples):
"""
Makes the examples description pretty and returns a formatted string if `examples`
starts with the example prefix. Otherwise, returns None.
Expected input:
Examples:
> SELECT ...;
...
> SELECT ...;
...
Expected outp | ut:
**Examples:**
```
> SELECT ...;
...
> SELECT ...;
...
```
"""
if examples.startswith("\n Examples:"):
examples = "\n".join(map(lambda u: u[6:], examples.strip().split("\n")[1:]) | )
return "**Examples:**\n\n```\n%s\n```\n\n" % examples
def _make_pretty_note(note):
"""
Makes the note description pretty and returns a formatted string if `note` is not
an empty string. Otherwise, returns None.
Expected input:
...
Expected output:
**Note:**
...
"""
if note != "":
note = "\n".join(map(lambda n: n[4:], note.split("\n")))
return "**Note:**\n%s\n" % note
def _make_pretty_deprecated(deprecated):
"""
Makes the deprecated description pretty and returns a formatted string if `deprecated`
is not an empty string. Otherwise, returns None.
Expected input:
...
Expected output:
**Deprecated:**
...
"""
if deprecated != "":
deprecated = "\n".join(map(lambda n: n[4:], deprecated.split("\n")))
return "**Deprecated:**\n%s\n" % deprecated
def generate_sql_markdown(jvm, path):
"""
Generates a markdown file after listing the function information. The output file
is created in `path`.
Expected output:
### NAME
USAGE
**Arguments:**
ARGUMENTS
**Examples:**
```
EXAMPLES
```
**Note:**
NOTE
**Since:** SINCE
**Deprecated:**
DEPRECATED
<br/>
"""
with open(path, 'w') as mdfile:
for info in _list_function_infos(jvm):
name = info.name
usage = _make_pretty_usage(info.usage)
arguments = _make_pretty_arguments(info.arguments)
examples = _make_pretty_examples(info.examples)
note = _make_pretty_note(info.note)
since = info.since
deprecated = _make_pretty_deprecated(info.deprecated)
mdfile.write("### %s\n\n" % name)
if usage is not None:
mdfile.write("%s\n\n" % usage.strip())
if arguments is not None:
mdfile.write(arguments)
if examples is not None:
mdfile.write(examples)
if note is not None:
mdfile.write(note)
if since is not None and since != "":
mdfile.write("**Since:** %s\n\n" % since.strip())
if deprecated is not None:
mdfile.write(deprecated)
mdfile.write("<br/>\n\n")
if __name__ == "__main__":
jvm = launch_gateway().jvm
spark_root_dir = os.path.dirname(os.path.dirname(__file__))
markdown_file_path = os.path.join(spark_root_dir, "sql/docs/index.md")
generate_sql_markdown(jvm, markdown_file_path)
|
20tab/django-political-map | politicalplaces/migrations/0009_politicalplace_postal_code.py | Python | mit | 483 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-18 23:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``postal_code`` char field to the ``PoliticalPlace`` model."""
    # Must run after the previous politicalplaces migration.
    dependencies = [
        ('politicalplaces', '0008_auto_20170317_1636'),
    ]
    operations = [
        migrations.AddField(
            model_name='politicalplace',
            name='postal_code',
            field=models.CharField(blank=True, max_length=255),
        ),
    ]
|
camielv/wildlife-monitoring | src/modules/datastructures/__init__.py | Python | lgpl-2.1 | 47 | 0 | __all__ = ["annot | ation", "detection", "t | rack"]
|
chasetb/sal | server/migrations/0004_auto_20150623_1623.py | Python | apache-2.0 | 1,504 | 0.00266 | # -*- coding: utf-8 -*-
from _ | _future__ import unicode_literals
from django.db import models, migrat | ions
class Migration(migrations.Migration):
    """Drop ``APIKey.read_only`` and add reverse ``related_name``s to the
    machine foreign keys; make ``Plugin.name`` unique."""
    dependencies = [
        ('server', '0003_auto_20150612_1123'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='apikey',
            name='read_only',
        ),
        # The AlterField operations below only add related_name values;
        # the underlying columns are unchanged.
        migrations.AlterField(
            model_name='condition',
            name='machine',
            field=models.ForeignKey(related_name='conditions', to='server.Machine'),
        ),
        migrations.AlterField(
            model_name='fact',
            name='machine',
            field=models.ForeignKey(related_name='facts', to='server.Machine'),
        ),
        migrations.AlterField(
            model_name='historicalfact',
            name='machine',
            field=models.ForeignKey(related_name='historical_facts', to='server.Machine'),
        ),
        migrations.AlterField(
            model_name='pendingappleupdate',
            name='machine',
            field=models.ForeignKey(related_name='pending_apple_updates', to='server.Machine'),
        ),
        migrations.AlterField(
            model_name='pendingupdate',
            name='machine',
            field=models.ForeignKey(related_name='pending_updates', to='server.Machine'),
        ),
        migrations.AlterField(
            model_name='plugin',
            name='name',
            field=models.CharField(unique=True, max_length=512),
        ),
    ]
|
GustJc/PyPhysics | projects/03-Game/main.py | Python | gpl-3.0 | 1,567 | 0.02425 | import pygame
import src.sprite as game
pygame.init()
screen = pygame.display.set_mode((400,300))
done = False
GameUpdateList = []
GameRenderList = []
catapult = game.Sprite("data/img/catapult.png", 5)
boulder = None
catapultAnim = game.Animation(catapult, 96, 96, 5, 100)
GameUpdateList.append(catapultAnim)
GameRenderList.append(catapultAnim)
# Testes --------------------------------------
def shotBoulder(dt):
global boulder
if( catapultAnim.isReady() ):
catapultAnim.pause = True
catapultAnim.forceFrame()
if(boulder == None):
boulder = game.Sprite("data/img/boulder.png")
boulder.pos[0] = 46
boulder.pos[1] = 7
GameRenderList.append(boulder)
if(boulder != None):
dt *= 0.001
boulder.pos[0] += 300*dt
boulder.pos[1] += 15*dt
if(boulder.pos[0] > screen.get_width()):
GameRenderList.remove(boulder)
| boulder = None
catapultAnim.forceFrame(0)
catapultAnim.pause = False
# Testes --------------------------------------
last_time = pygame.time.get_ticks()
while not done:
screen.fill((255,255,255))
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
# Atualiza temp | o
dt = pygame.time.get_ticks() - last_time
last_time = pygame.time.get_ticks()
# Atualiza timer da catapulta em ms
for obj in GameUpdateList:
obj.update(dt)
#catapultAnim.update(dt)
shotBoulder(dt)
for obj in GameRenderList:
obj.render(screen)
#catapultAnim.render(screen)
# Mostra tela
pygame.display.flip()
pygame.quit()
|
FirmlyReality/docklet | src/master/settings.py | Python | bsd-3-clause | 1,940 | 0.007732 | #!/usr/bin/python3
from utils import env
import json, os
from functools import wraps
from utils.log import logger
class settingsClass:
    """Persists docklet global settings as JSON in FS_PREFIX/local/settings.conf."""

    # Class-level default; an instance replaces it after loading the file.
    setting = {}

    def __init__(self):
        settingPath = env.getenv('FS_PREFIX') + '/local/settings.conf'
        if not os.path.exists(settingPath):
            # First run: create an empty settings file.
            with open(settingPath, 'w') as settingFile:
                settingFile.write(json.dumps({}))
        else:
            with open(settingPath, 'r') as settingFile:
                self.setting = json.loads(settingFile.read())

    def get(self, arg):
        """Return the value stored for *arg*, or '' when it is missing."""
        return self.setting.get(arg, '')

    def list(self, *args, **kwargs):
        """Return every setting; caller must supply user_group admin/root."""
        if 'user_group' not in kwargs:
            return {"success": 'false', "reason": "Cannot get user_group"}
        user_group = kwargs['user_group']
        if user_group not in ('admin', 'root'):
            return {"success": 'false', "reason": 'Unauthorized Action'}
        return {'success': 'true', 'result': self.setting}

    def update(self, *args, **kwargs):
        """Replace all settings with kwargs['newSetting'] and persist them.

        Returns {'success': 'true'} on success, {'success': 'false'}
        otherwise; requires user_group admin/root.
        """
        try:
            if 'user_group' not in kwargs:
                return {"success": 'false', "reason": "Cannot get user_group"}
            user_group = kwargs['user_group']
            if user_group not in ('admin', 'root'):
                return {"success": 'false', "reason": 'Unauthorized Action'}
            newSetting = kwargs['newSetting']
            settingPath = env.getenv('FS_PREFIX') + '/local/settings.conf'
            with open(settingPath, 'w') as settingFile:
                settingFile.write(json.dumps(newSetting))
            self.setting = newSetting
            return {'success': 'true'}
        except Exception:
            # Preserve the original best-effort contract: any failure is
            # reported as a plain unsuccessful result.
            return {'success': 'false'}
# Module-level singleton shared by every importer of this module.
settings = settingsClass()
|
GemHQ/round-py | round/users.py | Python | mit | 6,392 | 0.001095 | # -*- coding: utf-8 -*-
# users.py
#
# Copyright 2014-2015 BitVault, Inc. dba Gem
from __future__ import unicode_literals
from .config import *
from .wrappers import *
from .errors import *
from .subscriptions import Subscriptions
from .devices import Devices
from .wallets import generate, Wallet
class Users(DictWrapper):
    """A collection of round.Users objects."""
    def create(self, email, device_name, passphrase=None,
               api_token=None, redirect_uri=None, **kwargs):
        """Create a new User object and add it to this Users collection.
        In addition to creating a user, this call will create a device for that
        user, whose device_token will be returned from this call. Store the
        device_token, as it's required to complete Gem-Device authentication
        after the user approves the device at the end of their signup flow.
        If you lose the device_token returned from users.create, you'll have to
        create a new device for the user to gain access to their account again.
        Also, after this call, be sure to redirect the user to the location in
        `mfa_uri` (second return value of this function) to complete their
        account.
        If you get a 409 Conflict error, then the user already exists in the Gem
        system and you'll want to do a
        `client.user(email).devices.create(device_name)`
        Args:
          email (str)
          device_name (str): Human-readable name for the device through which
            your Application will be authorized to access the new User's account.
          passphrase (str, optional): A passphrase with which to encrypt a user
            wallet. If not provided, a default_wallet parameter must be passed in
            kwargs.
          api_token (str, optional): Your app's API token. This is optional if
            and only if the Client which will be calling this function already
            has Gem-Application or Gem-Identify authentication.
          redirect_uri (str, optional): A URI to which to redirect the User after
            they confirm their Gem account.
          **kwargs
        Returns: device_token
        """
        # Either a passphrase (to derive a wallet) or a pre-built
        # default_wallet must be supplied.
        if not passphrase and u'default_wallet' not in kwargs:
            raise ValueError("Usage: users.create(email, passphrase, device_name"
                             ", api_token, redirect_uri)")
        elif passphrase:
            default_wallet = generate(passphrase, ['primary'])['primary']
        else:
            default_wallet = kwargs['default_wallet']
        # Rename the client-side seed keys into the field names the API
        # expects, and drop the fields the API must never receive (notably
        # the plaintext private seed).
        default_wallet['name'] = 'default'
        default_wallet['primary_private_seed'] = default_wallet['encrypted_seed']
        default_wallet['primary_public_seed'] = default_wallet['public_seed']
        del default_wallet['encrypted_seed']
        del default_wallet['public_seed']
        del default_wallet['private_seed']
        # If not supplied, we assume the client already has an api_token param.
        if api_token:
            self.client.authenticate_identify(api_token)
        user_data = dict(email=email,
                         default_wallet=default_wallet,
                         device_name=device_name)
        if redirect_uri:
            user_data['redirect_uri'] = redirect_uri
        if 'first_name' in kwargs:
            user_data['first_name'] = kwargs['first_name']
        if 'last_name' in kwargs:
            user_data['last_name'] = kwargs['last_name']
        try:
            resource = self.resource.create(user_data)
        except ResponseError as e:
            # A 409 from the API means this email already has a Gem account.
            if "conflict" in e.message:
                raise ConflictError(
                    "This user already exists. Use "
                    "client.user(email).devices.create(name) to request "
                    "authorization from the user.")
            raise e
        return resource.attributes['metadata']['device_token']
    def wrap(self, resource):
        """Wrap a raw API resource in a User object."""
        return User(resource, self.client)
    def key_for(self, wrapper):
        """Dict key under which a wrapped User is stored: its email."""
        return wrapper.email
class User(Wrapper, Updatable):
    """A User represents an single *human* end-user.
    A User will have sole access to their backup key, and will need to
    communicate directly with Gem to provide MFA credentials for protected
    actions (updating their User object, publishing transactions, approving
    devices, etc).
    For a custodial model where a Wallet is intended to hold assets of multiple
    individuals or an organization, read the Gem docs regarding Application
    wallets.
    Attributes:
      first_name (str)
      last_name (str)
      email (str)
      phone_number (str)
      default_wallet (round.Wallet)
      wallets (round.Wallets)
      devices (list)
    """
    def update(self, **content):
        """Push updated fields to the API and return a fresh User wrapper."""
        resource = self.resource.update(content)
        return User(resource, self.client)
    @property
    def devices(self):
        """Devices authorized for this user (fetched once, then cached)."""
        if not hasattr(self, '_devices'):
            devices_resource = self.client.resources.devices_query(
                dict(email=self.email))
            self._devices = Devices(devices_resource, self.client)
        return self._devices
    @property
    def wallet(self):
        """Fetch and return this user's default (only) Wallet."""
        # Cached after the first API fetch.
        if not hasattr(self, '_wallet'):
            wallet_resource = self.resource.default_wallet.get()
            self._wallet = Wallet(wallet_resource, self.client)
        return self._wallet
    @property
    def subscriptions(self):
        """Fetch and return Subscriptions associated with this user."""
        # Cached after the first access.
        if not hasattr(self, '_subscriptions'):
            subscriptions_resource = self.resource.subscriptions
            self._subscriptions = Subscriptions(
                subscriptions_resource, self.client)
        return self._subscriptions
    def send_mfa(self):
        """Send an SMS MFA token to the user."""
        return self.resource.send_mfa({})
    def verify_mfa(self, mfa_token):
        """Verify an SMS or TOTP MFA token for this user.
        Args:
          mfa_token (str): An alphanumeric code from either a User's TOTP
            application or sent to them via SMS.
        Returns:
          True if the mfa_token is valid, False otherwise.
        """
        response = self.resource.verify_mfa({'mfa_token': mfa_token})
        # The API may answer with a boolean or the string 'true'.
        return (response['valid'] == True or response['valid'] == 'true')
|
luken/pcitweak | examples/printbin.py | Python | mit | 166 | 0 | #!/usr/bin/python
from pcitweak.bitstring import BitString
for n in range(0x10):
b = BitString(uint=n, length=4)
print " % 3d 0x%02x | %s" % ( | n, n, b.bin)
|
NickolayStorm/usatu-learning | ComputerGraphic/Lab2/complexview.py | Python | mit | 6,080 | 0.00289 | from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from cairodrawing import CairoDrawing, PointType
from structures import Point, HomogeneousPoint
class ComplexView(QWidget):
    """Widget rendering the complex drawing (orthographic multi-view
    projection) of two 3D points, T and C, around a screen-centred axis
    cross."""
    def __init__(self, parent=None, indent=20):
        super(ComplexView, self).__init__(parent)
        width = self.width()
        height = self.height()
        # New coordinate origin: the centre of the widget.
        self.x0 = width / 2
        self.y0 = height / 2
        # Margin between the outermost axis points and the drawing border.
        self.indent = indent
        # Length of a coordinate axis.
        self.axis_length = min(width, height) - 2*self.indent
        # The two points of the complex drawing.
        self.point_t = HomogeneousPoint(50, 50, 50)
        self.point_c = HomogeneousPoint(90, 90, 90)
    def set_points(self, point_t, point_c):
        """Replace both points and recompute all projections."""
        self.point_t = point_t
        self.point_c = point_c
        self.calculate()
    def resizeEvent(self, event):
        size = event.size()
        # NOTE(review): calling resize() from inside resizeEvent() is unusual
        # for Qt; kept from the original implementation — confirm intended.
        self.resize(size)
        self.x0 = self.width()/2
        self.y0 = self.height()/2
        self.axis_length = min(self.width(), self.height()) - 2*self.indent
        self.calculate()
    def calculate(self):
        """Recompute axis endpoints and the projection points of T and C."""
        # Endpoints of the coordinate axes.
        self.XY_begin = Point(self.x0 - self.axis_length/2, self.y0)
        self.XY_end = Point(self.x0 + self.axis_length/2, self.y0)
        self.YZ_begin = Point(self.x0, self.y0 - self.axis_length/2)
        self.YZ_end = Point(self.x0, self.y0 + self.axis_length/2)
        # Projection points of the complex drawing: point T.
        self.TX = Point(self.x0 - self.point_t.x, self.y0)
        self.TY1 = Point(self.x0 + self.point_t.y, self.y0)
        self.TY2 = Point(self.x0, self.y0 + self.point_t.y)
        self.TZ = Point(self.x0, self.y0 - self.point_t.z)
        self.T1 = Point(self.TX.x, self.TY2.y)
        self.T2 = Point(self.TX.x, self.TZ.y)
        self.T3 = Point(self.TY1.x, self.TZ.y)
        # Projection points of the complex drawing: point C.
        self.CX = Point(self.x0 - self.point_c.x, self.y0)
        self.CY1 = Point(self.x0 + self.point_c.y, self.y0)
        self.CY2 = Point(self.x0, self.y0 + self.point_c.y)
        self.CZ = Point(self.x0, self.y0 - self.point_c.z)
        self.C1 = Point(self.CX.x, self.CY2.y)
        self.C2 = Point(self.CX.x, self.CZ.y)
        self.C3 = Point(self.CY1.x, self.CZ.y)
    def paintEvent(self, event):
        painter = QPainter(self)
        pen = QPen()
        brush = QBrush(QColor(135, 206, 235, 255))
        painter.setBrush(brush)
        pen.setColor(QColor(20, 10, 70, 255))
        painter.drawRect(0, 0, self.width(), self.height())
        # Axis cross and labels.
        CairoDrawing.draw_line(painter, self.XY_begin, self.XY_end)
        CairoDrawing.draw_line(painter, self.YZ_begin, self.YZ_end)
        CairoDrawing.draw_text(painter, Point(self.YZ_begin.x+5, self.YZ_begin.y+10), "-Y")
        CairoDrawing.draw_text(painter, Point(self.YZ_begin.x-15, self.YZ_begin.y+10), "Z")
        CairoDrawing.draw_text(painter, Point(self.YZ_end.x+5, self.YZ_end.y-10), "Y")
        CairoDrawing.draw_text(painter, Point(self.YZ_end.x-20, self.YZ_end.y-10), "-Z")
        CairoDrawing.draw_text(painter, Point(self.XY_begin.x, self.XY_begin.y+15), "-Y")
        CairoDrawing.draw_text(painter, Point(self.XY_begin.x, self.XY_begin.y-5), "X")
        CairoDrawing.draw_text(painter, Point(self.XY_end.x-10, self.XY_end.y-5), "-X")
        CairoDrawing.draw_text(painter, Point(self.XY_end.x-10, self.XY_end.y+15), "Y")
        # Point T: solid projection lines, dashed construction lines, arc.
        CairoDrawing.draw_line(painter, self.T1, self.TX)
        CairoDrawing.draw_line(painter, self.TX, self.T2)
        CairoDrawing.draw_line(painter, self.T2, self.TZ)
        CairoDrawing.draw_line(painter, self.TZ, self.T3)
        CairoDrawing.change_pen_dash(painter)
        CairoDrawing.draw_line(painter, self.T3, self.TY1)
        CairoDrawing.draw_line(painter, self.TY2, self.T1)
        CairoDrawing.change_pen_default(painter)
        CairoDrawing.draw_arc(painter, self.TY1, self.TY2, Point(self.width()/2, self.height()/2))
        CairoDrawing.draw_point(painter, self.T1, "T1", PointType.main)
        CairoDrawing.draw_point(painter, self.TX, "TX", PointType.subsidiary)
        CairoDrawing.draw_point(painter, self.T2, "T2", PointType.main)
        CairoDrawing.draw_point(painter, self.TZ, "TZ", PointType.subsidiary)
        CairoDrawing.draw_point(painter, self.T3, "T3", PointType.main)
        CairoDrawing.draw_point(painter, self.TY1, "TY1", PointType.subsidiary)
        CairoDrawing.draw_point(painter, self.TY2, "TY2", PointType.subsidiary)
        # Point C (the observer point): same construction, observer styles.
        CairoDrawing.draw_line(painter, self.C1, self.CX, p2_name="CX")
        CairoDrawing.draw_line(painter, self.CX, self.C2, p2_name="C2")
        CairoDrawing.draw_line(painter, self.C2, self.CZ, p2_name="CZ")
        CairoDrawing.draw_line(painter, self.CZ, self.C3, p2_name="C3")
        CairoDrawing.change_pen_dash(painter)
        CairoDrawing.draw_line(painter, self.C3, self.CY1, p2_name="CY1")
        CairoDrawing.draw_line(painter, self.CY2, self.C1, "CY2", "C1")
        CairoDrawing.change_pen_default(painter)
        CairoDrawing.draw_arc(painter, self.CY1, self.CY2, Point(self.width()/2, self.height()/2))
        CairoDrawing.draw_point(painter, self.C1, "C1", PointType.observer_main)
        CairoDrawing.draw_point(painter, self.CX, "CX", PointType.observer_subsidiary)
        CairoDrawing.draw_point(painter, self.C2, "C2", PointType.observer_main)
        CairoDrawing.draw_point(painter, self.CZ, "CZ", PointType.observer_subsidiary)
        CairoDrawing.draw_point(painter, self.C3, "C3", PointType.observer_main)
        CairoDrawing.draw_point(painter, self.CY1, "CY1", PointType.observer_subsidiary)
        CairoDrawing.draw_point(painter, self.CY2, "CY2", PointType.observer_subsidiary)
|
hansonrobotics/chatbot | src/chatbot/aiml/Kernel.py | Python | mit | 50,732 | 0.001183 | # -*- coding: utf-8 -*-
"""
Copyright 2003-2010 Cort Stratton. All rights reserved.
Copyright 2015, 2016 Hanson Robotics
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FREEBSD PROJECT OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""This file contains the public interface to the aiml module."""
import AimlParser
import DefaultSubs
import Utils
from PatternMgr import PatternMgr
from WordSub import WordSub
from ConfigParser import ConfigParser
import copy
import glob
import os
import random
import re
import string
import sys
import time
import threading
import xml.sax
import logging
logger = logging.getLogger('hr.chatbot.aiml.kernel')
class Kernel:
# module constants
_globalSessionID = "_global" # key of the global session (duh)
_querySessionID = "_query" # key of the query session (duh)
_maxHistorySize = 10 # maximum length of the _inputs and _responses lists
# maximum number of recursive <srai>/<sr> tags before the response is
# aborted.
_maxRecursionDepth = 100
# special predicate keys
# keys to a queue (list) of recent user input
_inputHistory = "_inputHistory"
# keys to a queue (list) of recent responses.
_outputHistory = "_outputHistory"
# Should always be empty in between calls to respond()
_inputStack = "_inputStack"
def __init__(self):
self._verboseMode = True
self._version = "PyAIML 0.8.6"
self._brain = PatternMgr()
self._respondLock = threading.RLock()
self._textEncoding = "utf-8"
self._trace = []
# set up the sessions
self._sessions = { | }
self._addSession(self._globalSessionID)
# Set up the bot predicates
self._botPredicates = {}
self.setBotPredicate("name", "Nameless")
# set | up the word substitutors (subbers):
self._subbers = {}
self._subbers['gender'] = WordSub(DefaultSubs.defaultGender)
self._subbers['person'] = WordSub(DefaultSubs.defaultPerson)
self._subbers['person2'] = WordSub(DefaultSubs.defaultPerson2)
self._subbers['normal'] = WordSub(DefaultSubs.defaultNormal)
# set up the element processors
self._elementProcessors = {
"bot": self._processBot,
"condition": self._processCondition,
"date": self._processDate,
"formal": self._processFormal,
"gender": self._processGender,
"get": self._processGet,
"gossip": self._processGossip,
"id": self._processId,
"input": self._processInput,
"javascript": self._processJavascript,
"learn": self._processLearn,
"li": self._processLi,
"lowercase": self._processLowercase,
"person": self._processPerson,
"person2": self._processPerson2,
"random": self._processRandom,
"text": self._processText,
"sentence": self._processSentence,
"set": self._processSet,
"size": self._processSize,
"sr": self._processSr,
"srai": self._processSrai,
"star": self._processStar,
"system": self._processSystem,
"template": self._processTemplate,
"that": self._processThat,
"thatstar": self._processThatstar,
"think": self._processThink,
"topicstar": self._processTopicstar,
"uppercase": self._processUppercase,
"version": self._processVersion,
}
def bootstrap(self, brainFile=None, learnFiles=[], commands=[]):
"""Prepare a Kernel object for use.
If a brainFile argument is provided, the Kernel attempts to
load the brain at the specified filename.
If learnFiles is provided, the Kernel attempts to load the
specified AIML files.
Finally, each of the input strings in the commands list is
passed to respond().
"""
start = time.clock()
if brainFile:
self.loadBrain(brainFile)
# learnFiles might be a string, in which case it should be
# turned into a single-element list.
learns = learnFiles
try:
learns = [learnFiles + ""]
except:
pass
for file in learns:
self.learn(file)
# ditto for commands
cmds = commands
try:
cmds = [commands + ""]
except:
pass
for cmd in cmds:
logger.info(self._respond(cmd, self._globalSessionID))
if self._verboseMode:
logger.info("Kernel bootstrap completed in %.2f seconds" %
(time.clock() - start))
def verbose(self, isVerbose=True):
"""Enable/disable verbose output mode."""
self._verboseMode = isVerbose
def version(self):
"""Return the Kernel's version string."""
return self._version
def numCategories(self):
"""Return the number of categories the Kernel has learned."""
# there's a one-to-one mapping between templates and categories
return self._brain.numTemplates()
    def resetBrain(self):
        """Reset the brain to its initial state.
        This is essentially equivalent to:
            del(kern)
            kern = aiml.Kernel()
        """
        # Drop the current pattern store, then rebuild all kernel state by
        # re-running __init__ on this same instance.
        del(self._brain)
        self.__init__()
def loadBrain(self, filename):
"""Attempt to load a previously-saved 'brain' from the
specified filename.
NOTE: the current contents of the 'brain' will be discarded!
"""
if self._verboseMode:
logger.info("Loading brain from %s..." % filename,)
start = time.clock()
self._brain.restore(filename)
if self._verboseMode:
end = time.clock() - start
logger.info("done (%d categories in %.2f seconds)" %
(self._brain.numTemplates(), end))
def saveBrain(self, filename):
"""Dump the contents of the bot's brain to a file on disk."""
if self._verboseMode:
logger.info("Saving brain to %s..." % filename,)
start = time.clock()
self._brain.save(filename)
if self._verboseMode:
logger.info("done (%.2f seconds)" % (time.clock() - start))
def getPredicate(self, name, sessionID=_globalSessionID):
"""Retrieve the current value of the predicate 'name' from the
specified session.
If name is not a valid predicate in the session, the empty
string is returned.
"""
try:
return self._sessions[sessionID][name]
except KeyError:
return ""
def setPredicate(self, name, value, sessionID=_globalSessionID):
"""Set the value of the predicate 'name' in the specified
session.
|
pchaigno/grreat | lib/aff4_objects/stats_store_test.py | Python | apache-2.0 | 40,610 | 0.001231 | #!/usr/bin/env python
"""Tests for the stats_store classes."""
import math
import pandas
# pylint: disable=unused-import,g-bad-import-order
from grr.lib import server_plugins
# pylint: enable=unused-import,g-bad-import-order
from grr.lib import aff4
from grr.lib import data_store
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import stats
from grr.lib import test_lib
from grr.lib.aff4_objects import stats_store
class StatsStoreTest(test_lib.GRRBaseTest):
  def setUp(self):
    # Fresh write-mode StatsStore AFF4 object and a fixed process id for
    # every test.
    super(StatsStoreTest, self).setUp()
    self.process_id = "some_pid"
    self.stats_store = aff4.FACTORY.Create(
        None, "StatsStore", mode="w", token=self.token)
  def testCountersAreWrittenToDataStore(self):
    # A plain counter must be persisted as an INT StatsStoreValue in the
    # process row, keyed by the write timestamp.
    stats.STATS.RegisterCounterMetric("counter")
    stats.STATS.IncrementCounter("counter")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    counter = [x for x in row if x[0] == "aff4:stats_store/counter"]
    self.assertTrue(counter)
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.INT,
        int_value=1)
    self.assertEqual(counter[0], ("aff4:stats_store/counter",
                                  stored_value.SerializeToString(),
                                  42))
def testCountersWithFieldsAreWrittenToDataStore(self):
stats.STATS.RegisterCounterMetric("counter", fields=[("source", str)])
stats.STATS.IncrementCounter("counter", fields=["http"])
stats.STATS.IncrementCounter("counter", delta=2, fields=["rpc"])
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
token=self.token)
# Check that no plain counter is written.
values = [rdfvalue.StatsStoreValue(x[1]) for x in row
if x[0 | ] == "af | f4:stats_store/counter"]
self.assertEqual(len(values), 2)
http_field_value = rdfvalue.StatsStoreFieldValue(
field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
str_value="http")
rpc_field_value = rdfvalue.StatsStoreFieldValue(
field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
str_value="rpc")
# Check that counter with source=http is written.
http_counter = [x for x in values
if x.fields_values == [http_field_value]]
self.assertTrue(http_counter)
self.assertEqual(http_counter[0].value_type,
rdfvalue.MetricMetadata.ValueType.INT)
self.assertEqual(http_counter[0].int_value, 1)
# Check that counter with source=rpc is written.
rpc_counter = [x for x in values
if x.fields_values == [rpc_field_value]]
self.assertTrue(rpc_counter)
self.assertEqual(rpc_counter[0].value_type,
rdfvalue.MetricMetadata.ValueType.INT)
self.assertEqual(rpc_counter[0].int_value, 2)
  def testEventMetricsAreWrittenToDataStore(self):
    # Event metrics must be stored as a single DISTRIBUTION value that
    # aggregates every recorded sample.
    stats.STATS.RegisterEventMetric("foo_event")
    stats.STATS.RecordEvent("foo_event", 5)
    stats.STATS.RecordEvent("foo_event", 15)
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    values = [rdfvalue.StatsStoreValue(x[1]) for x in row
              if x[0] == "aff4:stats_store/foo_event"]
    self.assertEqual(len(values), 1)
    stored_value = values[0]
    self.assertEqual(stored_value.value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertEqual(stored_value.distribution_value.count, 2)
    self.assertEqual(stored_value.distribution_value.sum, 20)
  def testEventMetricsWithFieldsAreWrittenToDataStore(self):
    # Event metrics with a field must be stored as one distribution per
    # field value.
    stats.STATS.RegisterEventMetric("foo_event", fields=[("source", str)])
    stats.STATS.RecordEvent("foo_event", 5, fields=["http"])
    stats.STATS.RecordEvent("foo_event", 15, fields=["rpc"])
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    values = [rdfvalue.StatsStoreValue(x[1]) for x in row
              if x[0] == "aff4:stats_store/foo_event"]
    self.assertEqual(len(values), 2)
    http_field_value = rdfvalue.StatsStoreFieldValue(
        field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
        str_value="http")
    rpc_field_value = rdfvalue.StatsStoreFieldValue(
        field_type=rdfvalue.MetricFieldDefinition.FieldType.STR,
        str_value="rpc")
    # Check that distribution with source=http is written.
    http_events = [x for x in values
                   if x.fields_values == [http_field_value]]
    self.assertTrue(http_events)
    self.assertEqual(http_events[0].value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertEqual(http_events[0].distribution_value.count, 1)
    self.assertEqual(http_events[0].distribution_value.sum, 5)
    # Check that distribution with source=rpc is written.
    rpc_events = [x for x in values
                  if x.fields_values == [rpc_field_value]]
    self.assertTrue(rpc_events)
    self.assertEqual(rpc_events[0].value_type,
                     rdfvalue.MetricMetadata.ValueType.DISTRIBUTION)
    self.assertEqual(rpc_events[0].distribution_value.count, 1)
    self.assertEqual(rpc_events[0].distribution_value.sum, 15)
  def testStringGaugeValuesAreWrittenToDataStore(self):
    # A string gauge must round-trip as an STR StatsStoreValue.
    stats.STATS.RegisterGaugeMetric("str_gauge", str)
    stats.STATS.SetGaugeValue("str_gauge", "some_value")
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    counter = [x for x in row if x[0] == "aff4:stats_store/str_gauge"]
    self.assertTrue(counter)
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.STR,
        str_value="some_value")
    self.assertEqual(counter[0], ("aff4:stats_store/str_gauge",
                                  stored_value.SerializeToString(),
                                  42))
  def testIntGaugeValuesAreWrittenToDataStore(self):
    # An integer gauge must round-trip as an INT StatsStoreValue.
    stats.STATS.RegisterGaugeMetric("int_gauge", int)
    stats.STATS.SetGaugeValue("int_gauge", 4242)
    self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
                                sync=True)
    row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
                                     token=self.token)
    counter = [x for x in row if x[0] == "aff4:stats_store/int_gauge"]
    self.assertTrue(counter)
    stored_value = rdfvalue.StatsStoreValue(
        value_type=rdfvalue.MetricMetadata.ValueType.INT,
        int_value=4242)
    self.assertEqual(counter[0], ("aff4:stats_store/int_gauge",
                                  stored_value.SerializeToString(),
                                  42))
def testLaterValuesDoNotOverridePrevious(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
sync=True)
row = data_store.DB.ResolveRegex("aff4:/stats_store/some_pid", ".*",
token=self.token)
counters = [x for x in row if x[0] == "aff4:stats_store/counter"]
self.assertEqual(len(counters), 2)
counters = sorted(counters, key=lambda x: x[2])
stored_value = rdfvalue |
pjuu/pjuu | pjuu/posts/backend.py | Python | agpl-3.0 | 24,772 | 0 | # -*- coding: utf-8 -*-
"""Simple functions for dealing with posts, replies, votes and subscriptions
within Redis and MongoDB
:license: AGPL v3, see LICENSE for more details
:copyright: 2014-2021 Joe Doherty
"""
# 3rd party imports
from flask import current_app as app, url_for
from jinja2.filters import do_capitalize
# Pjuu imports
from pjuu import mongo as m, redis as r, celery, storage
from pjuu.lib import keys as k, timestamp, get_uuid
from pjuu.lib.alerts import BaseAlert, AlertManager
from pjuu.lib.pagination import Pagination
from pjuu.lib.parser import parse_post
from pjuu.lib.uploads import process_upload
# Allows changing the maximum length of a post.
MAX_POST_LENGTH = 500
class CantVoteOnOwn(Exception):
    """Raised when a user tries to vote on a post they authored."""
class AlreadyVoted(Exception):
    """Raised when a user tries to vote on a post they have already voted
    on."""
class CantFlagOwn(Exception):
    """Raised when a user tries to flag their own post."""
class AlreadyFlagged(Exception):
    """Raised when a user tries to flag the same post twice."""
class SubscriptionReasons(object):
    """Integer constants recording why a user is subscribed to a post.
    Stored alongside each subscription and later used by CommentingAlert
    to phrase the notification.
    """
    # You are the original poster
    POSTER = 1
    # You commented on the post
    COMMENTER = 2
    # You have been tagged in the post
    TAGEE = 3
class PostingAlert(BaseAlert):
    """Base form for all alerts used within the posts package."""
    def __init__(self, user_id, post_id):
        # Call the BaseAlert __init__ method
        super(PostingAlert, self).__init__(user_id)
        self.post_id = post_id
    def url(self):
        """Return the URL of the post this alert refers to.

        Looks up the ORIGINAL author of the post (e.g. Bob may have tagged
        you, but Brian wrote the post), because the view URL is built from
        the author's username.
        """
        # Get the author of the posts username so that we can build the URL
        author = m.db.posts.find_one({'_id': self.post_id},
                                     {'username': True, '_id': False})
        # Return the username or None
        return url_for('posts.view_post', username=author.get('username'),
                       post_id=self.post_id)
    def verify(self):
        """Overwrites the verify() of BaseAlert to check the post exists."""
        return m.db.users.find_one({'_id': self.user_id}, {}) and \
            m.db.posts.find_one({'_id': self.post_id}, {})
class TaggingAlert(PostingAlert):
    """Form of all tagging alert messages
    """
    def prettify(self, for_uid=None):
        # Render as an HTML snippet linking the tagger's profile and the
        # post.  NOTE(review): `self.user` appears to come from BaseAlert —
        # confirm it is a dict-like with a 'username' key.
        return '<a href="{0}">{1}</a> tagged you in a <a href="{2}">post</a>' \
            .format(url_for('users.profile',
                            username=self.user.get('username')),
                    do_capitalize(self.user.get('username')), self.url())
class CommentingAlert(PostingAlert):
    """Form of all commenting alert messages
    """
    def prettify(self, for_uid=None):
        # Let's try and work out why this user is being notified of a comment
        reason = subscription_reason(for_uid, self.post_id)
        if reason == SubscriptionReasons.POSTER:
            sr = 'posted'
        elif reason == SubscriptionReasons.COMMENTER:
            sr = 'commented on'
        elif reason == SubscriptionReasons.TAGEE:
            sr = 'were tagged in'
        else:
            # This should never really happen but let's play ball eh?
            sr = 'are subscribed to'
        # HTML snippet: commenter profile link + post link + the reason verb.
        return '<a href="{0}">{1}</a> ' \
               'commented on a <a href="{2}">post</a> you {3}' \
            .format(url_for('users.profile',
                            username=self.user.get('username')),
                    do_capitalize(self.user.get('username')), self.url(),
                    sr)
def create_post(user_id, username, body, reply_to=None, upload=None,
permission=k.PERM_PUBLIC):
"""Creates a new post
This handled both posts and what used to be called comments. If the
reply_to field is not None then the post will be treat as a comment.
You will need to make sure the reply_to post exists.
:param user_id: The user id of the user posting the post
:type user_id: str
:param username: The user name of the user posting (saves a lookup)
:type username: str
:param body: The content of the post
:type body: str
:param reply_to: The post id of the post this is a reply to if any
:type reply_to: str
:param upload:
:returns: The post id of the new post
:param permission: Who can see/interact with the post you are posting
:type permission: int
:rtype: str or None
"""
# Get a new UUID for the post_id ("_id" in MongoDB)
post_id = get_uuid()
# Get the timestamp, we will use this to populate users feeds
post_time = timestamp()
post = {
'_id': post_id, # Newly created post id
'user_id': user_id, # User id of the poster
'username': username, # Username of the poster
'body': body, # Body of the post
'created': post_time, # Unix timestamp for this moment in time
'score': 0, # Atomic score counter
}
if reply_to is not None:
# If the is a reply it must have this property
post['reply_to'] = reply_to
else:
# Replies don't need a comment count
post['comment_count'] = 0
# Set the permission a user needs to view
post['permission'] = permission
if upload:
# If there is an upload along with this post it needs to go for
# processing.
# process_upload() can throw an Exception of UploadError. We will let
# it fall through as a 500 is okay I think.
# TODO: Turn this in to a Queue task at some point
filename, animated_filename = process_upload(upload)
if filename is not None:
# If the upload process was okay attach the filename to the doc
post['upload'] = filename
if animated_filename:
post['upload_animated'] = animated_filename
else:
# Stop the image upload process here if something went wrong.
return None
# Process everything thats needed in a post
links, mentions, hashtags = parse_post(body)
# Only add the fields if we need too.
if links:
post['links'] = links
if mentions:
post['mentions'] = mentions
if hashtags:
post['hashtags'] = hashtags
# Add the post to the database
# If the post isn't stored, result will be None
result = m.db.posts.insert(post)
# Only carry out the rest of the actions if the insert was successful
if result:
if reply_to is None:
# Add post to authors feed
r.zadd(k.USER_FEED.format(user_id), {str(post_id): post_time})
# Ensure the feed does not grow to large
r.zremrangebyrank(k.USER_FEED.format(user_id), 0, -1000)
# Subscribe the poster to there post
subscribe(user_id, post_id, SubscriptionReasons.POSTER)
# Alert everyone tagged in the post
alert_tagees(mentions, user_id, post_id)
# Append to all followers feeds or approved followers based
# on the posts permission
if permission < k.PERM_APPROVED:
populate_followers_feeds.delay(user_id, post_id, post_time)
else:
populate_approved_followers_feeds.delay(
user_id, post_id, post_time
)
else:
# To reduce database look ups on the read path we will increment
# the reply_to's comment count.
m.db.posts.update({'_id': reply_to},
{'$inc': {'comment_count': 1}})
# Alert all subscribers to the post that a new comment has been
# added. We do this before subscribing anyone new
alert = CommentingAlert(user_id, reply_to)
subscribers = []
# Iterate through subscribers and let them know ab |
juliusf/Neurogenesis | neurogenesis/__init__.py | Python | bsd-3-clause | 22 | 0 | nam | e = "neurogenesi | s"
|
pakit/test_recipes | providesb.py | Python | bsd-3-clause | 413 | 0 | """ Formula that requires no other recip | e. """
from p | akit import Dummy, Recipe
class Providesb(Recipe):
    """Dummy recipe that does nothing special but exists so other test
    recipes can declare a dependency on it.
    """
    def __init__(self):
        super(Providesb, self).__init__()
        # 'dummy' placeholder; no real project backs this recipe.
        self.homepage = 'dummy'
        # Single 'stable' repo backed by the Dummy fetcher (no real source).
        self.repos = {
            'stable': Dummy()
        }
    def build(self):
        """No-op build step."""
        pass
    def verify(self):
        """No-op verification step."""
        pass
|
jsirois/pex | pex/vendor/_vendored/packaging/packaging/markers.py | Python | apache-2.0 | 9,913 | 0.001211 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import operator
import os
import platform
import sys
if "__PEX_UNVENDORED__" in __import__("os").environ:
from pyparsing import ParseException, ParseResults, stringStart, stringEnd # vendor:skip
else:
from pex.third_party.pyparsing import ParseException, ParseResults, stringStart, stringEnd
if "__PEX_UNVENDORED__" in __import__("os").environ:
from pyparsing import ZeroOrMore, Group, Forward, QuotedString # vendor:skip
else:
from pex.third_party.pyparsing import ZeroOrMore, Group, Forward, QuotedString
if "__PEX_UNVENDORED__" in __import__("os").environ:
from pyparsing import Literal as L # vendor:skip
else:
from pex.third_party.pyparsing import Literal as L
# noqa
from ._compat import string_types
from ._typing import TYPE_CHECKING
from .specifiers import Specifier, InvalidSpecifier
if TYPE_CHECKING: # pragma: no cover
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
Operator = Callable[[str, str], bool]
__all__ = [
"InvalidMarker",
"UndefinedComparison",
"UndefinedEnvironmentName",
"Marker",
"default_environment",
]
class InvalidMarker(ValueError):
    """
    An invalid marker was found; users should refer to PEP 508 for the
    expected marker syntax.
    """
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    Raised by ``_eval_op`` for operators not present in ``_operators``.
    """
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.  Raised by ``_get_env`` when a marker variable is missing.
    """
class Node(object):
    """Base node of the parsed marker tree, wrapping a single value."""

    def __init__(self, value):
        # type: (Any) -> None
        self.value = value

    def __str__(self):
        # type: () -> str
        return str(self.value)

    def __repr__(self):
        # type: () -> str
        return "<{0}({1!r})>".format(type(self).__name__, str(self))

    def serialize(self):
        # type: () -> str
        # Subclasses decide how the node renders in a marker string.
        raise NotImplementedError
class Variable(Node):
    def serialize(self):
        # type: () -> str
        # Variables render bare (unquoted), e.g. ``python_version``.
        return str(self)
class Value(Node):
    def serialize(self):
        # type: () -> str
        # Literal values render double-quoted, e.g. ``"3.6"``.
        return '"{0}"'.format(self)
class Op(Node):
    def serialize(self):
        # type: () -> str
        # Operators render bare, e.g. ``>=`` or ``not in``.
        return str(self)
VARIABLE = (
L("implementation_version")
| L("platform_python_implementation")
| L("implementation_name")
| L("python_full_version")
| L("platform_release")
| L("platform_version")
| L("platform_machine")
| L("platform_system")
| L("python_version")
| L("sys_platform")
| L("os_name")
| L("os.name") # PEP-345
| L("sys.platform") # PEP-345
| L("platform.version") # PEP-345
| L("platform.machine") # PEP-345
| L("platform.python_implementation") # PEP-345
| L("python_implementation") # undocumented setuptools legacy
| L("extra") # PEP-508
)
ALIASES = {
"os.name": "os_name",
"sys.platform": "sys_platform",
"platform.version": "platform_version",
"platform.machine": "platform_machine",
"platform.python_implementation": "platform_python_implementation",
"python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
VERSION_CMP = (
L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)
MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
BOOLOP = L("and") | L("or")
MARKER_VAR = VARIABLE | MARKER_VALUE
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
MARKER = stringStart + MARKER_EXPR + stringEnd
def _coerce_parse_result(results):
    # type: (Union[ParseResults, List[Any]]) -> List[Any]
    """Recursively convert a pyparsing ParseResults tree into plain lists."""
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(item) for item in results]
def _format_marker(marker, first=True):
    # type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str
    """Render a parsed marker (or sub-marker) back into its string form."""
    assert isinstance(marker, (list, tuple, string_types))
    # A structure like [[...]] is a single-item list whose only item is
    # itself a list; unwrap it so we do not emit redundant parentheses
    # around the whole expression.
    if (
        isinstance(marker, list)
        and len(marker) == 1
        and isinstance(marker[0], (list, tuple))
    ):
        return _format_marker(marker[0])
    if isinstance(marker, list):
        rendered = " ".join(_format_marker(sub, first=False) for sub in marker)
        # Only nested (non top-level) groups get parenthesized.
        return rendered if first else "(" + rendered + ")"
    if isinstance(marker, tuple):
        return " ".join(node.serialize() for node in marker)
    return marker
# Plain comparison operators used by ``_eval_op`` when the right-hand side
# is not a valid PEP 440 version specifier.
_operators = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}  # type: Dict[str, Operator]
def _eval_op(lhs, op, rhs):
    # type: (str, Op, str) -> bool
    """Evaluate a single ``lhs <op> rhs`` marker comparison.

    PEP 440 specifier semantics are tried first (``op + rhs`` parsed as a
    version specifier); if that fails, fall back to the plain comparison
    operators in ``_operators``.

    :raises UndefinedComparison: if ``op`` is not a recognized operator.
    """
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)
    oper = _operators.get(op.serialize())  # type: Optional[Operator]
    if oper is None:
        raise UndefinedComparison(
            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
        )
    return oper(lhs, rhs)
class Undefined(object):
pass
_undefined = Undefined()
def _get_env(environment, name):
# type: (Dict[str, str], str) -> str
value = environment.get(name, _undefined) # type: Union[str, Undefined]
if isinstance(value, Undefined):
raise UndefinedEnvironmentName(
"{0!r} does not exist in evaluation environment.".format(name)
)
return value
def _evaluate_markers(markers, environment):
    # type: (List[Any], Dict[str, str]) -> bool
    """Evaluate a parsed marker expression against *environment*.

    ``groups`` holds one list of booleans per "or"-separated clause:
    items within a clause are and-ed together, and the clauses are or-ed.
    """
    groups = [[]]  # type: List[List[bool]]
    for marker in markers:
        assert isinstance(marker, (list, tuple, string_types))
        if isinstance(marker, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker
            # Exactly one side is a Variable; resolve it from the environment.
            if isinstance(lhs, Variable):
                lhs_value = _get_env(environment, lhs.value)
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                rhs_value = _get_env(environment, rhs.value)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                # Start a new and-clause.
                groups.append([])
    return any(all(item) for item in groups)
def format_full_version(info):
    # type: (sys._version_info) -> str
    """Render a version-info struct as ``major.minor.micro[<level><serial>]``."""
    version = "{}.{}.{}".format(info.major, info.minor, info.micro)
    if info.releaselevel != "final":
        # Non-final releases append the level's first letter plus the
        # serial number, e.g. "3.11.0b2" for beta 2.
        version += info.releaselevel[0] + str(info.serial)
    return version
def default_environment():
# type: () -> | Dict[str, str]
if hasattr(sys, "implementation"):
# Ignoring the `sys.implementation` reference for type checking due to
# mypy not liking that the attribute doesn't exist in Python 2.7 when
# run with the `--py27` flag.
iver = format_full_version(sys.implementation.version) # type: ignore
imple | mentation_name = sys.implementation.name # type: ignore
else:
iver = "0"
implementation_name = ""
return {
"implementation_name" |
thaim/ansible | lib/ansible/modules/windows/win_netbios.py | Python | mit | 2,309 | 0.002599 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Thomas Moore (@tmmruk)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_netbios
version_added: '2.9'
short_description: Manage NetBIOS over TCP/IP settings on Windows.
description:
- Enables or disables NetBIOS on Windows network adapters.
- Can be used to protect a system against NBT-NS poisoning and avoid NBNS broadcast storms.
- Settings can be applied system wide or per adapter.
options:
state:
description:
- Whether NetBIOS should be enabled, disabled, or default (use setting from DHCP server or if static IP address is assigned enable NetBIOS).
choices:
- enabled
- disabled
- default
required: yes
type: str
adapter_names:
description:
- List of adapter names for which to manage NetBIOS settings. If this option is omitted then configuration is applied to all adapters on the system.
- The adapter name used is the connection caption in the Network Control Panel or via C(Get-NetAdapter), eg C(Ethernet 2).
type: list
required: | no
author:
- Thomas Moore (@tmmruk)
notes:
- Changing NetBIOS settings does not usually require a reboot and will take effect immediately.
- | UDP port 137/138/139 will no longer be listening once NetBIOS is disabled.
'''
EXAMPLES = r'''
- name: Disable NetBIOS system wide
win_netbios:
state: disabled
- name: Disable NetBIOS on Ethernet2
win_netbios:
state: disabled
adapter_names:
- Ethernet2
- name: Enable NetBIOS on Public and Backup adapters
win_netbios:
state: enabled
adapter_names:
- Public
- Backup
- name: Set NetBIOS to system default on all adapters
win_netbios:
state: default
'''
RETURN = r'''
reboot_required:
description: Boolean value stating whether a system reboot is required.
returned: always
type: bool
sample: true
'''
|
rrude/twilio_quest | app.py | Python | mit | 3,440 | 0.006105 | import os
from flask import Flask
from flask import redirect
from flask import render_template
from flask import request
from flask import Response
from twilio import twiml
from twilio.rest import TwilioRestClient
# Pull in configuration from system environment variables
TWILIO_ACCOUNT_SID = os.environ.get('TWILIO_ACCOUNT_SID')
TWILIO_AUTH_TOKEN = os.environ.get('TWILIO_AUTH_TOKEN')
TWILIO_NUMBER = os.environ.get('TWILIO_NUMBER')
# create an authenticated client that can make requests to Twilio for your
# account.
client = TwilioRestClient(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)
# Create a Flask web app
app = Flask(__name__)
# Render the home page
@app.route('/')
def index():
    """Serve the landing page template."""
    return render_template('index.html')
# Handle a POST request to send a text message. This is called via ajax
# on our web page
@app.route('/message', methods=['POST'])
def message():
    """Send a fixed SMS to the number posted in the ``to`` form field."""
    # Send a text message to the number provided.
    # NOTE: the local name shadows the view function; harmless here
    # (the function is never re-entered) but confusing.
    message = client.sms.messages.create(to=request.form['to'],
                                         from_=TWILIO_NUMBER,
                                         body='Good luck on your Twilio quest!')
    # Return a message indicating the text message is enroute
    return 'Message on the way!'
# Handle a POST request to make an outbound call. This is called via ajax
# on our web page
@app.route('/call', methods=['POST'])
def call():
    """Place an outbound call to the number posted in the ``to`` form field.

    NOTE(review): the '/call' rule is also registered by ``inbound_call``
    below with methods GET/POST; the overlapping POST mapping makes the
    routing ambiguous -- confirm which handler is intended for POST /call.
    """
    # Make an outbound call to the provided number from your Twilio number
    call = client.calls.create(to=request.form['to'], from_=TWILIO_NUMBER,
                               url='http://twimlets.com/message?Message%5B0%5D=http://demo.kevinwhinnery.com/audio/zelda.mp3')
    # Return a message indicating the call is coming
    return 'Call inbound!'
@app.route('/sms', methods=['GET', 'POST'])
def sms():
    """Reply to an inbound SMS webhook with a thank-you TwiML message."""
    response = twiml.Response();
    response.message('hi! thanks for texting in!')
    return Response(str(response))
@app.route('/call', methods=['GET', 'POST'])
def inbound_call():
| re | sponse = twiml.Response();
response.say('I just responded to a phone call. Huzzah!', voice='woman')
# Gather digits.
with response.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say('Press 1 for more options, or press 0 to speak to Jona.')
return str(response)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
# Handle key press from a user.
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
response = twiml.Response()
response.say("Good job you pressed one. You are smart and beautiful.", voice='man')
return str(response)
# If the caller presses 0, it calls Jona
elif digit_pressed == "0":
response = twiml.Response()
response.dial("+12102500227")
# If call fails
response.say("Sorry, Jona is not available. Goodbye.")
return str(response)
# If the caller pressed anything but 1 or 0, redirect them to the main menu.
else:
return redirect("/call")
# Generate TwiML instructions for an outbound call
@app.route('/hello')
def hello():
    """Return a TwiML greeting as a text/xml response."""
    twiml_doc = twiml.Response()
    twiml_doc.say('Hello there! You have successfully configured a web hook.')
    twiml_doc.say('Good luck on your Twilio quest!', voice='woman')
    xml_body = str(twiml_doc)
    return Response(xml_body, mimetype='text/xml')
if __name__ == '__main__':
# Note that in production, you would want to disable debugging
app.run(debug=True) |
bluephlavio/latest | test/test_config.py | Python | mit | 438 | 0.002283 | try:
import configparser
except:
import ConfigParser as configparser
def test_config(config):
assert config.templates_dir == '~/.latest/templates/'
assert con | fig.pyexpr_entry == r'\{\$'
assert config.pyexpr_exit == r'\$\}'
assert config.env_entry == r'<<<' |
assert config.env_exit == r'>>>'
def test_non_existing_config(non_existing_config):
assert non_existing_config.env_entry == r'\\begin\{latest\}'
|
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/ubuntuone-storage-protocol/ubuntuone/storageprotocol/delta.py | Python | gpl-3.0 | 94 | 0.021277 | ../../../../../../share/pyshared/ubuntuone-st | orage-protocol/ubuntu | one/storageprotocol/delta.py |
knodir/son-emu | scenarios/experiments/bench.py | Python | apache-2.0 | 4,445 | 0.003825 | import time
import os
import glog
from daisy import executeCmds
def start_benchmark(algo, num_of_chains, mbps, chain_index=None, isIperf=False):
""" Allocate E2 style chains. """
# list of commands to execute one-by-one
cmds = []
if isIperf:
glog.info('Launching iperf instead of tcpreplay...')
dirname = "%s%s-iperf" % (algo, str(mbps))
else:
dirname = "%s%s" % (algo, str(mbps))
cmds.append('sudo rm -f ./results/allocation/%s/*.csv' %
(dirname))
executeCmds(cmds)
cmds[:] = []
# # copy the traces into the containers for tcpreplay, this might take a while
if not isIperf:
glog.info('Copying traces into the containers...')
if chain_index is None:
for chain_index in range(num_of_chains):
cmds.append('sudo docker cp .. | /traces/output.pcap mn.chain%d-source:/' % chain_index)
else:
cmds.append('sudo docker cp ../traces/output.pcap mn.chain%d-source:/' % chain_index)
executeCmds(cmds)
cmds[:] = []
# # copy the traces into the containers for tcpreplay, this might take a while
glog.info('Running dstat...')
if chain_index is None:
for chain_index in range(num_of_chains):
cmds.append('sudo docker exec -d mn.chain%d-sink dsta | t --net --time -N intf2 --bits --output /tmp/dstat.csv' % chain_index)
if isIperf:
cmds.append('sudo docker exec mn.chain%d-sink iperf3 -s' % chain_index)
else:
cmds.append('sudo docker exec -d mn.chain%d-sink dstat --net --time -N intf2 --bits --output /tmp/dstat.csv' % chain_index)
if isIperf:
cmds.append('sudo docker exec -d mn.chain%d-sink iperf3 -s' % chain_index)
executeCmds(cmds)
cmds[:] = []
print('>>> wait 2s for dstats to initialize')
time.sleep(2)
print('<<< wait complete.')
if chain_index is None:
for chain_index in range(num_of_chains):
# each loop is around 1s for 10 Mbps speed, 100 loops easily make 1m
if isIperf:
cmds.append('sudo docker exec -d mn.chain%d-source iperf3 --zerocopy -b %dm -c 10.0.10.10' %
(chain_index, mbps))
else:
cmds.append('sudo docker exec -d mn.chain%d-source tcpreplay --loop=0 --mbps=%d -d 1 --intf1=intf1 output.pcap' %
(chain_index, mbps))
else:
# each loop is around 1s for 10 Mbps speed, 100 loops easily make 1m
if isIperf:
cmds.append('sudo docker exec -d mn.chain%d-source iperf3 --zerocopy -t 86400 -b %dm -c 10.0.10.10' %
(chain_index, mbps))
else:
cmds.append('sudo docker exec -d mn.chain%d-source tcpreplay --loop=0 --mbps=%d -d 1 --intf1=intf1 output.pcap' %
(chain_index, mbps))
executeCmds(cmds)
cmds[:] = []
def finish_benchmark(algo, num_of_chains, mbps, isIperf=False):
    """Stop a running benchmark and collect its dstat results.

    Kills the traffic generators (tcpreplay/iperf3) and the dstat
    collectors, then copies each chain sink's dstat CSV out of its
    container into ``results/iter-allocation/<algo><mbps>[-iperf]/`` and
    removes the CSV from the container.

    :param algo: allocation algorithm name (used in the result dir name)
    :param num_of_chains: number of chains whose sinks hold results
    :param mbps: traffic rate used by the run (used in the result dir name)
    :param isIperf: True if the run used iperf3 instead of tcpreplay
    """
    # list of commands to execute one-by-one
    cmds = []
    if isIperf:
        dirname = "%s%s-iperf" % (algo, str(mbps))
    else:
        dirname = "%s%s" % (algo, str(mbps))
    # Kill all traffic generators and dstat processes host-wide (the
    # commented per-container variant below was replaced by killall).
    # for chain_index in range(num_of_chains):
    #     cmds.append(
    #         'sudo docker exec mn.chain%d-source pkill tcpreplay' % chain_index)
    #     cmds.append(
    #         'sudo docker exec mn.chain%d-sink pkill python2' % chain_index)
    cmds.append("sudo killall tcpreplay")
    cmds.append("sudo killall python2")
    cmds.append("sudo killall iperf3")
    executeCmds(cmds)
    cmds[:] = []
    print('>>> wait 10s for dstats to terminate')
    time.sleep(10)
    print('<<< wait complete.')
    # create the target folder if it does not exist
    # NOTE: 'dir' shadows the builtin of the same name.
    dir = 'results/iter-allocation/%s' % (dirname)
    if not os.path.exists(dir):
        os.makedirs(dir)
    # copy .csv results from VNF to the host
    for chain_index in range(num_of_chains):
        cmds.append('sudo docker cp mn.chain%s-sink:/tmp/dstat.csv ./results/iter-allocation/%s/e2-allocate-from-chain%s-sink.csv' %
                    (str(chain_index), dirname, str(chain_index)))
    executeCmds(cmds)
    cmds[:] = []
    # remove dstat output files so the next run starts clean
    for chain_index in range(num_of_chains):
        cmds.append('sudo docker exec mn.chain%d-sink rm /tmp/dstat.csv' % chain_index)
    executeCmds(cmds)
    cmds[:] = []
    print('done')
|
cloudnull/tribble-api | tribble/api/views/zones_rest.py | Python | gpl-3.0 | 12,756 | 0 | # =============================================================================
# Copyright [2013] [Kevin Carter]
# License Information :
# This software has no warranty, it is provided 'as is'. It is your
# responsibility to validate the behavior of the routines and its accuracy
# using the code provided. Consult the GNU General Public license for further
# details (see GNU General Public License).
# http://www.gnu.org/licenses/gpl.html
# =============================================================================
import logging
import traceback
import flask
from tribble.api import application
from tribble.api import utils
from tribble.common.db import db_proc
from tribble.common.db import zone_status
from tribble.common import rpc
from tribble.common import system_config
mod = flask.Blueprint('zones', __name__)
LOG = logging.getLogger('tribble-api')
CONFIG = system_config.ConfigurationSetup()
DEFAULT = CONFIG.config_args()
DB = application.DB
@mod.route('/v1/schematics/<sid>/zones', methods=['GET'])
def zones_get(sid):
    """Return every zone belonging to a schematic.

    Accessible as GET /v1/schematics/<sid>/zones.

    :param sid: ``str`` # schematic ID
    :return json, status: ``tuple``
    """
    # On failure the handler returns (False, message, http_status);
    # on success (True, schematic, zones, user_id).
    parsed_data = utils.zone_basic_handler(sid=sid)
    if parsed_data[0] is False:
        return utils.return_msg(msg=parsed_data[1], status=parsed_data[2])
    else:
        _success, schematic, zones, user_id = parsed_data
    LOG.debug('%s %s %s %s', _success, schematic, zones, user_id)
    try:
        return_zones = []
        for zone in zones:
            # pop_ts presumably drops timestamp fields from the record
            # dict before it is serialized -- verify in utils.
            dzone = utils.pop_ts(zone.__dict__)
            instances = db_proc.get_instances(zon=zone)
            if instances:
                dzone['instance_quantity'] = len(instances)
            return_zones.append(dzone)
    except Exception:
        LOG.error(traceback.format_exc())
        return utils.return_msg(msg='Unexpected Error', status=500)
    else:
        return utils.return_msg(msg=return_zones, status=200)
@mod.route('/v1/schematics/<sid>/zones/<zid>', methods=['GET'])
def zone_get(sid, zid):
    """Return a single zone, including its instance records.

    Accessible as GET /v1/schematics/<sid>/zones/<zid>.

    :param sid: ``str`` # schematic ID
    :param zid: ``str`` # Zone ID
    :return json, status: ``tuple``
    """
    parsed_data = utils.zone_basic_handler(sid=sid, zid=zid)
    if parsed_data[0] is False:
        return utils.return_msg(msg=parsed_data[1], status=parsed_data[2])
    else:
        _success, schematic, zone, user_id = parsed_data
    _zone = utils.pop_ts(temp=zone.__dict__)
    instances = db_proc.get_instances(zon=zone)
    if instances:
        # Inline each instance record (also with timestamps stripped).
        _zone['instances'] = [
            utils.pop_ts(temp=instance.__dict__) for instance in instances
        ]
    LOG.debug('%s %s %s %s', _success, schematic, zone, user_id)
    return utils.return_msg(msg=_zone, status=200)
@mod.route('/v1/schematics/<sid>/zones/<zid>', methods=['DELETE'])
def zone_delete(sid=None, zid=None):
    """Delete a Zone and its instances asynchronously.

    Publishes a ``zone_delete`` job over RPC carrying the instance UUIDs,
    marks the zone state as deleting, and returns 203 immediately.

    Accessible as DELETE /v1/schematics/<sid>/zones/<zid>.

    :param sid: ``str`` # schematic ID
    :param zid: ``str`` # Zone ID
    :return json, status: ``tuple``
    """
    parsed_data = utils.zone_basic_handler(sid=sid, zid=zid)
    if parsed_data[0] is False:
        return utils.return_msg(msg=parsed_data[1], status=parsed_data[2])
    else:
        _success, schematic, zone, user_id = parsed_data
    # Refuse to delete while the zone build is still in flight.
    if zone.zone_state == 'BUILDING':
        build_response = (
            'Zone Delete can not be performed because Zone "%s" has a'
            ' Pending Status' % zone.id
        )
        return utils.return_msg(msg=build_response, status=200)
    LOG.debug('%s %s %s %s', _success, schematic, zone, user_id)
    try:
        config = db_proc.get_configmanager(skm=schematic)
        instances = db_proc.get_instances(zon=zone)
        # Build the RPC payload describing what the worker must tear down.
        packet = utils.build_cell(
            job='zone_delete',
            schematic=schematic,
            zone=zone,
            config=config
        )
        packet['uuids'] = [instance.instance_id for instance in instances]
        rpc.default_publisher(message=packet)
        sess = DB.session
        zone_status.ZoneState(cell=packet).delete()
    except Exception:
        LOG.error(traceback.format_exc())
        return utils.return_msg(msg='unexpected error', status=500)
    else:
        db_proc.commit_session(session=sess)
        return utils.return_msg(msg='deletes received', status=203)
@mod.route('/v1/schematics/<sid>/zones/<zid>/purge', methods=['DELETE'])
def zone_purge(sid=None, zid=None):
    """Purge a Zone record without tearing anything down.

    Removes all database traces of a zone without attempting to
    disconnect, delete instances, or otherwise clean up the zone's
    resources.

    Accessible as DELETE /v1/schematics/<sid>/zones/<zid>/purge.

    :param sid: ``str`` # schematic ID
    :param zid: ``str`` # Zone ID
    :return json, status: ``tuple``
    """
    parsed_data = utils.zone_basic_handler(sid=sid, zid=zid)
    if parsed_data[0] is False:
        return utils.return_msg(msg=parsed_data[1], status=parsed_data[2])
    else:
        _success, schematic, zone, user_id = parsed_data
    LOG.debug('%s %s %s %s', _success, schematic, zone, user_id)
    try:
        sess = DB.session
        # Only the zone row itself is removed -- no RPC teardown is issued.
        db_proc.delete_item(session=sess, item=zone)
    except Exception:
        LOG.error(traceback.format_exc())
        return utils.return_msg(msg='unexpected error', status=500)
    else:
        db_proc.commit_session(session=sess)
        return utils.return_msg(
            msg='zone %s was purged' % zone.id, status=203
        )
@mod.route('/v1/sch | ematics/<sid>/zones/<zid>', methods=['PUT'])
def zone_put(sid=None, zid=None):
"""Up | date a Zone.
Method is accessible with PUT /v1/schematics/<sid>/zones/<zid>
:param sid: ``str`` # schematic ID
:param zid: ``str`` # Zone ID
:return json, status: ``tuple``
"""
parsed_data = utils.zone_data_handler(sid=sid)
if parsed_data[0] is False:
return utils.return_msg(msg=parsed_data[1], status=parsed_data[2])
else:
_success, schematic, payload, user_id = parsed_data
LOG.debug('%s %s %s %s', _success, schematic, payload, user_id)
zone = db_proc.get_zones_by_id(skm=schematic, zid=zid)
if not zone:
return utils.return_msg(msg='no zones found', status=404)
try:
sess = DB.session
sess = db_proc.put_zone(
session=sess,
zon=zone,
put=payload
)
except Exception:
LOG.error(traceback.format_exc())
return utils.return_msg(msg='unexpected error', status=500)
else:
db_proc.commit_session(session=sess)
return utils.return_msg(msg='updates received', status=201)
@mod.route('/v1/schematics/<sid>/zones', methods=['POST'])
def zone_post(sid=None):
"""Post a Zone.
Method is accessible with POST /v1/schematics/<sid>/zones
:param sid: ``str`` # schematic ID
:return json, status: ``tuple``
"""
parsed_data = utils.zone_data_handler(sid=sid, check_for_zone=True)
if parsed_data[0] is False:
return utils.return_msg(msg=parsed_data[1], status=parsed_data[2])
else:
_success, schematic, payload, user_id = parsed_data
LOG.debug('%s %s %s %s', _success, schematic, payload, user_id)
config = db_proc.get_configmanager(skm=schematic)
try:
sess = DB.session
for _zn in payload['zones']:
ssh_user = _zn.get('ssh_user')
pub = _zn.get('ssh_key_pub')
pri = _zn.get('ssh_key_pri')
key_name = _zn.get('key_name')
ssh_key = db_proc.post_instanceskeys(
pub=pub,
pri=pri,
sshu=ssh_user,
key_name=key_name
)
db_proc.add_item(session=sess, item=ssh_key)
zone = db_proc.post_zones(
skm=schematic,
|
aabilio/PyDownTV | Servers/riasbaixas.py | Python | gpl-3.0 | 2,956 | 0.00782 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of PyDownTV.
#
# PyDownTV is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyDownTV is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyDownTV. If not, see <http://www.gnu.org/licenses/>.
# Pequeña descripción de qué canal de tv es el módulo
__author__="aabilio"
__date__ ="$16-may-2011 11:03:38$"
from Descargar import Descargar
from utiles import salir, formatearNombre, printt
import sys
class RiasBaixas(object):
'''
Clase que maneja la descarga los vídeos de Canal Rías Baixas
'''
def __init__(self, url=""):
| self._URL_recibida = url
    def getURL(self):
        """Return the URL this downloader was created for."""
        return self._URL_recibida
    def setURL(self, url):
        """Replace the URL to download from."""
        self._URL_recibida = url
    url = property(getURL, setURL)
# Funciones p | rivadas que ayuden a procesarDescarga(self):
    def __descHTML(self, url2down):
        """Download and return the HTML at *url2down* via the Descargar class."""
        D = Descargar(url2down)
        return D.descargar()
    def procesarDescarga(self):
        """Work out the final video URL and a filename for the download.

        Returns ``[ruta_url, nombre]`` where ``nombre`` may be ``None`` if
        no filename could be determined (the downloader then falls back to
        a name derived from the URL).  Both entries may also be lists, in
        which case ``nombre[i]`` names ``ruta_url[i]``.
        """
        # Fetch the page HTML for the URL this instance was built with.
        streamHTML = self.__descHTML(self._URL_recibida)
        if streamHTML.find("<param name=\"movie\"") != -1:
            printt(u"[INFO] Vídeo")
            # Carve out the <param name="movie" .../> tag and pull the
            # video URL from its "file=" query parameter.
            streamVideo = streamHTML.split("<param name=\"movie\"")[1].split("/>")[0]
            url = streamVideo.split("file=")[1].split("&")[0]
            ext = "." + url.split(".")[-1]
            # The file name comes from the page's <meta name="title"> tag.
            name = streamHTML.split("<meta name=\"title\" content=\"")[1].split("\"")[0] + ext
        else:
            # salir() presumably terminates the program here; otherwise
            # `url`/`name` below would be unbound -- TODO confirm.
            salir(u"[!!!] No se reconoce el tipo de contenido")
        if name:
            name = formatearNombre(name)
        return [url, name]
|
monuszko/django-polls | polls/migrations/0004_auto_20160201_1000.py | Python | gpl-2.0 | 926 | 0.00216 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-02-01 09:00
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migrat | ion):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
| ('polls', '0003_auto_20160131_1905'),
]
operations = [
migrations.AddField(
model_name='poll',
name='created_by',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
migrations.AlterField(
model_name='poll',
name='pub_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name=b'date published'),
),
]
|
caktus/django-timepiece | timepiece/reports/urls.py | Python | mit | 655 | 0 | from django.conf.urls import url
from timepiece.reports import views
# URL routes for the timepiece reports views.
urlpatterns = [
    url(r'^reports/hourly/$',
        views.HourlyReport.as_view(),
        name='report_hourly'),
    url(r'^reports/payroll/$',
        views.report_payroll_summary,
        name='report_payroll_summary'),
    url(r'^reports/billable_hours/$',
        views.BillableHours.as_view(),
        name='report_billable_hours'),
    url(r'^reports/productivity/$',
        views.report_productivity,
        name='report_productivity'),
    url(r'^reports/estimation_accuracy/$',
        views.report_estimation_accuracy,
        name='report_estimation_accuracy'),
]
|
spark8103/ops17 | app/user/__init__.py | Python | mit | 101 | 0.009901 | # coding: utf- | 8
from flask import Blueprint

# Blueprint for the user-related views; routes are attached in views.py.
user = Blueprint('user', __name__)

from . import views  # noqa: E402  -- imported last so views can use ``user``
|
MKDTeam/SSH-Client | connection.py | Python | mit | 8,807 | 0.033413 | import paramiko, os, hashlib, random, struct
from Crypto.Cipher import AES
from Crypto import Random
from ui_class.ui_Connection import Ui_dialog_connection
from ui_class.ui_LoadSettings import Ui_qDialog_load
from ui_class.ui_SaveSettings import Ui_qDialog_save
from Exceptions import SSHConnectionError, HostError, PortError, AuthenticationError, BadHostKeyError
from PyQt5.QtWidgets import QWidget, QDialog, QMessageBox
from PyQt5.QtCore import pyqtSignal, QObject
#import ui_Connection
class Settings:
    """Password-protected, AES-encrypted application settings.

    Settings live in ``self.data`` (a dict of strings) and are persisted as
    the encrypted file ``settings``.  ``load``/``save`` show a small password
    dialog; the password is hashed with SHA-256 to derive the AES key.  The
    plaintext layout is one line of 16 random digits (a key-check line)
    followed by one ``key |===| value`` line per entry.
    """
    def __init__(self):
        self.data = {}
    def load(self):
        """Ask for the password and decrypt ``settings`` into ``self.data``."""
        self.window = QDialog()
        self.ui = Ui_qDialog_load()
        self.ui.setupUi(self.window)
        self.ui.groupBox.hide()
        def confirm_button():
            password = self.ui.lineEdit_password.text()
            key = hashlib.sha256(password.encode('utf-8')).digest()
            self.decrypt_file(key = key, in_filename = 'settings', out_filename = 'settings.xml', chunksize = 64)
            file = open('settings.xml', 'rb')
            check_line = file.read(17)
            # A correct key reproduces the 16 random digits written by save().
            if check_line[:16].isdigit():
                for line in file:
                    # str(line) is "b'...\\n'": strip "b'" and "\\n'" wrappers.
                    key, value = str(line)[2:-3].split(' |===| ')[:2]
                    self.data[key] = value
                self.window.close()
            else:
                error = QMessageBox()
                error.setText("Неверный пароль")
                error.setInformativeText("Веденный пароль не верен, проверте раскладку клавиатуры и не нажата ли клавиша CapsLock")
                error.exec_()
            file.close()
            os.remove('settings.xml')
        self.ui.pushButton_confirm.clicked.connect(confirm_button)
        self.window.exec_()
    def save(self):
        """Ask for a password and write ``self.data`` encrypted to ``settings``."""
        self.window = QDialog()
        self.ui = Ui_qDialog_save()
        self.ui.setupUi(self.window)
        self.ui.groupBox.hide()
        def confirm_button():
            password = self.ui.lineEdit_password.text()
            if len(password) < 3:
                error = QMessageBox()
                error.setText("Неверный пароль")
                error.setInformativeText("Веденный пароль слишком короткий")
                error.exec_()
                return
            key = hashlib.sha256(password.encode('utf-8')).digest()
            file = open('settings.xml', 'w')
            # Key-check line: 16 random digits verified by load().
            line = ''.join(chr(random.randint(ord('0'), ord('9'))) for i in range(16)) + '\n'
            file.write(line)
            for index in self.data.keys():
                line = index + ' |===| ' + self.data[index] + '\n'
                file.write(line)
                #print(line)
            file.close()
            self.encrypt_file(key = key, in_filename = 'settings.xml', out_filename = 'settings', chunksize = 64)
            self.window.close()
            os.remove('settings.xml')
        self.ui.pushButton_confirm.clicked.connect(confirm_button)
        self.window.exec_()
    def encrypt_file(self, key, in_filename, out_filename = None, chunksize = 64 * 1024):
        """
        Encrypt a file with AES (CBC mode) using the given key.

        key:
            Encryption key -- a string of 16, 24 or 32 bytes.
        in_filename:
            Name of the file to encrypt.
        out_filename:
            Name of the encrypted file; defaults to '<in_filename>.enc'.
        chunksize:
            Size of the blocks read while encrypting.  A larger block size
            processes the file faster; it must be a multiple of 16.
        """
        if not out_filename:
            out_filename = in_filename + '.enc'
        #iv = ''.join(chr(random.randint(0, 0xFF)) for i in range(16))
        iv = Random.new().read(16)
        encryptor = AES.new(key, AES.MODE_CBC, iv)
        filesize = os.path.getsize(in_filename)
        with open(in_filename, 'rb') as infile:
            with open(out_filename, 'wb') as outfile:
                # Header: original file size (so padding can be stripped) + IV.
                outfile.write(struct.pack('<Q', filesize))
                outfile.write(iv)
                while True:
                    chunk = infile.read(chunksize)
                    if len(chunk) == 0:
                        break
                    elif len(chunk) % 16 != 0:
                        # Pad the final chunk with spaces to the AES block size.
                        chunk += b' ' * (16 - len(chunk) % 16)
                    outfile.write(encryptor.encrypt(chunk))
    def decrypt_file(self, key, in_filename, out_filename = None, chunksize = 24 * 1024):
        """
        Decrypt a file encrypted with AES (CBC mode) and the given key.

        key:
            Encryption key -- a string of 16, 24 or 32 bytes.
        in_filename:
            Name of the file to decrypt.
        out_filename:
            Name of the decrypted file; defaults to *in_filename* without
            its extension.
        chunksize:
            Size of the blocks read while decrypting.
        """
        if not out_filename:
            out_filename = os.path.splitext(in_filename)[0]
        with open(in_filename, 'rb') as infile:
            origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
            iv = infile.read(16)
            decryptor = AES.new(key, AES.MODE_CBC, iv)
            with open(out_filename, 'wb') as outfile:
                while True:
                    chunk = infile.read(chunksize)
                    if len(chunk) == 0:
                        break
                    outfile.write(decryptor.decrypt(chunk))
                # Drop the space padding appended during encryption.
                outfile.truncate(origsize)
    def __str__(self):
        output = ''
        for line in self.data.keys():
            output += line + ' = ' + self.data[line] + '\n'
        return output
    def __getitem__(self, key):
        # Implicitly returns None when the key is missing.
        if key in self.data:
            return self.data[key]
class ConnectionManager(QObject):
    """Creates the SSH tunnel and shows the connection-settings window."""
    signal_onConnect = pyqtSignal()  # Emitted after a successful SSH connection
    #signalOnDisconnect = pyqtSignal()  # Emitted when the connection is lost
    def __init__(self):
        super().__init__()
        self.window = QWidget()
        self.ui = Ui_dialog_connection()
        self.ui.setupUi(self.window)
        self.client = paramiko.SSHClient()
        # Auto-accept unknown host keys instead of failing on first connect.
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.settings = Settings()
        self.ui.pushButton_save.clicked.connect(self.writeSettings)
    def readSettings(self):
        """Copy previously loaded settings (if present) into the dialog fields."""
        if 'host' in self.settings.data.keys():
            self.ui.lineEdit_host.setText(self.settings['host'])
        if 'user' in self.settings.data.keys():
            self.ui.lineEdit_user.setText(self.settings['user'])
        if 'secret' in self.settings.data.keys():
            self.ui.lineEdit_secret.setText(self.settings['secret'])
        if 'port' in self.settings.data.keys():
            self.ui.lineEdit_port.setText(self.settings['port'])
        if 'terminal_type' in self.settings.data.keys():
            self.ui.lineEdit_terminal_type.setText(self.settings['terminal_type'])
        if 'MD5_script' in self.settings.data.keys():
            self.ui.lineEdit_MD5Script.setText(self.settings['MD5_script'])
    def writeSettings(self):
        """Collect the dialog fields and persist them via Settings.save()."""
        self.settings.data['host'] = self.ui.lineEdit_host.text()
        self.settings.data['user'] = self.ui.lineEdit_user.text()
        self.settings.data['secret'] = self.ui.lineEdit_secret.text()
        self.settings.data['port'] = self.ui.lineEdit_port.text()
        self.settings.data['terminal_type'] = self.ui.lineEdit_terminal_type.text()
        self.settings.data['MD5_script'] = self.ui.lineEdit_MD5Script.text()
        self.settings.save()
    def connect(self):
        """Establish the connection; emits signal_onConnect when it succeeds."""
        try:
            self.client.connect(hostname = self.ui.lineEdit_host.text(),
                                port = int(self.ui.lineEdit_port.text()),
                                username = self.ui.lineEdit_user.text(),
                                password = self.ui.lineEdit_secret.text())
        except paramiko.ssh_exception.BadHostKeyException:
            BadHostKeyError().show()
        except paramiko.ssh_exception.AuthenticationException:
            AuthenticationError().show()
        except paramiko.ssh_exception.SSHException:
            SSHConnectionError().show()
        except OSError as error:
            # errno 8 here is treated as an unresolvable host name.
            if error.errno == 8:
                HostError().show()
            elif type(error) == paramiko.ssh_exception.NoValidConnectionsError:
                PortError().show()
            else:
                print('OSError (Errno:', error.errno, ')\n', type(error))
        except Exception as error:
            print(type(error),'\n', error)
        else:
            self.signal_onConnect.emit()
    def show(self):
        # Load saved settings first so readSettings() can populate the dialog.
        if os.path.exists('settings'):
            self.settings.load()
        self.window.show()
        self.readSettings()
    def hide(self):
        self.window.hide()
    def setButtonsEvents(self, button_connect_func = None, button_exit_func = None):
        # Wire the Connect / Exit buttons to externally supplied callbacks.
        if button_connect_func:
            self.button_connect_func = button_connect_func
            self.ui.pushButton_connect.clicked.connect(self.button_connect_func)
        if button_exit_func:
            self.button_exit_func = button_exit_func
            self.ui.pushButton_exit.clicked.connect(self.button_exit_func)
|
s-m-i-t-a/sales_menu | tests/factories.py | Python | bsd-3-clause | 318 | 0 | # -*- coding: utf-8 -*-
import factory
from sales_menu.models import Menu
class MenuFactory(factory.DjangoModelFactory):
FACTORY_FOR = | Menu
text = factory.Sequence(lambda n: u'Menu %d' % n)
parent = None
url = factory.Sequence(lambda n: u'/menu-%d' % n)
weight = factory. | Sequence(lambda n: n)
|
flask-restful/flask-restful | tests/test_inputs.py | Python | bsd-3-clause | 12,433 | 0.001046 | from datetime import datetime, timedelta, tzinfo
import unittest
import pytz
import re
#noinspection PyUnresolvedReferences
from nose.tools import assert_equal, assert_raises # you need it for tests in form of continuations
import six
from flask_restful import inputs
def test_reverse_rfc822_datetime():
    """RFC 822 strings should parse to the matching UTC datetimes."""
    cases = (
        ("Sat, 01 Jan 2011 00:00:00 -0000", datetime(2011, 1, 1, tzinfo=pytz.utc)),
        ("Sat, 01 Jan 2011 23:59:59 -0000", datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc)),
        ("Sat, 01 Jan 2011 21:59:59 -0200", datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc)),
    )
    for raw, parsed in cases:
        yield assert_equal, inputs.datetime_from_rfc822(raw), parsed
def test_reverse_iso8601_datetime():
    """ISO 8601 strings should parse to the matching UTC datetimes."""
    cases = (
        ("2011-01-01T00:00:00+00:00", datetime(2011, 1, 1, tzinfo=pytz.utc)),
        ("2011-01-01T23:59:59+00:00", datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc)),
        ("2011-01-01T23:59:59.001000+00:00", datetime(2011, 1, 1, 23, 59, 59, 1000, tzinfo=pytz.utc)),
        ("2011-01-01T23:59:59+02:00", datetime(2011, 1, 1, 21, 59, 59, tzinfo=pytz.utc)),
    )
    for raw, parsed in cases:
        yield assert_equal, inputs.datetime_from_iso8601(raw), parsed
def test_urls():
    """Well-formed URLs must be returned unchanged by inputs.url."""
    urls = [
        'http://www.djangoproject.com/',
        'http://localhost/',
        'http://example.com/',
        'http://www.example.com/',
        'http://www.example.com:8000/test',
        'http://valid-with-hyphens.com/',
        'http://subdomain.example.com/',
        'http://200.8.9.10/',
        'http://200.8.9.10:8000/test',
        'http://valid-----hyphens.com/',
        'http://example.com?something=value',
        'http://example.com/index.php?something=value&another=value2',
        'http://foo:bar@example.com',
        'http://foo:@example.com',
        'http://foo:@2001:db8:85a3::8a2e:370:7334',
        'http://foo2:qd1%r@example.com',
    ]
    for value in urls:
        yield assert_equal, inputs.url(value), value
def check_bad_url_raises(value):
    """Assert that inputs.url rejects *value* with the plain error text."""
    try:
        inputs.url(value)
    except ValueError as e:
        assert_equal(six.text_type(e), u"{0} is not a valid URL".format(value))
    else:
        assert False, "shouldn't get here"
def test_bad_urls():
    """Malformed URLs must make inputs.url raise ValueError."""
    values = [
        'foo',
        'http://',
        'http://example',
        'http://example.',
        'http://.com',
        'http://invalid-.com',
        'http://-invalid.com',
        'http://inv-.alid-.com',
        'http://inv-.-alid.com',
        'foo bar baz',
        u'foo \u2713',
        'http://@foo:bar@example.com',
        'http://:bar@example.com',
        'http://bar:bar:bar@example.com',
    ]
    for value in values:
        yield check_bad_url_raises, value
def test_bad_url_error_message():
    """Schemeless URLs must produce the 'Did you mean: http://...' hint."""
    values = [
        'google.com',
        'domain.google.com',
        'kevin:pass@google.com/path?query',
        u'google.com/path?\u2713',
    ]
    for value in values:
        yield check_url_error_message, value
def check_url_error_message(value):
    """Assert that inputs.url suggests the http:// form for *value*."""
    try:
        inputs.url(value)
        # Reached only when no ValueError was raised; AssertionError is not
        # caught by the except clause below, so the failure propagates.
        assert False, u"inputs.url({0}) should raise an exception".format(value)
    except ValueError as e:
        assert_equal(six.text_type(e),
                     (u"{0} is not a valid URL. Did you mean: http://{0}".format(value)))
def test_regex_bad_input():
    """Non-numeric strings must be rejected by a digits-only regex input."""
    cases = (
        'abc',
        '123abc',
        'abc123',
        '',
    )
    num_only = inputs.regex(r'^[0-9]+$')
    for value in cases:
        # Bind the loop variable as a default argument: yielded tests may run
        # after the loop has finished, and a plain closure would then see only
        # the final value of ``value``.
        yield assert_raises, ValueError, lambda value=value: num_only(value)
def test_regex_good_input():
    """Strings that fully match the pattern are returned unchanged."""
    cases = (
        '123',
        '1234567890',
        '00000',
    )
    num_only = inputs.regex(r'^[0-9]+$')
    for value in cases:
        yield assert_equal, num_only(value), value
def test_regex_bad_pattern():
    """Regex error raised immediately when regex input parser is created."""
    # '[' is an unterminated character class -- invalid in any regex dialect.
    assert_raises(re.error, inputs.regex, '[')
def test_regex_flags_good_input():
    """re.IGNORECASE makes the upper-case-only pattern accept any case."""
    cases = (
        'abcd',
        'ABCabc',
        'ABC',
    )
    case_insensitive = inputs.regex(r'^[A-Z]+$', re.IGNORECASE)
    for value in cases:
        yield assert_equal, case_insensitive(value), value
def test_regex_flags_bad_input():
    """Lower-case input must fail a case-sensitive upper-case-only regex."""
    cases = (
        'abcd',
        'ABCabc'
    )
    case_sensitive = inputs.regex(r'^[A-Z]+$')
    for value in cases:
        # Bind the loop variable as a default argument: yielded tests may run
        # after the loop has finished, and a plain closure would then see only
        # the final value of ``value``.
        yield assert_raises, ValueError, lambda value=value: case_sensitive(value)
class TypesTestCase(unittest.TestCase):
    """Tests for the scalar input parsers: boolean, date, natural/positive
    integers, and bounded integer ranges."""
    def test_boolean_false(self):
        assert_equal(inputs.boolean("False"), False)
    def test_boolean_is_false_for_0(self):
        assert_equal(inputs.boolean("0"), False)
    def test_boolean_true(self):
        assert_equal(inputs.boolean("true"), True)
    def test_boolean_is_true_for_1(self):
        assert_equal(inputs.boolean("1"), True)
    def test_boolean_upper_case(self):
        assert_equal(inputs.boolean("FaLSE"), False)
    def test_boolean(self):
        # NOTE(review): duplicates test_boolean_upper_case -- candidate for removal.
        assert_equal(inputs.boolean("FaLSE"), False)
    def test_boolean_with_python_bool(self):
        """Input that is already a native python `bool` should be passed through
        without extra processing."""
        assert_equal(inputs.boolean(True), True)
        assert_equal(inputs.boolean(False), False)
    def test_bad_boolean(self):
        assert_raises(ValueError, lambda: inputs.boolean("blah"))
    def test_date_later_than_1900(self):
        assert_equal(inputs.date("1900-01-01"), datetime(1900, 1, 1))
    def test_date_input_error(self):
        # Month 13 is out of range.
        assert_raises(ValueError, lambda: inputs.date("2008-13-13"))
    def test_date_input(self):
        assert_equal(inputs.date("2008-08-01"), datetime(2008, 8, 1))
    def test_natual_negative(self):
        # NOTE(review): 'natual' is a typo for 'natural'; kept so the test id stays stable.
        assert_raises(ValueError, lambda: inputs.natural(-1))
    def test_natural(self):
        assert_equal(3, inputs.natural(3))
    def test_natual_string(self):
        # NOTE(review): 'natual' is a typo for 'natural'; kept so the test id stays stable.
        assert_raises(ValueError, lambda: inputs.natural('foo'))
    def test_positive(self):
        assert_equal(1, inputs.positive(1))
        assert_equal(10000, inputs.positive(10000))
    def test_positive_zero(self):
        assert_raises(ValueError, lambda: inputs.positive(0))
    def test_positive_negative_input(self):
        assert_raises(ValueError, lambda: inputs.positive(-1))
    def test_int_range_good(self):
        int_range = inputs.int_range(1, 5)
        assert_equal(3, int_range(3))
    def test_int_range_inclusive(self):
        # The upper bound itself is accepted.
        int_range = inputs.int_range(1, 5)
        assert_equal(5, int_range(5))
    def test_int_range_low(self):
        int_range = inputs.int_range(0, 5)
        assert_raises(ValueError, lambda: int_range(-1))
    def test_int_range_high(self):
        int_range = inputs.int_range(0, 5)
        assert_raises(ValueError, lambda: int_range(6))
def test_isointerval():
intervals = [
(
# Full precision with explicit UTC.
"2013-01-01T12:30:00Z/P1Y2M3DT4H5M6S",
(
datetime(2013, 1, 1, 12, 30, 0, tzinfo=pytz.utc),
datetime(2014, 3, 5, 16, 35, 6, tzinfo=pytz.utc),
),
),
(
# Full precision with alternate UTC indication
"2013-01-01T12:30+00:00/P2D",
(
datetime(2013, 1, 1, 12, 30, 0, tzinfo=pytz.utc),
datetime(2013, 1, 3, 12, 30, 0, tzinfo=pytz.utc),
),
),
(
# Implicit UTC with time
"2013-01-01T15:00/P1M",
(
datetime(2013, 1, 1, 15, 0, 0, tzinfo=pytz.utc),
datetime(2013, 1, 31, 15, 0, 0, tzinfo=pytz.utc),
),
),
(
# TZ conversion
"2013-01-01T17:00-05:00/P2W",
(
datetime(2013, 1, 1, 22, 0, 0, tzinfo=pytz.utc),
datetime(2013, 1, 15, 22, 0, 0, tzinfo=pytz.utc),
),
),
(
# Date upgrade to midnight-midnight period
"2013-01-01/P3D",
(
datetime(2013, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
datetime(2013, 1, 4, 0, 0, 0, 0, tzinfo=pytz.utc),
|
mheap/ansible | lib/ansible/plugins/action/eos.py | Python | gpl-3.0 | 5,886 | 0.003058 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.eos.eos import eos_provider_spec
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.network.common.utils import load_provider
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):

    def run(self, tmp=None, task_vars=None):
        """Prepare the eos connection before delegating to the normal action.

        For the legacy ``local`` connection this either spawns a persistent
        ``network_cli`` connection (cli transport) or rewrites the provider
        for eapi; for ``network_cli``/``httpapi`` it only strips redundant
        task arguments.  For cli transports it also makes sure the device
        prompt is out of config mode before the module runs.
        """
        del tmp  # tmp no longer has any effect

        socket_path = None

        if self._play_context.connection in ('network_cli', 'httpapi'):
            provider = self._task.args.get('provider', {})
            if any(provider.values()):
                display.warning('provider is unnecessary when using %s and will be ignored' % self._play_context.connection)
                del self._task.args['provider']
            if self._task.args.get('transport'):
                display.warning('transport is unnecessary when using %s and will be ignored' % self._play_context.connection)
                del self._task.args['transport']
        elif self._play_context.connection == 'local':
            provider = load_provider(eos_provider_spec, self._task.args)
            transport = provider['transport'] or 'cli'
            display.vvvv('connection transport is %s' % transport, self._play_context.remote_addr)

            if transport == 'cli':
                pc = copy.deepcopy(self._play_context)
                pc.connection = 'network_cli'
                pc.network_os = 'eos'
                pc.remote_addr = provider['host'] or self._play_context.remote_addr
                pc.port = int(provider['port'] or self._play_context.port or 22)
                pc.remote_user = provider['username'] or self._play_context.connection_user
                pc.password = provider['password'] or self._play_context.password
                pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
                pc.timeout = int(provider['timeout']) if provider['timeout'] else None
                pc.become = provider['authorize'] or False
                if pc.become:
                    pc.become_method = 'enable'
                    pc.become_pass = provider['auth_pass']

                display.vvv('using connection plugin %s (was local)' % pc.connection, pc.remote_addr)
                connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
                if connection._play_context.timeout is None:
                    connection._play_context.timeout = connection.get_option('persistent_command_timeout')

                socket_path = connection.run()
                display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
                if not socket_path:
                    return {'failed': True,
                            'msg': 'unable to open shell. Please see: ' +
                                   'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}

                task_vars['ansible_socket'] = socket_path
            else:
                self._task.args['provider'] = ActionModule.eapi_implementation(provider, self._play_context)
        else:
            return {'failed': True, 'msg': 'Connection type %s is not valid for this module' % self._play_context.connection}

        if (self._play_context.connection == 'local' and transport == 'cli') or self._play_context.connection == 'network_cli':
            # make sure we are in the right cli context which should be
            # enable mode and not config module
            if socket_path is None:
                socket_path = self._connection.socket_path

            conn = Connection(socket_path)
            out = conn.get_prompt()
            while '(config' in to_text(out, errors='surrogate_then_replace').strip():
                display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
                conn.send_command('abort')
                out = conn.get_prompt()

        result = super(ActionModule, self).run(task_vars=task_vars)
        return result

    @staticmethod
    def eapi_implementation(provider, play_context):
        """Fill in eapi provider defaults from the play context."""
        provider['transport'] = 'eapi'
        if provider.get('host') is None:
            provider['host'] = play_context.remote_addr

        if provider.get('port') is None:
            default_port = 443 if provider['use_ssl'] else 80
            provider['port'] = int(play_context.port or default_port)

        if provider.get('timeout') is None:
            provider['timeout'] = C.PERSISTENT_COMMAND_TIMEOUT

        if provider.get('username') is None:
            provider['username'] = play_context.connection_user

        if provider.get('password') is None:
            provider['password'] = play_context.password

        if provider.get('authorize') is None:
            provider['authorize'] = False

        return provider
|
wayneww/Scrabble | game_run.py | Python | gpl-3.0 | 13,893 | 0.041748 | # wayne warren 2015
import scrabble # for my shuffling etc. PRESSING ENTER WILL SCORE WORD.
import pygame
import pygame.locals
import time
import random
from pygame import mixer
from os import getcwd
top_dir = getcwd()
print top_dir
mixer.init()
# for bleep when window pops up
bad_word_alert=mixer.Sound(top_dir+'/sounds/dialog-error.ogg')
good_word_alert=mixer.Sound(top_dir+'/sounds/window-slide.ogg')
tile_pressed=mixer.Sound(top_dir+'/sounds/button-pressed.ogg')
# before rule check, i.e placing one tile unsurrounded, need list that stores empty fields by comparing with game board
# create instance of bag.
game_bag = scrabble.Bag()
# do hands
game_bag.shuffle_bag()
game_bag.player_from_bag()
game_bag.shuffle_bag()
game_bag.comp_from_bag()
player_hand = game_bag.get_player_hand() # just a list of chars rememeber
computer_hand = game_bag.get_comp_hand()
player_board = [] # content is temp stored and moved to game_board to allow resetting hand etc
game_board = [] # previous legal uses of player_board[] stay here.
pygame.init()
backdrop = pygame.image.load(top_dir+'/board/full_board.png')
# computers go flag
computer_move = False
# game message settings
font_colour = [255,0,0]
font_size = 18
game_font = pygame.font.SysFont("dejavuserif", font_size) # None or ubuntu etc
#print pygame.font.get_fonts()
msg_comp_score = "Computer score: "
msg_player_score = "Player score: "
# Set the width and height of the screen [width,height]
size = [680,680] # because full_board.png is this size
screen = pygame.display.set_mode(size)
#print pygame.display.list_modes()
print pygame.display.get_driver()
pygame.display.set_caption("Scrabble")
# TILES AREA
# map tiles to letters
tiles = pygame.image.load(top_dir+"/letters/tiles2.png")
rects = {"a":(0,0,40,40), "b":(40,0,40,40), "c":(80,0,40,40), "d":(120,0,40,40), "e":(160,0,40,40), "f":(200,0,40,40), "g":(240,0,40,40), "h":(0,40,40,40), "i":(40,40,40,40), "j":(80,40,40,40),"k":(120,40,40,40), "l":(160,40,40,40), "m":(200,40,40,40), "n":(240,40,40,40), "o":(0,80,40,40), "p":(40,80,40,40), "q":(80,80,40,40), "r":(120,80,40,40), "s":(160,80,40,40), "t":(200,80,40,40), "u":(240,80,40,40), "v":(0,120,40,40), "w":(40,120,40,40), "x":(80,120,40,40),"y":(120,120,40,40), "z":(160,120,40,40), " ":(200,120,40,40) }
clock_tick = 10 # Find right fps for it later, less cpu usage, but when computer has to find all possible moves, cpu will go up
#Loop until the user clicks the close button.
done = False
player_x = 80 # from left to right for displaying hand
computer_x = 320
# for clicking on...
tile_dimensions = []
tile_selected = False
# Used to manage how fast the screen updates
clock = pygame.time.Clock()
# Board cell anchors: top-left corners of the 40px grid squares,
# with x and y running from 40 up to 640 inclusive.
board = []
for i in range(40,680,40):
    for j in range(40,680,40):
        board.append([i,j])
# tile float
tile_float = False
float_letter = ""
# pass flags lol
player_pass = False # for deadlock at end when we both pass consecutively
computer_pass = False
comp_pass_count = 0
#print board
# -------- Main Program Loop -----------
while done == False: # or while sys memory is good and temp is good
# MOve from here to each branch that deals with their hands, and add a check to see if game bag is 0 too. take of score from losers ltrs
# f1
if len(game_bag.bag) == 0:
if len(player_hand) == 0 and computer_move:
computer_move = False
pygame.display.set_caption("Scrabble... winner is Player 1")
print "PLAYER WINS"
#done = True
if len(computer_hand) == 0 and not computer_move:
#computer_move = False
pygame.display.set_caption("Scrabble... computer is the winner")
print "COMPUTER WINS"
#done = True
if computer_move:
#print "Computer move begin. Computer hand: ", computer_hand
X_HAND = []
X_HAND = game_bag.comp_go(computer_hand) # pass current chars to it
#game_bag.comp_go(computer_hand)
if X_HAND != "PASS":
print "Computer played... ", X_HAND
computer_pass = False
good_word_alert.play() # on sucess
for item in X_HAND:
game_board.append(item)
if item[0] in computer_hand:
computer_hand.remove(item[0])
game_bag.update_empty(game_board)
comp_hand_len = len(computer_hand) # another buggy area lol
comp_add_tile_num = (7 - comp_hand_len)
# getting new tiles
game_bag.comp_pick_ammount(comp_add_tile_num)
if X_HAND == "PASS": # if passed twice, request new hand from bag.
if comp_pass_count == 0:
comp_pass_count += 1
if comp_pass_count == 1:
comp_pass_count = 0
game_bag.add_to_bag(computer_hand)
computer_hand = []
game_bag.shuffle_bag()
game_bag.comp_from_bag()
computer_hand = game_bag.get_comp_hand()
computer_pass = True
bad_word_alert.play()
#pass# next get new tiles
computer_move = False
if not computer_move: # if false, do event processing
for event in pygame.event.get(): # User did something
if event.type == pygame.QUIT: # If user clicked close
done = True # Flag that we are done so we exit this loop
if (event.type == pygame.KEYDOWN) and (event.key == pygame.K_RETURN):
print "scoring word..."
word = ""
# in case of bad word letter, hold the original player_board elements, because match_word changes it in its testing
old_player_board = [] # this is for bad word case, putting hand back to how it was
for item in player_board:
old_player_board.append(item)
# in EACH of the TWO IF cases below check for blank tiles before calling order letters
if len(game_board) == 0:
if game_bag.center_sqr_check(player_board) == "good":
game_bag.blank_check(player_board)
word = game_bag.order_letters(player_board)
else:
bad_word_alert.play() # for now let dictionary check return false
if len(game_board) > 0: # the board has been in use, so need to use a function that combines the use of player_board
# with game_board state AND finally call order_letters(x) with the word found
#print "now in case where previous word[s] have been played."
game_bag.blank_check(player_board)
word = game_bag.match_word(player_board)
#word_list =[]
# check for valid word in dictionary, then:
print "updated word", word
if game_bag.dict_check_word(word) == "good":
print "is word"
good_word_alert.play()
# the simple scoring, next add the mutlipliers [or first]. use player_board while its not reset
# pass to multiples func in game_bag, with map can still apply score with no order to letters
if len(game_board) > 0:
game_bag.letter_score_multiplier(game_bag.temp_hand, game_board) # the old game board state too, for checking multis
game_bag.temp_hand = []
if len(game_board) == 0:
game_bag.letter_score_multiplier(player_board, game_board) # playboard because temp_hand isnt made yet
# move player_board to game board on sucessful score then delete player_board. so state is preserved
for item in player_board:
game_board.append(item)
# after above appending, whole progress is saved in game_squares with its '*' and alpha
game_bag.update_empty(game_board)
player_board = []
print "Player board cleared"
#print "player hand length", len(player_hand)
hand_len = len(player_hand) # another buggy area lol
add_tile_num = (7 - hand_len)
# getting new tiles
game_bag.pick_ammount(add_tile_num)
#print "added tiles?"
# also need to reset player_board and word
word =""
# ### COMPUTER TIME #####move to computers go.
player_pass = False # i didnt pass , 'p' key is pass
computer_move = True
else:
# go through player_board and put element 0 back into player hand.
print "bad word:"
bad_word_alert.play()
#print "placing tiles back to hand..."
#print "old p board:", old_player_board
for item in old_player_board:
player_hand += item[0]
player_board =[]; old_player_board = []
word = ""
#print "placing tiles back works for now..."
# need to fill player tiles before comp goes.
# needs to be done without clearing board, because game progresses lol
if (ev |
ioanpocol/superdesk-core | apps/archive_broadcast/broadcast.py | Python | agpl-3.0 | 14,350 | 0.002927 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
import json
from eve.utils import ParsedRequest
from eve.versioning import resolve_document_version
from flask import request
from apps.archive.common import CUSTOM_HATEOAS, insert_into_versions, get_user, \
ITEM_CREATE, BROADCAST_GENRE, is_genre
from apps.packages import PackageService
from superdesk.metadata.packages import GROUPS
from superdesk.resource import Resource, build_custom_hateoas
from superdesk.services import BaseService
from superdesk.metadata.utils import item_url
from superdesk.metadata.item import CONTENT_TYPE, CONTENT_STATE, ITEM_TYPE, ITEM_STATE, PUBLISH_STATES
from superdesk import get_resource_service, config
from superdesk.errors import SuperdeskApiError
from apps.archive.archive import SOURCE
from apps.publish.content.common import ITEM_CORRECT, ITEM_PUBLISH
from superdesk.utc import utcnow
logger = logging.getLogger(__name__)
# field to be copied from item to broadcast item
FIELDS_TO_COPY = ['urgency', 'priority', 'anpa_category', 'type', 'profile',
'subject', 'dateline', 'slugline', 'place']
ARCHIVE_BROADCAST_NAME = 'archive_broadcast'
class ArchiveBroadcastResource(Resource):
    """REST resource: POST to archive/<item_id>/broadcast creates a broadcast
    version of the given archive item, optionally targeted at a desk."""
    endpoint_name = ARCHIVE_BROADCAST_NAME
    resource_title = endpoint_name

    url = 'archive/<{0}:item_id>/broadcast'.format(item_url)
    schema = {
        # Optional destination desk for the new broadcast item.
        'desk': Resource.rel('desks', embeddable=False, required=False, nullable=True)
    }
    resource_methods = ['POST']
    item_methods = []
    # Creation requires the archive_broadcast privilege.
    privileges = {'POST': ARCHIVE_BROADCAST_NAME}
class ArchiveBroadcastService(BaseService):
packageService = PackageService()
def create(self, docs):
service = get_resource_service(SOURCE)
item_id = request.view_args['item_id']
item = service.find_one(req=None, _id=item_id)
doc = docs[0]
self._valid_broadcast_item(item)
desk_id = doc.get('desk')
desk = None
if desk_id:
desk = get_resource_service('desks').find_one(req=None, _id=desk_id)
doc.pop('desk', None)
doc['task'] = {}
if desk:
doc['task']['desk'] = desk.get(config.ID_FIELD)
doc['task']['stage'] = desk.get('working_stage')
doc['task']['user'] = get_user().get('_id')
genre_list = get_resource_service('vocabularies').find_one(req=None, _id='genre') or {}
broadcast_genre = [{'qcode': genre.get('qcode'), 'name': genre.get('name')}
for genre in genre_list.get('items', [])
if genre.get('qcode') == BROADCAST_GENRE and genre.get('is_active')]
if no | t broadcast_genre:
raise SuperdeskApiError.badRequestError(message="Cannot find the {} genre.".format(BRO | ADCAST_GENRE))
doc['broadcast'] = {
'status': '',
'master_id': item_id,
'rewrite_id': item.get('rewritten_by')
}
doc['genre'] = broadcast_genre
doc['family_id'] = item.get('family_id')
for key in FIELDS_TO_COPY:
doc[key] = item.get(key)
resolve_document_version(document=doc, resource=SOURCE, method='POST')
service.post(docs)
insert_into_versions(id_=doc[config.ID_FIELD])
build_custom_hateoas(CUSTOM_HATEOAS, doc)
return [doc[config.ID_FIELD]]
def _valid_broadcast_item(self, item):
"""Validates item for broadcast.
Broadcast item can only be created for Text or Pre-formatted item.
Item state needs to be Published or Corrected
:param dict item: Item from which the broadcast item will be created
"""
if not item:
raise SuperdeskApiError.notFoundError(
message="Cannot find the requested item id.")
if not item.get(ITEM_TYPE) in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]:
raise SuperdeskApiError.badRequestError(message="Invalid content type.")
if item.get(ITEM_STATE) not in [CONTENT_STATE.CORRECTED, CONTENT_STATE.PUBLISHED]:
raise SuperdeskApiError.badRequestError(message="Invalid content state.")
def _get_broadcast_items(self, ids, include_archived_repo=False):
"""Returns list of broadcast items.
Get the broadcast items for the master_id
:param list ids: list of item ids
:param include_archived_repo True if archived repo needs to be included in search, default is False
:return list: list of broadcast items
"""
query = {
'query': {
'filtered': {
'filter': {
'bool': {
'must': {'term': {'genre.name': BROADCAST_GENRE}},
'should': {'terms': {'broadcast.master_id': ids}}
}
}
}
}
}
req = ParsedRequest()
repos = 'archive,published'
if include_archived_repo:
repos = 'archive,published,archived'
req.args = {'source': json.dumps(query), 'repo': repos}
return get_resource_service('search').get(req=req, lookup=None)
def get_broadcast_items_from_master_story(self, item, include_archived_repo=False):
"""Get the broadcast items from the master story.
:param dict item: master story item
:param include_archived_repo True if archived repo needs to be included in search, default is False
:return list: returns list of broadcast items
"""
if is_genre(item, BROADCAST_GENRE):
return []
ids = [str(item.get(config.ID_FIELD))]
return list(self._get_broadcast_items(ids, include_archived_repo))
def on_broadcast_master_updated(self, item_event, item, rewrite_id=None):
"""Runs when master item is updated.
This event is called when the master story is corrected, published, re-written
:param str item_event: Item operations
:param dict item: item on which operation performed.
:param str rewrite_id: re-written story id.
"""
status = ''
if not item or is_genre(item, BROADCAST_GENRE):
return
elif item_event == ITEM_CREATE and rewrite_id:
status = 'Master Story Re-written'
elif item_event == ITEM_PUBLISH:
status = 'Master Story Published'
elif item_event == ITEM_CORRECT:
status = 'Master Story Corrected'
broadcast_items = self.get_broadcast_items_from_master_story(item)
if not broadcast_items:
return
processed_ids = set()
for broadcast_item in broadcast_items:
try:
if broadcast_item.get('lock_user'):
continue
updates = {
'broadcast': broadcast_item.get('broadcast'),
}
if status:
updates['broadcast']['status'] = status
if not updates['broadcast']['rewrite_id'] and rewrite_id:
updates['broadcast']['rewrite_id'] = rewrite_id
if not broadcast_item.get(config.ID_FIELD) in processed_ids:
self._update_broadcast_status(broadcast_item, updates)
# list of ids that are processed.
processed_ids.add(broadcast_item.get(config.ID_FIELD))
except Exception:
logger.exception('Failed to update status for the broadcast item {}'.
format(broadcast_item.get(config.ID_FIELD)))
def _update_broadcast_status(self, item, updates):
"""Update the status of the broadcast item.
:param dict item: broadcast item to be updated
:param dict updates: broadcast updates
"""
# update the published collection as well as archive.
if item. |
n3011/deeprl | dataset/replay_v2.py | Python | mit | 4,664 | 0 | import random
import numpy as np
class ReplayBuffer(object):
def __init__(self, max_size):
self.max_size = max_size
self.cur_size = 0
self.buffer = {}
self.init_length = 0
def __len__(self):
return self.cur_size
def seed_buffer(self, episodes):
self.init_length = len(episodes)
self.add(episodes, np.ones(self.init_length))
def add(self, episodes, *args):
"""Add episodes to buffer."""
idx = 0
while self.cur_size < self.max_size and idx < len(episodes):
self.buffer[self.cur_size] = episodes[idx]
self.cur_size += 1
idx += 1
if idx < len(episodes):
remove_idxs = self.remove_n(len(episodes) - idx)
for remove_idx in remove_idxs:
self.buffer[remove_idx] = episodes[idx]
idx += 1
assert len(self.buffer) == self.cur_size
def remove_n(self, n):
"""Get n items for removal."""
# random removal
idxs = random.sample(xrange(self.init_length, self.cur_size), n)
return idxs
def get_batch(self, n):
"""Get batch of episodes to train on."""
# random batch
idxs = random.sample(xrange(self.cur_size), n)
return [self.buffer[idx] for idx in idxs], None
def update_last_batch(self, delta):
pass
class PrioritizedReplayBuffer(ReplayBuffer):
def __init__(self, max_size, alpha=0.2,
eviction_strategy='rand'):
self.max_size = max_size
self.alpha = alpha
self.eviction_strategy = eviction_strategy
assert self.eviction_strategy in ['rand', 'fifo', 'rank']
self.remove_idx = 0
self.cur_size = 0
self.buffer = {}
self.priorities = np.zeros(self.max_size)
self.init_length = 0
def __len__(self):
return self.cur_size
def add(self, episodes, priorities, new_idxs=None):
"""Add episodes to buffer."""
if new_idxs is None:
idx = 0
new_idxs = []
while self.cur_size < self.max_size and idx < len(episodes):
self.buffer[self.cur_size] = episodes[idx]
new_idxs.append(self.cur_size)
self.cur_size += 1
idx += 1
if idx < len(episodes):
remove_idxs = self.remove_n(len(episodes) - idx)
for remove_idx in remove_idxs:
self.buffer[remove_idx] = episodes[idx]
new_idxs.append(remove_idx)
idx += 1
else:
assert len(new_idxs) == len(episodes)
for new_idx, ep in zip(new_idxs, episodes):
self.buffer[new_idx] = ep
self.priorities[new_idxs] = priorities
self.priorities[0:self.init_length] = np.max(
self.priorities[self.init_length:])
assert len(self.buffer) == self.cur_size
return new_idx | s
def remove_n(se | lf, n):
"""Get n items for removal."""
assert self.init_length + n <= self.cur_size
if self.eviction_strategy == 'rand':
# random removal
idxs = random.sample(xrange(self.init_length, self.cur_size), n)
elif self.eviction_strategy == 'fifo':
# overwrite elements in cyclical fashion
idxs = [
self.init_length +
(self.remove_idx + i) % (self.max_size - self.init_length)
for i in xrange(n)]
self.remove_idx = idxs[-1] + 1 - self.init_length
elif self.eviction_strategy == 'rank':
# remove lowest-priority indices
idxs = np.argpartition(self.priorities, n)[:n]
return idxs
def sampling_distribution(self):
p = self.priorities[:self.cur_size]
p = np.exp(self.alpha * (p - np.max(p)))
norm = np.sum(p)
if norm > 0:
uniform = 0.0
p = p / norm * (1 - uniform) + 1.0 / self.cur_size * uniform
else:
p = np.ones(self.cur_size) / self.cur_size
return p
def get_batch(self, n):
"""Get batch of episodes to train on."""
p = self.sampling_distribution()
idxs = np.random.choice(self.cur_size, size=n, replace=False, p=p)
self.last_batch = idxs
return [self.buffer[idx] for idx in idxs], p[idxs]
def update_last_batch(self, delta):
"""Update last batch idxs with new priority."""
self.priorities[self.last_batch] = np.abs(delta)
self.priorities[0:self.init_length] = np.max(
self.priorities[self.init_length:])
|
OldhamMade/beanstalkctl | specs/base_spec.py | Python | mit | 3,511 | 0.001994 | import os
import signal
import subprocess
import beanstalkc
import time
import pexpect
try:
import unittest2 as unittest
except ImportError:
import unittest
from beanstalkctl.util import BeanstalkdMixin
class BaseSpec(unittest.TestCase, BeanstalkdMixin):
beanstalkd_instance = None
beanstalkd_host = '127.0.0.1'
beanstalkd_port = 11411
def _beanstalkd_path(self):
beanstalkd = os.getenv('BEANSTALKD')
if beanstalkd:
return os.path.abspath(os.path.join(
os.path.dirname(__file__),
'..', beanstalkd))
# assume beanstalkd is
# installed globally
return 'beanstalkd'
beanstalkd_path = property(_beanstalkd_path)
def _start_beanstalkd(self):
print "Using beanstalkd: {0}".format(self.beanstalkd_path)
print "Starting up the beanstalkd instance...",
self.beanstalkd_instance = subprocess.Popen(
[self.beanstalkd_path, '-p', str(self.beanstalkd_port)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
)
print 'running as {0}...'.format(self.beanstalkd_instance),
print "done."
def base_setup(self):
self._start_beanstalkd()
beanstalkctl = ' '.join([
os.path.join(
os.path.dirname(self.call('pwd')),
'bin',
'beanstalkctl'),
'--host={0}'.format(self.beanstalkd_host),
'--port={0}'.format(self.beanstalkd_port), ])
self.logfh = open(
'{0}.log'.format(self.__class__.__name__), 'w', 0)
self.beanstalkctl = pexpect.spawn(beanstalkctl, logfile=self.logfh)
self.beanstalkctl.setecho(False)
self.beanstalkctl.expect('beanstalkctl> ')
def base_teardown(self):
self.logfh.close()
if not self.beanstalkd_instance:
return
print "Shutting down the beanstalkd instance...",
self.beanstalkd_instance.terminate()
print "done."
def interact(self, cmd, expect='beanstalkctl> '):
self.beanstalkctl.sendline(cmd)
self.beanstalkctl.expect_exact(expect)
return self.get_response()
def get_response(self):
result = self.beanstalkctl.before
if result.endswith('\x1b[K'):
return result[:-6]
return result
def call(self, command, **env):
"""Run a command on the terminal.
Args:
command (str): the command to execute
Keyword Args:
**env (dict): any keyword arguments are collected into a
dictionary and passed as environment variables directly
to the subprocess call.
Returns:
tuple. A tuple containing `(stdoutdata, stderrdata)`, or None
if unsuccessful.
"""
p = subprocess.Popen(
command,
shell=False,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
result, error = p.communicate()
if error:
| raise Exception(error)
return result
| def clean(self, text):
for chunk in ('\r', r'\\x1b[K'):
text = text.replace(chunk, '')
return text.strip()
def skipped(func):
from nose.plugins.skip import SkipTest
def wrapper(*args, **kwargs):
raise SkipTest("Test %s is skipped" % func.__name__)
wrapper.__name__ = func.__name__
return wrapper
|
thomasyu888/synapsePythonClient | tests/unit/synapseclient/core/unit_test_download.py | Python | apache-2.0 | 24,938 | 0.004571 | import hashlib
import json
import os
import shutil
import tempfile
import unittest
from unittest.mock import MagicMock, patch, mock_open, call
import pytest
import requests
import synapseclient.core.constants.concrete_types as concrete_types
import synapseclient.core.multithread_download as multithread_download
from synapseclient import File, Synapse
from synapseclient.core import sts_transfer
from synapseclient import client
from synapseclient.core import utils
from synapseclient.core.exceptions import SynapseHTTPError, SynapseMd5MismatchError, SynapseError, \
SynapseFileNotFoundError
# a callable that mocks the requests.get function
class MockRequestGetFunction(object):
def __init__(self, responses):
self.responses = responses
self.i = 0
def __call__(self, *args, **kwargs):
response = self.responses[self.i]
self.i += 1
return response
# a class to iterate bogus content
class IterateContents(object):
def __init__(self, contents, buffer_size, partial_start=0, partial_end=None):
self.contents = contents
self.buffer_size = buffer_size
self.i = partial_start
self.partial_end = partial_end
self.bytes_iterated = 0
def __iter__(self):
return self
def next(self):
return self.__next__()
def __next__(self):
if self.i >= len(self.contents):
raise StopIteration()
if self.partial_end and self.i >= self.partial_end:
raise requests.exceptions.ChunkedEncodingError("Simulated partial download! Connection reset by peer!")
start = self.i
end = min(self.i + self.buffer_size, len(self.contents))
if self.partial_end:
end = min(end, self.partial_end)
self.i = end
data = self.contents[start:end].encode('utf-8')
self.bytes_iterated += len(data)
return data
def total_bytes_iterated(self):
return self.bytes_iterated
def create_mock_response(url, response_type, **kwargs):
response = MagicMock()
response.request.url = url
response.request.method = kwargs.get('method', 'GET')
response.request.headers = {}
response.request.body = None
if response_type == "redirect":
response.status_code = 301
response.headers = {'location': kwargs['location']}
elif response_type == "error":
response.status_code = kwargs.get('status_code', 500)
response.reason = kwargs.get('reason', 'fake reason')
response.text = '{{"reason":"{}"}}'.format(kwargs.get('reason', 'fake reason'))
response.json = lambda: json.loads(response.text)
elif response_type == "stream":
response.status_code = kwargs.get('status_code', 200)
response.headers = {
'content-disposition': 'attachment; filename="fname.ext"',
'content-type': 'application/octet-stream',
'content-length': len(response.text)
}
def _create_iterator(buffer_size):
response._content_iterator = IterateContents(kwargs['contents'],
kwargs['buffer_size'],
kwargs.get('partial_start', 0),
kwargs.get('partial_end', None))
return response._content_iterator
response.iter_content = _create_iterator
response.raw.tell = lambda: response._content_iterator.total_bytes_iterated()
else:
response.status_code = 200
response.text = kwargs['text']
response.json = lambda: json.loads(response.text)
response.headers = {
'content-type': 'application/json',
'content-length': len(response.text)
}
return response
def mock_generate_headers(self, headers=None):
return {}
def test_mock_download(syn):
temp_dir = tempfile.get | tempdir()
fileHandleId = "42"
objectId = "syn789"
objectType = "FileEntity"
# make bogus content
contents = "\n".join(str(i) for i in range(1000))
# compute MD5 of contents
m = hashlib.md5()
m.update(contents.encode('utf-8'))
contents_md5 = m.hexdigest()
url = "https://repo-prod.prod.sagebase.org/repo/v1/entity/syn6403467/file"
# 1. No redirects
mock_requests_get = MockRequestGetFunction([
create_mock_response(url, " | stream", contents=contents, buffer_size=1024)
])
# patch requests.get and also the method that generates signed
# headers (to avoid having to be logged in to Synapse)
with patch.object(syn._requests_session, 'get', side_effect=mock_requests_get), \
patch.object(Synapse, '_generate_headers', side_effect=mock_generate_headers):
syn._download_from_URL(url, destination=temp_dir, fileHandleId=12345, expected_md5=contents_md5)
# 2. Multiple redirects
mock_requests_get = MockRequestGetFunction([
create_mock_response(url, "redirect", location="https://fakeurl.com/asdf"),
create_mock_response(url, "redirect", location="https://fakeurl.com/qwer"),
create_mock_response(url, "stream", contents=contents, buffer_size=1024)
])
# patch requests.get and also the method that generates signed
# headers (to avoid having to be logged in to Synapse)
with patch.object(syn._requests_session, 'get', side_effect=mock_requests_get), \
patch.object(Synapse, '_generate_headers', side_effect=mock_generate_headers):
syn._download_from_URL(url, destination=temp_dir, fileHandleId=12345, expected_md5=contents_md5)
# 3. recover from partial download
mock_requests_get = MockRequestGetFunction([
create_mock_response(url, "redirect", location="https://fakeurl.com/asdf"),
create_mock_response(url, "stream", contents=contents, buffer_size=1024, partial_end=len(contents) // 7 * 3,
status_code=200),
create_mock_response(url, "stream", contents=contents, buffer_size=1024, partial_start=len(contents) // 7 * 3,
partial_end=len(contents) // 7 * 5, status_code=206),
create_mock_response(url, "stream", contents=contents, buffer_size=1024, partial_start=len(contents) // 7 * 5,
status_code=206)
])
_getFileHandleDownload_return_value = {'preSignedURL': url,
'fileHandle': {'id': 12345, 'contentMd5': contents_md5,
'concreteType': concrete_types.S3_FILE_HANDLE}}
# patch requests.get and also the method that generates signed
# headers (to avoid having to be logged in to Synapse)
with patch.object(syn._requests_session, 'get', side_effect=mock_requests_get), \
patch.object(Synapse, '_generate_headers', side_effect=mock_generate_headers), \
patch.object(Synapse, '_getFileHandleDownload', return_value=_getFileHandleDownload_return_value), \
patch.object(sts_transfer, "is_storage_location_sts_enabled", return_value=False):
syn._downloadFileHandle(fileHandleId, objectId, objectType, destination=temp_dir)
# 4. as long as we're making progress, keep trying
responses = [
create_mock_response(url, "redirect", location="https://fakeurl.com/asdf"),
create_mock_response(url, "stream", contents=contents, buffer_size=1024, partial_start=0,
partial_end=len(contents) // 11, status_code=200)
]
for i in range(1, 12):
responses.append(
create_mock_response(url, "stream", contents=contents, buffer_size=1024,
partial_start=len(contents) // 11 * i,
partial_end=len(contents) // 11 * (i + 1), status_code=206))
mock_requests_get = MockRequestGetFunction(responses)
# patch requests.get and also the method that generates signed
# headers (to avoid having to be logged in to Synapse)
with patch.object(syn._requests_session, 'get', side_effect=mock_requests_get), \
patch.object(Synapse, '_generate_heade |
ros2/launch | launch/launch/actions/conftest.py | Python | apache-2.0 | 1,143 | 0 | # Copyright 2021 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# imports needed for doctests
imp | ort launch
import launch.actions
import launch.conditions
import launch.substitutions
import pytest
@pytest.fixture(autouse=True)
def add_imports_to_doctest_namespace(doctest_namespace):
doctest_namespace['launch'] = launch
doctest_namespace['LaunchDescription'] = launch.LaunchDescription
for subpackage in (
launch.actions,
launch.conditions,
launch.substitutions,
):
for x in subpackage.__all__:
doctest_namespace[x] = | getattr(subpackage, x)
|
qisanstudio/qstudio-launch | src/studio/launch/commands/config.py | Python | mit | 69 | 0.014493 | # -*- c | oding: utf-8 -*-
from __future__ im | port unicode_literals
|
IDSIA/sacred | examples/06_randomness.py | Python | mit | 2,287 | 0 | #!/usr/bin/env python
# coding=utf-8
"""
This example showcases the randomness features of Sacred.
Sacred generates a random global seed for every expe | riment, that you can
find in the configuration. It will be different every time you run the
experiment.
Based on this global seed it will generate the special parameters ``_seed`` and
``_rnd`` for each captured function. Every time you call such a function the
``_seed`` will be different and ``_rnd`` will be differently seeded random
state. But their values depend deterministically on the global seed and on how
often the function has been called.
Here are a couple of things you shoul | d try:
- run the experiment a couple of times and notice how the results are
different every time
- run the experiment a couple of times with a fixed seed.
Notice that the results are the same::
:$ ./06_randomness.py with seed=12345 -l WARNING
[57]
[28]
695891797
[82]
- run the experiment with a fixed seed and vary the numbers parameter.
Notice that all the results stay the same except for the added numbers.
This demonstrates that all the calls to one function are in fact
independent from each other::
:$ ./06_randomness.py with seed=12345 numbers=3 -l WARNING
[57, 79, 86]
[28, 90, 92]
695891797
[82, 9, 3]
- run the experiment with a fixed seed and set the reverse parameter to true.
Notice how the results are the same, but in slightly different order.
This shows that calls to different functions do not interfere with one
another::
:$ ./06_randomness.py with seed=12345 reverse=True numbers=3 -l WARNING
695891797
[57, 79, 86]
[28, 90, 92]
[82, 9, 3]
"""
from sacred import Experiment
ex = Experiment("randomness")
@ex.config
def cfg():
reverse = False
numbers = 1
@ex.capture
def do_random_stuff(numbers, _rnd):
print([_rnd.randint(1, 100) for _ in range(numbers)])
@ex.capture
def do_more_random_stuff(_seed):
print(_seed)
@ex.automain
def run(reverse):
if reverse:
do_more_random_stuff()
do_random_stuff()
do_random_stuff()
else:
do_random_stuff()
do_random_stuff()
do_more_random_stuff()
do_random_stuff()
|
pennetti/voicebox | server/src/voicebox/ngram.py | Python | mit | 878 | 0 | from __future__ import absolute_import
class Ngram(object):
def __ | init__(self, token):
self.token | = token
self.count = 1
self.after = []
def __str__(self):
return str({
'after': self.after,
'count': self.count
})
def __repr__(self):
return str({
'after': self.after,
'count': self.count
})
def __len__(self):
return len(self.token)
def __eq__(self, other):
if type(self) is type(other):
return self.__dict__ == other.__dict__
return False
def add_after(self, token, reach):
if len(self.after) < reach:
self.after.append({})
target_dict = self.after[reach - 1]
if token in target_dict:
target_dict[token] += 1
else:
target_dict[token] = 1
|
avalcarce/gym-learn | utils.py | Python | mit | 2,550 | 0.002353 | import os
import numpy as np
def get_last_folder_id(folder_path):
t = 0
for fn in os.listdir(folder_path):
t = max(t, int(fn))
return t
def movingaverage(values, window):
weights = np.repeat(1.0, window)/window
sma = np.convolve(values, weights, 'valid')
return sma
def smooth(x, window_len=11, window='hanning'):
"""smoo | th the data using a window with requested size.
This method is based on the convolution of a scaled window with the signal.
The signal is prepared by in | troducing reflected copies of the signal
(with the window size) in both ends so that transient parts are minimized
in the begining and end part of the output signal.
input:
x: the input signal
window_len: the dimension of the smoothing window; should be an odd integer
window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
flat window will produce a moving average smoothing.
output:
the smoothed signal
example:
t=linspace(-2,2,0.1)
x=sin(t)+randn(len(t))*0.1
y=smooth(x)
see also:
numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
scipy.signal.lfilter
TODO: the window parameter could be the window itself if an array instead of a string
NOTE: length(output) != length(input), to correct this: return y[(window_len/2-1):-(window_len/2)] instead of just y.
"""
if x.ndim != 1:
raise ValueError("smooth only accepts 1 dimension arrays.")
if x.size < window_len:
raise ValueError("Input vector needs to be bigger than window size.")
if window_len < 3:
return x
if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
raise ValueError("Window is on of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")
s = np.r_[x[window_len - 1:0:-1], x, x[-1:-window_len:-1]]
# print(len(s))
if window == 'flat': # moving average
w = np.ones(window_len, 'd')
else:
w = eval('np.' + window + '(window_len)')
y = np.convolve(w / w.sum(), s, mode='valid')
# return y
return y[(window_len/2-1):-(window_len/2)]
def draw_equispaced_items_from_sequence(m, n):
"""
draw_equispaced_items_from_sequence(m, n)
Args:
m (int): How many items to draw.
n (int): Length of sequence to draw from.
"""
return [i * n // m + n // (2 * m) for i in range(m)]
|
PavanGupta01/aerospike-admin | lib/controller.py | Python | apache-2.0 | 27,343 | 0.00534 | # Copyright 2013-2014 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND | , either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lib.controllerlib import *
from lib import util
import time, os, sys, platform, shutil, urllib2, socket
def flip_keys(orig_data):
new_data = {}
for key1, data1 in orig_data.iteritems() | :
if isinstance(data1, Exception):
continue
for key2, data2 in data1.iteritems():
if key2 not in new_data:
new_data[key2] = {}
new_data[key2][key1] = data2
return new_data
@CommandHelp('Aerospike Admin')
class RootController(BaseController):
def __init__(self, seed_nodes=[('127.0.0.1',3000)]
, use_telnet=False, user=None, password=None):
super(RootController, self).__init__(seed_nodes=seed_nodes
, use_telnet=use_telnet
, user=user
, password=password)
self.controller_map = {
'info':InfoController
, 'show':ShowController
, 'asinfo':ASInfoController
, 'clinfo':ASInfoController
, 'cluster':ClusterController
, '!':ShellController
, 'shell':ShellController
, 'collectinfo':CollectinfoController
}
@CommandHelp('Terminate session')
def do_exit(self, line):
# This function is a hack for autocomplete
return "EXIT"
@CommandHelp('Returns documentation related to a command'
, 'for example, to retrieve documentation for the "info"'
, 'command use "help info".')
def do_help(self, line):
self.executeHelp(line)
@CommandHelp('"watch" Runs a command for a specified pause and iterations.'
, 'Usage: watch [pause] [iterations] [--no-diff] command]'
, ' pause: the duration between executions.'
, ' [default: 2 seconds]'
, ' iterations: Number of iterations to execute command.'
, ' [default: until keyboard interrupt]'
, ' --no-diff: Do not do diff highlighting'
, 'Example 1: Show "info network" 3 times with 1 second pause'
, ' watch 1 3 info network'
, 'Example 2: Show "info namespace" with 5 second pause until'
, ' interrupted'
, ' watch 5 info namespace')
def do_watch(self, line):
self.view.watch(self, line)
@CommandHelp('The "info" command provides summary tables for various aspects'
, 'of Aerospike functionality.')
class InfoController(CommandController):
def __init__(self):
self.modifiers = set(['with'])
@CommandHelp('Displays service, network, namespace, and xdr summary'
, 'information.')
def _do_default(self, line):
self.do_service(line)
self.do_network(line)
self.do_namespace(line)
self.do_xdr(line)
@CommandHelp('Displays summary information for the Aerospike service.')
def do_service(self, line):
stats = self.cluster.infoStatistics(nodes=self.nodes)
builds = self.cluster.info('build', nodes=self.nodes)
services = self.cluster.infoServices(nodes=self.nodes)
visible = self.cluster.getVisibility()
visibility = {}
for node_id, service_list in services.iteritems():
if isinstance(service_list, Exception):
continue
service_set = set(service_list)
if len((visible | service_set) - service_set) != 1:
visibility[node_id] = False
else:
visibility[node_id] = True
self.view.infoService(stats, builds, visibility, self.cluster, **self.mods)
@CommandHelp('Displays network information for Aerospike, the main'
, 'purpose of this information is to link node ids to'
, 'fqdn/ip addresses.')
def do_network(self, line):
stats = self.cluster.infoStatistics(nodes=self.nodes)
hosts = self.cluster.nodes
# get current time from namespace
ns_stats = self.cluster.infoAllNamespaceStatistics(nodes=self.nodes)
for host, configs in ns_stats.iteritems():
if isinstance(configs, Exception):
continue
ns = configs.keys()[0]
if ns:
# lets just add it to stats
if not isinstance(configs[ns], Exception) and \
not isinstance(stats[host], Exception):
stats[host]['current-time'] = configs[ns]['current-time']
self.view.infoNetwork(stats, hosts, self.cluster, **self.mods)
@CommandHelp('Displays summary information for each namespace.')
def do_namespace(self, line):
stats = self.cluster.infoAllNamespaceStatistics(nodes=self.nodes)
self.view.infoNamespace(stats, self.cluster, **self.mods)
@CommandHelp('Displays summary information for Cross Datacenter'
, 'Replication (XDR).')
def do_xdr(self, line):
stats = self.cluster.infoXDRStatistics(nodes=self.nodes)
builds = self.cluster.xdrInfo('build', nodes=self.nodes)
xdr_enable = self.cluster.isXDREnabled(nodes=self.nodes)
self.view.infoXDR(stats, builds, xdr_enable, self.cluster, **self.mods)
@CommandHelp('Displays summary information for Seconday Indexes (SIndex).')
def do_sindex(self, line):
stats = self.cluster.infoSIndex(nodes=self.nodes)
sindexes = {}
for host, stat_list in stats.iteritems():
for stat in stat_list:
if not stat:
continue
indexname = stat['indexname']
if indexname not in sindexes:
sindexes[indexname] = {}
sindexes[indexname][host] = stat
self.view.infoSIndex(stats, self.cluster, **self.mods)
@CommandHelp('"asinfo" provides raw access to the info protocol.'
, ' Options:'
, ' -v <command> - The command to execute'
, ' -p <port> - The port to use.'
, ' NOTE: currently restricted to 3000 or 3004'
, ' -l - Replace semicolons ";" with newlines.')
class ASInfoController(CommandController):
def __init__(self):
self.modifiers = set(['with', 'like'])
@CommandHelp('Executes an info command.')
def _do_default(self, line):
if not line:
raise ShellException("Could not understand asinfo request, " + \
"see 'help asinfo'")
mods = self.parseModifiers(line)
line = mods['line']
like = mods['like']
nodes = self.nodes
value = None
line_sep = False
xdr = False
tline = line[:]
while tline:
word = tline.pop(0)
if word == '-v':
value = tline.pop(0)
elif word == '-l':
line_sep = True
elif word == '-p':
port = tline.pop(0)
if port == '3004': # ugly Hack
xdr = True
else:
raise ShellException(
"Do not understand '%s' in '%s'"%(word
, " ".join(line)))
value = value.translate(None, "'\"")
if xdr:
results = self.cluster.xdrInfo(value, nodes=nodes)
else:
results = self.cluster.info |
johnbeard/kicad-git | pcbnew/scripting/examples/listPcbLibrary.py | Python | gpl-2.0 | 331 | 0.036254 | #!/usr/bin/env python
from pcbnew import *
lst = FootprintEnumerate("/usr/share/kicad/modules/so | ckets.mod")
for name in lst:
m = FootprintLoad("/usr/share/kicad/modules/sockets.mod",name)
print name,"->",m.GetLibRef(), m.Ge | tReference()
for p in m.Pads():
print "\t",p.GetPadName(),p.GetPosition(),p.GetPos0(), p.GetOffset()
|
cnr-isti-vclab/meshlab | src/external/openkinect/wrappers/python/demo_cv_async.py | Python | gpl-3.0 | 988 | 0.002024 | #!/usr/bin/env python
import freenect
import cv
import numpy as np
cv.NamedWindow('Depth')
cv.NamedWindow('RGB')
def display_depth(dev, data, timestamp):
data -= np.min(data.ravel())
data *= 65536 / np.max(data.ravel())
image = cv.CreateImageHeader((data.shape[1], data.shape[0]),
cv.IPL_DEPTH_16U,
1)
cv.SetData(image, data.tostring(),
data.dtype.itemsize * data.shape[1])
cv.ShowImage('Depth', image)
cv.WaitKey(5)
def display_rgb(dev, data, timestamp):
image = cv.CreateImageHeader((data.shape[1], data.shape[0]),
| cv.IPL_DEPTH_8U,
3)
# Note: We swap from RGB to BGR here
cv.SetData(image, data[:, :, ::-1].tostring(),
data.dtype.itemsize * 3 * data.shape[1])
cv.ShowImage('RGB', image)
cv.WaitKey(5)
freenect.runloop(depth=display_depth,
video=display_rgb)
| |
evernym/zeno | plenum/test/node_catchup/test_incorrect_catchup_request.py | Python | apache-2.0 | 3,607 | 0.002218 | import pytest
from plenum.common.messages.node_messages import CatchupReq
from stp_core.common.log import getlogger
from plenum.test.helper import sdk_send_random_and_check
logger = getlogger()
ledger_id = 1
def test_receive_incorrect_catchup_request_with_end_greater_catchuptill(looper,
txnPoolNodeSet,
sdk_pool_handle,
sdk_wallet_client):
end = 15
catchup_till = 10
req = CatchupReq(ledger_id, 1, end, catchup_till)
sdk_send_random_and_check(looper,
txnPoolNodeSet,
sdk_pool_handle,
sdk_wallet_client,
4)
ledger_manager = txnPoolNodeSet[0].ledgerManager
ledger_manager.processCatchupReq(req, "frm")
_check_call_discard(ledger_manager, "not able to service since "
"end = {} greater than "
"catchupTill = {}"
.format(end, catchup_till))
def test_receive_incorrect_catchup_request_with_start_greater_end(
        looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client):
    """A CatchupReq whose start exceeds its end must be discarded."""
    start = 10
    end = 5
    req = CatchupReq(ledger_id, start, end, 11)
    sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
                              sdk_wallet_client, 4)
    ledger_manager = txnPoolNodeSet[0].ledgerManager
    ledger_manager.processCatchupReq(req, "frm")
    _check_call_discard(
        ledger_manager,
        "not able to service since start = {} greater than "
        "end = {}".format(start, end))
def test_receive_incorrect_catchup_request_with_catchuptill_greater_ledger_size(
        looper, txnPoolNodeSet, sdk_pool_handle, sdk_wallet_client):
    """A CatchupReq whose catchupTill exceeds the ledger size must be discarded."""
    catchup_till = 100
    req = CatchupReq(ledger_id, 1, 10, catchup_till)
    sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
                              sdk_wallet_client, 4)
    ledger_manager = txnPoolNodeSet[0].ledgerManager
    ledger_manager.processCatchupReq(req, "frm")
    ledger_size = ledger_manager.ledgerRegistry[ledger_id].ledger.size
    _check_call_discard(
        ledger_manager,
        "not able to service since catchupTill = {} greater than "
        "ledger size = {}".format(catchup_till, ledger_size))
def test_receive_incorrect_catchup_request_for_seq_no_zero(txnPoolNodeSet):
    """A CatchupReq starting at seq_no 0 must be discarded."""
    req = CatchupReq(ledger_id, 0, 0, 1)
    manager = txnPoolNodeSet[0].ledgerManager
    manager.processCatchupReq(req, "frm")
    _check_call_discard(manager, "not able to service since start 0 is zero or less")
def _check_call_discard(ledger_manager, discard_reason):
last = ledger_manager.owner.spylog.getLastParams(
ledger_manager.owner.discard,
required=False)
assert last
assert discard_reason in last['reason']
|
feer56/Kitsune2 | kitsune/wiki/models.py | Python | bsd-3-clause | 51,285 | 0.000195 | import hashlib
import itertools
import logging
import time
from datetime import datetime, timedelta
from urlparse import urlparse
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.core.urlresolvers import resolve
from django.db import models, IntegrityError
from django.db.models import Q
from django.http import Http404
from django.utils.encoding import smart_str
import waffle
from pyquery import PyQuery
from tidings.models import NotificationsMixin
from tower import ugettext_lazy as _lazy, ugettext as _
from kitsune.gallery.models import Image
from kitsune.products.models import Product, Topic
from kitsune.search.es_utils import UnindexMeBro, es_analyzer_for_locale
from kitsune.search.models import (
SearchMappingType, SearchMixin, register_for_indexing,
register_mapping_type)
from kitsune.sumo import ProgrammingError
from kitsune.sumo.models import ModelBase, LocaleField
from kitsune.sumo.urlresolvers import reverse, split_path
from kitsune.tags.models import BigVocabTaggableMixin
from kitsune.wiki.config import (
CATEGORIES, SIGNIFICANCES, TYPO_SIGNIFICANCE, MEDIUM_SIGNIFICANCE,
MAJOR_SIGNIFICANCE, REDIRECT_HTML, REDIRECT_CONTENT, REDIRECT_TITLE,
REDIRECT_SLUG, CANNED_RESPONSES_CATEGORY, ADMINISTRATION_CATEGORY,
TEMPLATES_CATEGORY, DOC_HTML_CACHE_KEY, TEMPLATE_TITLE_PREFIX)
from kitsune.wiki.permissions import DocumentPermissionMixin
log = logging.getLogger('k.wiki')
class TitleCollision(Exception):
    """An attempt to create two pages of the same title in one locale."""
class SlugCollision(Exception):
    """An attempt to create two pages of the same slug in one locale."""
# Internal-only signal; callers of from_url() catch this, not user code.
class _NotDocumentView(Exception):
    """A URL not pointing to the document view was passed to from_url()."""
class Document(NotificationsMixin, ModelBase, BigVocabTaggableMixin,
SearchMixin, DocumentPermissionMixin):
"""A localized knowledgebase document, not revision-specific."""
title = models.CharField(max_length=255, db_index=True)
slug = models.CharField(max_length=255, db_index=True)
# Is this document a template or not?
is_template = models.BooleanField(default=False, editable=False,
db_index=True)
# Is this document localizable or not?
is_localizable = models.BooleanField(default=True, db_index=True)
# TODO: validate (against settings.SUMO_LANGUAGES?)
locale = LocaleField(default=settings.WIKI_DEFAULT_LANGUAGE, db_index=True)
# Latest approved revision. L10n dashboard depends on this being so (rather
# than being able to set it to earlier | approved revisions). (Remove "+" to
# enable reverse link.)
current_revision = models.ForeignKey('Revision', null=True,
related_name='current_for+')
# Latest revision which both is_approved and is_ready_for_lo | calization,
# This may remain non-NULL even if is_localizable is changed to false.
latest_localizable_revision = models.ForeignKey(
'Revision', null=True, related_name='localizable_for+')
# The Document I was translated from. NULL iff this doc is in the default
# locale or it is nonlocalizable. TODO: validate against
# settings.WIKI_DEFAULT_LANGUAGE.
parent = models.ForeignKey('self', related_name='translations',
null=True, blank=True)
# Cached HTML rendering of approved revision's wiki markup:
html = models.TextField(editable=False)
# A document's category must always be that of its parent. If it has no
# parent, it can do what it wants. This invariant is enforced in save().
category = models.IntegerField(choices=CATEGORIES, db_index=True)
# A document's is_archived flag must match that of its parent. If it has no
# parent, it can do what it wants. This invariant is enforced in save().
is_archived = models.BooleanField(
default=False, db_index=True, verbose_name='is obsolete',
help_text=_lazy(
u'If checked, this wiki page will be hidden from basic searches '
u'and dashboards. When viewed, the page will warn that it is no '
u'longer maintained.'))
# Enable discussion (kbforum) on this document.
allow_discussion = models.BooleanField(
default=True, help_text=_lazy(
u'If checked, this document allows discussion in an associated '
u'forum. Uncheck to hide/disable the forum.'))
# List of users that have contributed to this document.
contributors = models.ManyToManyField(User)
# List of products this document applies to.
products = models.ManyToManyField(Product)
# List of product-specific topics this document applies to.
topics = models.ManyToManyField(Topic)
# Needs change fields.
needs_change = models.BooleanField(default=False, help_text=_lazy(
u'If checked, this document needs updates.'), db_index=True)
needs_change_comment = models.CharField(max_length=500, blank=True)
# A 24 character length gives years before having to alter max_length.
share_link = models.CharField(max_length=24, default='')
# Dictates the order in which articles are displayed.
display_order = models.IntegerField(default=1, db_index=True)
# List of related documents
related_documents = models.ManyToManyField('self', blank=True)
# firefox_versions,
# operating_systems:
# defined in the respective classes below. Use them as in
# test_firefox_versions.
# TODO: Rethink indexes once controller code is near complete. Depending on
# how MySQL uses indexes, we probably don't need individual indexes on
# title and locale as well as a combined (title, locale) one.
    class Meta(object):
        # Stable display ordering; title/slug/parent are unique per locale.
        ordering = ['display_order', 'id']
        unique_together = (('parent', 'locale'), ('title', 'locale'),
                           ('slug', 'locale'))
        permissions = [('archive_document', 'Can archive document'),
                       ('edit_needs_change', 'Can edit needs_change')]
def _collides(self, attr, value):
"""Return whether there exists a doc in this locale whose `attr` attr
is equal to mine."""
return Document.objects.filter(
locale=self.locale, **{attr: value}).exclude(id=self.id).exists()
    def _raise_if_collides(self, attr, exception):
        """Raise an exception if a page of this title/slug already exists."""
        # NOTE(review): 'old_title'/'old_slug' are presumably set by the edit
        # flow before save — confirm where they are assigned.
        if self.id is None or hasattr(self, 'old_' + attr):
            # If I am new or my title/slug changed...
            if self._collides(attr, getattr(self, attr)):
                raise exception
    def clean(self):
        """Translations can't be localizable."""
        # Validate the localizability, category and template invariants, and
        # keep is_archived in sync with the parent document.
        self._clean_is_localizable()
        self._clean_category()
        self._clean_template_status()
        self._ensure_inherited_attr('is_archived')
def _clean_is_localizable(self):
"""is_localizable == allowed to have translations. Make sure that isn't
violated.
For default language (en-US), is_localizable means it can have
translations. Enforce:
* is_localizable=True if it has translations
* if has translations, unable to make is_localizable=False
For non-default langauges, is_localizable must be False.
"""
if self.locale != settings.WIKI_DEFAULT_LANGUAGE:
self.is_localizable = False
# Can't save this translation if parent not localizable
if self.parent and not self.parent.is_localizable:
raise ValidationError('"%s": parent "%s" is not localizable.' % (
unicode(self), unicode(self.parent)))
# Can't make not localizable if it has translations
# This only applies to documents that already exist, hence self.pk
if self.pk and not self.is_localizable and self.translations.exists():
raise ValidationError(
u'"{0}": document has {1} translations but is not localizable.' |
dadavidson/Python_Lab | Python-w3resource/Python_Basic/ex03.py | Python | mit | 269 | 0.003717 | # https://www.w3r | esource.com/python-exercises/
# 3. Write a Python program to display the current date and time.
# Sample Output :
# Current date and time :
# 2014-07-05 14:34:14
import datetime

# Display the current local date and time, e.g. "2014-07-05 14:34:14".
# (print(...) with a single argument behaves identically on Python 2 and 3;
# the original line also contained a garbled token that is repaired here.)
now = datetime.datetime.now()
print(now.strftime("%Y-%m-%d %H:%M:%S"))
|
gisce/sippers | sippers/parsers/endesa.py | Python | gpl-3.0 | 2,937 | 0.001021 | from __future__ import absolute_import
from sippers import logger
from sippers.utils import build_dict
from sippers.adapters.endesa import EndesaSipsAdapter, EndesaMeasuresAdapter
from sippers.models.endesa import EndesaSipsSchema, EndesaMeasuresSchema
from sippers.parsers.parser import Parser, register
class Endesa(Parser):
    """Parser for Endesa SIPS files: one supply point per delimited line."""
    pattern = '(SEVILLANA|FECSA|ERZ|UNELCO|GESA).INF.SEG0[1-5].(zip|ZIP)$'
    encoding = "iso-8859-15"
    delimiter = ';'

    def __init__(self, strict=False):
        self.adapter = EndesaSipsAdapter(strict=strict)
        self.schema = EndesaSipsSchema(strict=strict)
        # Order schema fields by their declared position in the file.
        ordered = sorted(
            self.schema.fields,
            key=lambda name: self.schema.fields[name].metadata['position'])
        self.fields_ps = [(name, self.schema.fields[name].metadata)
                          for name in ordered]
        self.headers_ps = list(ordered)
        self.fields = self.fields_ps

    def parse_line(self, line):
        """Decode and split one raw line, then load it through the adapter."""
        cells = unicode(line.decode(self.encoding)).split(self.delimiter)
        cells = [cell.strip() for cell in cells]
        parsed = {'ps': {}, 'measures': {}, 'orig': line}
        result, errors = self.adapter.load(build_dict(self.headers_ps, cells))
        if errors:
            logger.error(errors)
        parsed['ps'] = result
        return parsed, errors
register(Endesa)
class EndesaCons(Parser):
    """Parser for Endesa consumption files: an id column followed by
    repeated fixed-size groups of measure columns."""
    delimiter = ';'
    pattern = '(SEVILLANA|FECSA|ERZ|UNELCO|GESA).INF2.SEG0[1-5].(zip|ZIP)$'
    encoding = "iso-8859-15"

    def __init__(self, strict=False):
        self.schema = EndesaMeasuresSchema(strict=strict)
        self.adapter = EndesaMeasuresAdapter(strict=strict)
        self.measures_adapter = self.adapter
        self.fields = []
        self.headers = []
        for f in sorted(self.schema.fields,
                        key=lambda f: self.schema.fields[f].metadata['position']):
            field = self.schema.fields[f]
            self.fields.append((f, field.metadata))
            self.headers.append(f)
        # First column is the supply-point id; measure groups follow it.
        self.measures_start = 1
        self.measures_step = len(self.headers) - self.measures_start

    def parse_line(self, line):
        """Parse one line into its measure groups.

        Returns (parsed, errors) where errors aggregates the adapter errors
        of every group.  Bug fix: the original returned only the last group's
        ``errors`` (discarding ``all_errors``) and raised NameError on lines
        with no measure groups, where ``errors`` was never bound.
        """
        cells = [cell.strip() for cell in line.split(self.delimiter)]
        start = self.measures_start
        step = self.measures_step
        parsed = {'ps': {}, 'measures': [], 'orig': line}
        group = cells[start:start + step]
        all_errors = {}
        while group:
            # Each group is prefixed with the id column before mapping.
            group.insert(0, cells[0])
            consums = build_dict(self.headers, group)
            result, errors = self.adapter.load(consums)
            if errors:
                logger.error(errors)
                all_errors.update(errors)
            parsed['measures'].append(result)
            start += step
            group = cells[start:start + step]
        return parsed, all_errors
register(EndesaCons)
|
iluxa-com/mercurial-crew-tonfa | tests/filtertmp.py | Python | gpl-2.0 | 351 | 0.002849 | #!/usr/bin/env python
#
# This used to be a simple sed call like:
#
# $ sed "s:$HGTMP:*HGTMP*:"
#
# But $HGTMP has ':' under Windows which breaks the sed call.
#
import sys, os

# Read stdin, normalize path separators, and mask the HGTMP directory.
# Renamed the buffer from `input` (shadowed the builtin) and repaired a
# garbled token in the original replace() call.
data = sys.stdin.read()
data = data.replace(os.sep, '/')
hgtmp = os.environ['HGTMP'].replace(os.sep, '/')
data = data.replace(hgtmp, '$HGTMP')
sys.stdout.write(data)
|
masschallenge/django-accelerator | accelerator/tests/factories/criterion_option_spec_factory.py | Python | mit | 636 | 0 | from __future__ import unicode_literals
import swapper
from factory import (
    Sequence,
    SubFactory,
)
from factory.django import DjangoModelFactory

from accelerator.tests.factories.criterion_factory import (
    CriterionFactory
)
CriterionOptionSpec = swapper.load_model('accelerator', 'CriterionOptionSpec')
class CriterionOptionSpecFactory(DjangoModelFactory):
    """Factory producing CriterionOptionSpec rows with the model defaults.

    (Repairs a garbled token in the original class header.)
    """
    class Meta:
        model = CriterionOptionSpec

    # Unique option label per generated instance.
    option = Sequence(lambda n: "CriterionOptionSpec {0}".format(n))
    count = CriterionOptionSpec.DEFAULT_COUNT
    weight = CriterionOptionSpec.DEFAULT_WEIGHT
    criterion = SubFactory(CriterionFactory)
|
sjsj0101/backtestengine | utils/storage.py | Python | apache-2.0 | 4,040 | 0.003085 | # -*- coding: utf-8 -*-
'''
author: Jimmy
contact: 234390130@qq.com
file: storage.py
time: 2017/9/4 下午3:18
description:
'''
__author__ = 'Jimmy'
import pymongo
from ctp.ctp_struct import *
from bson import json_util as jsonb
from utils.tools import *
def _getDataBase():
    """Return a handle to the local ``trade`` MongoDB database."""
    connection = pymongo.MongoClient(host='127.0.0.1', port=27017)
    return connection.trade
# Order status report (OnRtnOrder).
def insertRtnOrder(event):
    """Attach strategy info and store the report in the ``order`` collection."""
    db = _getDataBase()
    record = getStrategyInfo(event.dict)
    db.order.insert(record)
# Order action error report (OnErrRtnOrderAction).
def insertErrRtnOrderAction(event):
    """Attach strategy info and store the report in ``order_action``."""
    db = _getDataBase()
    record = getStrategyInfo(event.dict)
    db.order_action.insert(record)
# Input order action response (OnRspOrderAction).
def insertRspOrderAction(event):
    """Attach strategy info and store the response in ``input_order_action``."""
    db = _getDataBase()
    record = getStrategyInfo(event.dict)
    db.input_order_action.insert(record)
# Order insertion response/error (OnRspOrderInsert, OnErrRtnOrderInsert).
def insertRspOrderInsert(event):
    """Attach strategy info and store the event in ``input_order``."""
    db = _getDataBase()
    record = getStrategyInfo(event.dict)
    db.input_order.insert(record)
# Trade (fill) report (OnRtnTrade).
def insertRtnTrade(event):
    """Attach strategy info and store the fill in the ``trade`` collection."""
    db = _getDataBase()
    record = getStrategyInfo(event.dict)
    db.trade.insert(record)
# Request error report.
def insertRspError(event):
    """Attach strategy info and store the error in ``error_info``."""
    db = _getDataBase()
    record = getStrategyInfo(event.dict)
    db.error_info.insert(record)
    # db.CThostFtdcRspInfoField.insert(event.dict)
# Persist the arguments of a newly submitted order.
def insertSendOrderArgs(event):
    """Flatten enum arguments to their member name, timestamp, and store."""
    db = _getDataBase()
    # Enum members (e.g. Direction.BUY) must be stored as plain strings;
    # str(enum).split('.')[-1] yields just the member name.
    for key in ('direction', 'price_type', 'stop_price', 'contingent_condition'):
        event.dict[key] = str(event.dict[key]).split('.')[-1]
    event.dict = _insertTime(event.dict)
    db.send_order.insert(event.dict)
# Persist the arguments of a cancel-order request.
def insertCancelOrderArgs(event):
    """Timestamp the cancel-order arguments and store them."""
    db = _getDataBase()
    event.dict = _insertTime(event.dict)
    db.cancel_order.insert(event.dict)
# Record a position snapshot.
def insertPosition(event):
    """Timestamp and store a position update in ``position``."""
    db = _getDataBase()
    record = _insertTime(event.dict)
    db.position.insert(record)
# Refresh the trading account snapshot.
def updateAccount(event):
    """Upsert the account document keyed by AccountID."""
    db = _getDataBase()
    record = _insertTime(event.dict)
    if db.account.find().count() > 0:
        db.account.update({'AccountID': record['AccountID']}, {"$set": record})
    else:
        db.account.insert(record)
# Stamp a record with the current date/time/millisecond fields.
def _insertTime(dict):
    """Add insert_date/insert_time/insert_msec keys and return the dict."""
    # getTime() (utils.tools) presumably returns (date, time, msec) — confirm.
    date = getTime()
    dict['insert_date'] = date[0]
    dict['insert_time'] = date[1]
    dict['insert_msec'] = date[2]
    return dict
def getStrategyInfo(dict):
    """Attach the strategy name/id (looked up by OrderRef) and insert time."""
    db = _getDataBase()
    dict = _insertTime(dict)
    matches = list(db.send_order.find({'order_ref': int(dict['OrderRef'])}))
    if matches:
        origin = matches[0]
        dict['strategy_name'] = origin['strategy_name']
        dict['strategy_id'] = origin['strategy_id']
    else:
        # No matching send_order record: mark the strategy as unknown.
        dict['strategy_name'] = '未知'
        dict['strategy_id'] = '未知'
    return dict
# Fetch the largest order reference used so far.
def getMaxOrderRef():
    """Return the max order_ref stored in ``send_order``, or 0 when empty."""
    db = _getDataBase()
    # Sort descending on order_ref and take the first document, if any.
    result = list(db.send_order.find({}).sort([('order_ref', -1)]).limit(1))
    if len(result) > 0:
        result = result[0]
        return int(result['order_ref'])
    else:
        return 0
def getMaxOrderActionRef():
    """Return the max order_action_ref stored in ``cancel_order``, or 0."""
    db = _getDataBase()
    newest = list(db.cancel_order.find({}).sort([('order_action_ref', -1)]).limit(1))
    if newest:
        return int(newest[0]['order_action_ref'])
    return 0
if __name__ == '__main__':
    # NOTE(review): this local redefinition of updateAccount is never invoked;
    # it looks like leftover scratch code kept for manual testing.
    def updateAccount(event):
        db = _getDataBase()
        if db.account.find().count() > 0:
            db.account.update({'AccountID': event.dict['AccountID']},
                              {"$set": event.dict})
        else:
            db.account.insert(event.dict)
nicko96/Chrome-Infra | appengine/chromium_rietveld/codereview/decorators_chromium.py | Python | bsd-3-clause | 2,669 | 0.007119 | # Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decorators for Chromium port of Rietveld."""
import mimetypes
import sha
from google.appengine.api import memcache
from django.http import HttpResponseForbidden
from . import decorators as deco
from . import models_chromium
from . import responses
def binary_required(func):
"""Decorator that processes the content argument.
Attributes set on the request:
content: a Content entity.
"""
@deco.patch_required
def binary_wrapper(request, content_type, *args, **kwds):
if content_type == "0":
content_key = request.patch.content_key
elif content_type == "1":
content_key = request.patch.patched_content_key
if not content_key or not content_key.get().data:
# The file was not modified. It was likely moved without modification.
# Return the original file.
content_key = request.patch.content_key
else:
# Other values are erroneous so request.content won't be set.
return responses.HttpTextResponse(
'Invalid content type: %s, expected 0 or 1' % content_type,
status=404)
request.mime_type = mimetypes.guess_type(request.patch.filename)[0]
request.content = content_key.get()
return func(request, *args, **kwds)
return binary_wrapper
def key_required(func):
"""Decorator | that insists that you are using a specific key."""
@deco.require_methods('POST')
def key_wrapper(request, *args, **kwds):
key = request.POST.get('password')
if request.user or not key:
return HttpResponseForbidden('You must be admin in for this function')
value = memcache.get('key_required')
if not value:
obj = models_chromium.Key.query().get()
if not obj:
# Create a dummy value so it can be edited from the datastore admin.
obj = models | _chromium.Key(hash='invalid hash')
obj.put()
value = obj.hash
memcache.add('key_required', value, 60)
if sha.new(key).hexdigest() != value:
return HttpResponseForbidden('You must be admin in for this function')
return func(request, *args, **kwds)
return key_wrapper
|
mhbu50/erpnext | erpnext/stock/doctype/customs_tariff_number/customs_tariff_number.py | Python | gpl-3.0 | 209 | 0.004785 | # Copyright (c) 2017, Frappe Technologies Pvt. L | td. and contributors
# For license information, please see license.txt
from frappe.model.document import Document
|
class CustomsTariffNumber(Document):
	# Plain doctype controller — no custom server-side behaviour yet.
	pass
|
BurningMan44/SprintCoin | qa/rpc-tests/listtransactions.py | Python | mit | 10,132 | 0.016088 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listtransactions API
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, COIN
from io import BytesIO
def txFromHex(hexstring):
    """Deserialize a hex-encoded raw transaction into a CTransaction."""
    tx = CTransaction()
    f = BytesIO(hex_str_to_bytes(hexstring))
    tx.deserialize(f)
    return tx
class ListTransactionsTest(BitcoinTestFramework):
    def setup_nodes(self):
        """Start 4 nodes with mocktime enabled for deterministic timestamps."""
        #This test requires mocktime
        enable_mocktime()
        return start_nodes(4, self.options.tmpdir)
def run_test(self):
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
assert_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
# mine a block, confirmations should change:
self.nodes[0].generate(1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
assert_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
# send-to-self:
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"send"},
{"amount":Decimal("-0.2")})
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"receive"},
{"amount":Decimal("0.2")})
# sendmany from node1: twice to self, twice to node2:
send_to = { self.nodes[0].getnewaddress() : 0.11,
self.nodes[1].getnewaddress() : 0.22,
self.nodes[0].getaccountaddress("from1") : 0.33,
self.nodes[1].getaccountaddress("toself") : 0.44 }
txid = self.nodes[1].sendmany("", send_to)
| self.sync_all()
assert_array_ | result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.11")},
{"txid":txid} )
assert_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.11")},
{"txid":txid} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.22")},
{"txid":txid} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.22")},
{"txid":txid} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.33")},
{"txid":txid} )
assert_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.33")},
{"txid":txid, "account" : "from1"} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.44")},
{"txid":txid, "account" : ""} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : "toself"} )
multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.nodes[1].generate(1)
self.sync_all()
assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
{"category":"receive","amount":Decimal("0.1")},
{"txid":txid, "account" : "watchonly"} )
# rbf is disabled in Sprint Core
# self.run_rbf_opt_in_test()
# Check that the opt-in-rbf flag works properly, for sent and received
# transactions.
def run_rbf_opt_in_test(self):
# Check whether a transaction signals opt-in RBF itself
def is_opt_in(node, txid):
rawtx = node.getrawtransaction(txid, 1)
for x in rawtx["vin"]:
if x["sequence"] < 0xfffffffe:
return True
return False
# Find an unconfirmed output matching a certain txid
def get_unconfirmed_utxo_entry(node, txid_to_match):
utxo = node.listunspent(0, 0)
for i in utxo:
if i["txid"] == txid_to_match:
return i
return None
# 1. Chain a few transactions that don't opt-in.
txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
assert(not is_opt_in(self.nodes[0], txid_1))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
# Tx2 will build off txid_1, still not opting in to RBF.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
# Create tx2 using createrawtransaction
inputs = [{"txid":utxo_to_use["txid"], "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.999}
tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
tx2_signed = self.nodes[1].signrawtransaction(tx2)["hex"]
txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
# ...and check the result
assert(not is_opt_in(self.nodes[1], txid_2))
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
# Tx3 will opt-in to RBF
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
inputs = [{"txid": txid_2, "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[1].getnewaddress(): 0.998}
tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
tx3_modified = txFromHex(tx3)
tx3_modified.vin[0].nSequence = 0
tx3 = bytes_to_hex_str(tx3_modified.serialize())
tx3_signed = self.nodes[0].signrawtransaction(tx3)['hex']
txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
assert(is_opt_in(self.nodes[0], txid_3))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
# Tx4 will chain off tx3. Doesn't signal itself, but depends on one
# that does.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
inputs = [{"txid": txid_3, "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.997}
tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
tx4_signed = self.nodes[1].signrawtr |
gramps-project/addons-source | DynamicWeb/run_dynamicweb.py | Python | gpl-2.0 | 7,642 | 0.005104 | # -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2014 Pierre Bélissent
#
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# $Id: $
"""
Dynamic Web Report generation script
This script produces the dynamic web report examples,
With the database /example/gramps/example.gramps,
And with various options.
The script is to be launched from its directory
Arguments = [-i] [report numbers]
Usage examples:
- Import example database
python run_dynamicweb.py -i
- Run reports 0 and 2
    python run_dynamicweb.py -i
- Run all reports
python run_dynamicweb.py
"""
from __future__ import print_function
import copy, re, os, os.path, subprocess, sys, traceback, locale, shutil, time, glob
# os.environ["LANGUAGE"] = "en_US"
# os.environ["LANG"] = "en_US.UTF-8"
# user_path = os.environ["GRAMPSHOME"]
# if (not os.path.exists(user_path)): raise Exception("User path GRAMPSHOME not found")
plugin_path = "."
gramps_path = os.environ["GRAMPS_RESOURCES"]
if (not os.path.exists(gramps_path)): raise Exception("Gramps path GRAMPS_RESOURCES not found")
sys.path.insert(1, gramps_path)
if sys.version_info[0] < 3:
reload(sys)
sys.setdefaultencoding('utf8')
from dynamicweb import *
from dynamicweb import _
from report_sets import *
def import_data():
    """Import the Gramps example database into 'dynamicweb_example' via the CLI."""
    path = os.path.join(gramps_path, "example", "gramps", "example.gramps")
    path = os.path.abspath(path)
    print("=" * 80)
    print("Importing data \"%s\" in database \"dynamicweb_example\"" % path)
    print("=" * 80)
    # The Gramps CLI must run from its own directory.
    os.chdir(gramps_path)
    subprocess.call([sys.executable, os.path.join(gramps_path, "Gramps.py"), "-y", "-C", "dynamicweb_example", "-i", path])
def main(report_nums):
    """Generate the selected example reports/tests and write the index pages.

    report_nums: iterable of indices; 0..len(report_list)-1 select reports,
    the following indices select entries of test_list.
    """
    # Create results directory
    results_path = os.path.join(plugin_path, "reports")
    results_path = os.path.abspath(results_path)
    if (not os.path.isdir(results_path)): os.mkdir(results_path)
    plugvers = plugin_version(plugin_path)
    # Initialize index pages
    html_index = html_index_0
    html_procedures = html_procedures_0
    for (report_num, report_set) in enumerate(report_list):
        if (report_num not in report_nums): continue
        report_name = "report_%03i" % report_num
        # Build the report title and path
        title = report_set['title']
        print("=" * 80)
        print("%s:" % report_name)
        print("Exporting with options: %s" % title)
        print("=" * 80)
        target = os.path.join(results_path, report_name)
        # Build the report options form the default options + the report set options
        o = copy.deepcopy(default_options)
        o.update(report_set['options'])
        o.update({
            'title': title,
            'target': target,
            'archive_file': os.path.join(target, os.path.basename(o['archive_file'])),
        })
        # Serialize options as "key=value,..." for the Gramps CLI -p flag.
        param = ",".join([
            (key + "=" + (str(value) if isinstance(value, (int, bool)) else value))
            for (key, value) in o.items()
        ])
        # Setup environment variables
        os.environ.update(report_set['environ'])
        # Call Gramps CLI
        if (sys.version_info[0] < 3):
            param = param.encode("UTF-8")
        os.chdir(gramps_path)
        # subprocess.call([sys.executable, os.path.join(gramps_path, "Gramps.py"), "-d", ".DynamicWeb", "-q", "-O", "dynamicweb_example", "-a", "report", "-p", param])
        subprocess.call([sys.executable, os.path.join(gramps_path, "Gramps.py"), "-q", "-O", "dynamicweb_example", "-a", "report", "-p", param])
        # Update index pages
        p = report_name + "/" + report_set['link']
        html_index += "<li><a href='%s'>%s</a></li>" % (p, report_set['title'])
        for procedure in report_set['procedures']:
            p = report_name + "/" + procedure['path']
            html_procedures += "<li>%s<br><a href='%s'>%s</a></li>" % (procedure['what'], p, p)
    for (test_num, test_set) in enumerate(test_list):
        if ((test_num + len(report_list)) not in report_nums): continue
        test_name = "test_%03i" % test_num
        # Build the test title and path
        title = test_set['title']
        print("=" * 80)
        print("%s:" % test_name)
        print("Exporting with options: %s" % title)
        print("=" * 80)
        target = os.path.join(results_path, test_name)
        # Build the test options form the default options + the test set options
        o = copy.deepcopy(default_options)
        o.update(test_set['options'])
        o.update({
            'title': title,
            'target': target,
            'archive_file': os.path.join(target, os.path.basename(o['archive_file'])),
        })
        param = ",".join([
            (key + "=" + (str(value) if isinstance(value, (int, bool)) else value))
            for (key, value) in o.items()
        ])
        # Setup environment variables
        os.environ.update(test_set['environ'])
        # Call Gramps CLI
        if (sys.version_info[0] < 3):
            param = param.encode("UTF-8")
        os.chdir(gramps_path)
        subprocess.call([sys.executable, os.path.join(gramps_path, "Gramps.py"), "-q", "-O", "dynamicweb_example", "-a", "report", "-p", param])
    # Generate index pages
    html_index += html_index_1 % (default_options['name'], plugvers, VERSION)
    f = codecs.open(os.path.join(results_path, "index.html"), "w", encoding = "UTF-8", errors="xmlcharrefreplace")
    f.write(html_index)
    f.close()
    html_procedures += html_procedures_1 % (default_options['name'], plugvers, VERSION)
    f = codecs.open(os.path.join(results_path, "procedures.html"), "w", encoding = "UTF-8", errors="xmlcharrefreplace")
    f.write(html_procedures)
    f.close()
##############################################################
# Unbuffered screen output
# needed in some environments (cygwin for example)
# otherwise the print statements are not printed in the correct order
class Unbuffered(object):
    """Stream proxy that flushes after every write.

    Keeps stdout/stderr output ordered in environments (e.g. cygwin)
    that otherwise buffer aggressively.
    """

    def __init__(self, stream):
        self.stream = stream

    def write(self, data):
        target = self.stream
        target.write(data)
        target.flush()

    def __getattr__(self, attr):
        # Delegate every other attribute to the wrapped stream.
        return getattr(self.stream, attr)
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
##############################################################
if __name__ == '__main__':
try:
# Import database argument
if (len(sys.argv) == 2 and sys.argv[1] == "-i"):
import_data()
sys.exit(0);
# Reports numbers arguments
report_nums = range(len(report_list) + len(test_list))
if (len(sys.argv) > 1):
report_nums = [
int(sys.argv[i])
for i in range(1, len(sys.argv))
]
# Launch reports generation
print("Exporting reports: %s" % str(report_nums))
main(report_nums)
except Exception as ex:
sys.stderr.write(str(ex))
sys.stderr.write("\n")
traceback.print_exc()
sys.exit(1)
|
mattoufoutu/scoopy | scoopy/oauth.py | Python | gpl-3.0 | 6,386 | 0.000783 | # -*- coding: utf-8 -*-
#
# This file is part of scoopy.
#
# Scoopy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scoopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scoopy. If not, see <http://www.gnu.org/licenses/>.
#
"""
.. module:: scoopy.oauth
.. moduleauthor:: Mathieu D. (MatToufoutu) <mattoufootu[at]gmail.com>
"""
import os
from time import time
from urllib import urlencode
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
try:
import cPickle as pickle
except ImportError:
import pickle
import oauth2
__all__ = [
'REQUEST_TOKEN_URL',
'ACCESS_TOKEN_URL',
'AUTHORIZE_URL',
'OAuthException',
'OAuthRequestFailure',
'OAuthTokenError',
'OAuth',
]
BASE_URL = 'http://www.scoop.it'
REQUEST_TOKEN_URL = '%s/oauth/request' % BASE_URL
ACCESS_TOKEN_URL = '%s/oauth/access' % BASE_URL
AUTHORIZE_URL = '%s/oauth/authorize' % BASE_URL
class OAuthException(Exception):
"""
Basic exception for OAuth related errors.
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class OAuthRequestFailure(OAuthException):
"""
Exception raised when a request fails.
"""
pass
class OAuthTokenError(OAuthException):
"""
Exception raised when a token isn't set and
an operation requiring one is performed.
"""
pass
class OAuth(object):
"""
Helper class for all OAuth related actions.
"""
signature_method = oauth2.SignatureMethod_HMAC_SHA1()
def __init__(self, consumer_key, consumer_secret):
"""
:param consumer_key: The application's API consumer key.
:type consumer_key: str.
:param consumer_secret: The application's API consumer secret.
:type consumer_secret: str.
"""
self.consumer = oauth2.Consumer(consumer_key, consumer_secret)
self.client = oauth2.Client(self.consumer)
self.token = None
self.access_granted = False
def save_token(self, filepath):
if os.path.exists(filepath):
os.remove(filepath)
if self.token is None:
raise OAuthTokenError('no token found, get one first')
#TODO: if access is not granted, warn user the token saved will be a request_token
db = {'oauth_token': self.token.key,
'oauth_token_secret': self.token.secret}
outfile = open(filepath, 'wb')
try:
pickle.dump(db, outfile, protocol=pickle.HIGHEST_PROTOCOL)
finally:
outfile.close()
def load_token(self, filepath):
infile = open(filepath, 'rb')
try:
db = pickle.load(infile)
finally:
infile.close()
self.token = oauth2.Token(
db['oauth_token'],
db['oauth_token_secret']
)
self.client = oauth2.Client(self.consumer, self.token)
def get_request_token(self):
"""
Request the server for a request_token and return it.
"""
response, content = self.client.request(REQUEST_TOKEN_URL)
if response['status'] != '200':
raise OAuthRequestFailure(
"failed to get request_token (%s)" % response['status']
)
request_token = dict(parse_qsl(content))
self.token = oauth2.Token(
request_token['oauth_token'],
request_token['oauth_token_secret']
)
def get_access_token_url(self, callback_url):
"""
Generate the URL needed for the user to accept the application
and return it.
"""
if self.token is None:
raise OAuthTokenError(
"no request_token found, get one first"
)
#TODO: warn user if access already granted
return "%s?oauth_token=%s&oauth_callback=%s" % (
AUTHORIZE_URL,
self.token.key,
callback_url
)
def get_access_token(self, token_verifier):
"""
Request the server for an access token and return it.
"""
self.token.set_verifier(token_verifier)
self.client = oauth2.Client(self.consumer, self.token)
response, content = self.client.request(ACCESS_TOKEN_URL, 'POST')
if response['status'] != '200':
raise OAuthRequestFailure(
"failed to get access_token (%s)" % response['status']
)
self.access_granted = True
access_token = dict(parse_qsl(content))
self.token = oauth2.Token(
access_token['oauth_token'],
access_token['oauth_token_secret'],
)
self.client = oauth2.Client(self.consumer, self.token)
def generate_request_params(self, params):
"""
Given a dict of parameters, add the needed oauth_* parameters
to it and return an url-encoded string. |
"""
request_params = {
'oauth_version': '1.0',
'oauth_nonce': oauth2.generate_nonce(),
'oauth_timestamp': int(time()),
'oauth_token': self.token.key,
'oauth_consumer_key': self.consumer.key,
}
for key, value in params.iteritems():
request_params[key] = value
return urlencode(request_p | arams)
def request(self, url, params, method='GET'):
request_params = ''
if method.lower() == 'get':
if params:
url += ('?' + urlencode(params))
elif method.lower() == 'post':
request_params = self.generate_request_params(params)
else:
raise OAuthRequestFailure("request method can only be 'GET' or 'POST'")
return self.client.request(
url,
method=method,
body=request_params,
headers={'Accept-encoding': 'gzip'},
)
|
amanzi/ats-dev | tools/visit_ats/visit_ats/visit_rcParams.py | Python | bsd-3-clause | 5,950 | 0.008403 | import datetime
rcParams = {'font.family':'Times',
'axes.2D.tickson':True,
'axes.2D.title.fontscale':2.0,
'axes.2D.label.fontscale':2.0,
'axes.2D.x.title':"x-coordinate [m]",
'axes.2D.y.title':"z-coordinate [m]",
'axes.3D.tickson':False,
'axes.3D.title.fontscale':2.0,
'axes.3D.label.fontscale':2.0,
'axes.3D.x.title':None,
'axes.3D.y.title':None,
'axes.3D.z.title':None,
'legend.fontheight':0.05,
'legend.scale':(1.,1.8),
'legend.position':(0.02,0.76),
'legend.title.fontheight':0.04,
'legend.title.position':(0.018,0.82),
'legend.minmax':True,
'figsize':(2048,1536),
'pseudocolor.linewidth':3,
'contour.linewidth':1,
'contour.color':'k',
'time.format':'%b %d',
'time.zero': datetime.datetime(year=2005, month=1, day=1),
'time.location': (0.018,0.9),
'time.fontheight':0.08,
'time.round': None,
'time.window': 1,
'var.renames' : {"saturation liquid": "liquid saturation",
"saturation ice": "ice saturation",
"saturation gas": "gas saturation"
},
'var.units' : {"saturation": "-",
"depth": "m",
"temperature": "K",
"pressure": "Pa"
},
'var.limits' : {"saturation": (0.,1.),
"depth": (0.,0.01),
"temperature": (-25,10)
}
}
rcParams_poster = {'legend.fontheight':0.025,
'legend.title.fontheight':0.025,
'legend.scale':(1.1,1.),
'time.fontheight':0.04,
}
import visit as v
# fonts
_fonts = {"Arial": 0,
"Courier": 1,
"Times": 2
}
def getDefaultFont():
return _fonts[rcParams['font.family']]
def getAnnotationAttributes():
annot = v.AnnotationAttributes()
annot.userInfoFlag = 0
annot.databaseInfoFl | ag = 0
# 3D
annot.axes3D.triadFlag = 0
annot.axes3D.bboxFlag = 0
# clobber the names
if rcParams['axes.3D.x.title'] is not None:
annot.axes3D.xAxis.title.userTitle = 1
annot.axes3D.xAxis.title.title = rcParams['axes.3D.x.title']
else:
annot.axes3D.xAxis.title.visible = 0
if rcParams['axes.3D.y.title'] is not None:
| annot.axes3D.yAxis.title.userTitle = 1
annot.axes3D.yAxis.title.title = rcParams['axes.3D.y.title']
else:
annot.axes3D.yAxis.title.visible = 0
if rcParams['axes.3D.z.title'] is not None:
annot.axes3D.zAxis.title.userTitle = 1
annot.axes3D.zAxis.title.title = rcParams['axes.3D.z.title']
else:
annot.axes3D.zAxis.title.visible = 0
# move the axes to outside edges
annot.axes3D.tickLocation = annot.axes3D.OutsideEdges
if not rcParams['axes.3D.tickson']:
annot.axes3D.visible = 0
# 2D
if rcParams['axes.2D.x.title'] is not None:
annot.axes2D.xAxis.title.userTitle = 1
annot.axes2D.xAxis.title.title = rcParams['axes.2D.x.title']
else:
annot.axes2D.xAxis.title.visible = 0
if rcParams['axes.2D.y.title'] is not None:
annot.axes2D.yAxis.title.userTitle = 1
annot.axes2D.yAxis.title.title = rcParams['axes.2D.y.title']
else:
annot.axes2D.xAxis.title.visible = 0
if not rcParams['axes.2D.tickson']:
annot.axes2D.visible = 0
# Fonts
fnum = getDefaultFont()
annot.axes2D.xAxis.title.font.font = fnum
annot.axes2D.xAxis.title.font.scale = rcParams['axes.2D.title.fontscale']
annot.axes2D.xAxis.label.font.font = fnum
annot.axes2D.xAxis.label.font.scale = rcParams['axes.2D.label.fontscale']
annot.axes2D.yAxis.title.font.font = fnum
annot.axes2D.yAxis.title.font.scale = rcParams['axes.2D.title.fontscale']
annot.axes2D.yAxis.label.font.font = fnum
annot.axes2D.yAxis.label.font.scale = rcParams['axes.2D.label.fontscale']
annot.axes3D.xAxis.title.font.font = fnum
annot.axes3D.xAxis.title.font.scale = rcParams['axes.3D.title.fontscale']
annot.axes3D.xAxis.label.font.font = fnum
annot.axes3D.xAxis.label.font.scale = rcParams['axes.3D.label.fontscale']
annot.axes3D.yAxis.title.font.font = fnum
annot.axes3D.yAxis.title.font.scale = rcParams['axes.3D.title.fontscale']
annot.axes3D.yAxis.label.font.font = fnum
annot.axes3D.yAxis.label.font.scale = rcParams['axes.3D.label.fontscale']
annot.axes3D.zAxis.title.font.font = fnum
annot.axes3D.zAxis.title.font.scale = rcParams['axes.3D.title.fontscale']
annot.axes3D.zAxis.label.font.font = fnum
annot.axes3D.zAxis.label.font.scale = rcParams['axes.3D.label.fontscale']
return annot
def renameScalar(oldname):
newname = oldname.split(".")[0].replace("_", " ").replace("-", " ")
units = None
try:
newname = rcParams['var.renames'][newname]
except KeyError:
pass
units = None
try:
units = rcParams['var.units'][newname]
except KeyError:
try:
units = rcParams['var.units'][newname.split(" ")[-1]]
except KeyError:
pass
if units is not None:
newname = newname + " [%s]"%units
return newname
def getLimits(name):
newname = name.split(".")[0].replace("_", " ")
try:
return rcParams['var.limits'][newname]
except KeyError:
pass
try:
return rcParams['var.limits'][newname.split(" ")[-1]]
except KeyError:
pass
try:
return rcParams['var.limits'][newname.split(" ")[0]]
except KeyError:
pass
return None
|
ttreeagency/PootleTypo3Org | pootle/apps/pootle_misc/browser.py | Python | gpl-2.0 | 4,675 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2013 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public | License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext_lazy as _, ungettext
from pootle_misc import dispatch
from pootle_misc.stats import get_raw_stats, stats_descriptions
HEADING_CHOICES = [
{
'id': 'name',
'class': 'stats',
'display_name': _("Name"),
},
{
'id': 'project',
'class': 'stats',
'display_name': _("Project"),
},
{
'id': 'language',
'cl | ass': 'stats',
'display_name': _("Language"),
},
{
'id': 'progress',
'class': 'stats',
# Translators: noun. The graphical representation of translation status
'display_name': _("Progress"),
},
{
'id': 'total',
'class': 'stats-number sorttable_numeric',
# Translators: Heading representing the total number of words of a file
# or directory
'display_name': _("Total"),
},
{
'id': 'need-translation',
'class': 'stats-number sorttable_numeric',
'display_name': _("Need Translation"),
},
{
'id': 'suggestions',
'class': 'stats-number sorttable_numeric',
# Translators: The number of suggestions pending review
'display_name': _("Suggestions"),
},
{
'id': 'activity',
'class': 'stats',
'display_name': _("Last Activity"),
},
]
def get_table_headings(choices):
"""Filters the list of available table headings to the given `choices`."""
return filter(lambda x: x['id'] in choices, HEADING_CHOICES)
def make_generic_item(path_obj, action):
"""Template variables for each row in the table.
:func:`make_directory_item` and :func:`make_store_item` will add onto these
variables.
"""
try:
stats = get_raw_stats(path_obj, include_suggestions=True)
info = {
'href': action,
'href_all': dispatch.translate(path_obj),
'href_todo': dispatch.translate(path_obj, state='incomplete'),
'href_sugg': dispatch.translate(path_obj, state='suggestions'),
'stats': stats,
'tooltip': _('%(percentage)d%% complete',
{'percentage': stats['translated']['percentage']}),
'title': path_obj.name,
}
errors = stats.get('errors', 0)
if errors:
info['errortooltip'] = ungettext('Error reading %d file',
'Error reading %d files',
errors, errors)
info.update(stats_descriptions(stats))
except IOError, e:
info = {
'href': action,
'title': path_obj.name,
'errortooltip': e.strerror,
'data': {'errors': 1},
}
return info
def make_directory_item(directory):
action = directory.pootle_path
item = make_generic_item(directory, action)
item.update({
'icon': 'folder',
'isdir': True,
})
return item
def make_store_item(store):
action = store.pootle_path
item = make_generic_item(store, action)
item.update({
'icon': 'file',
'isfile': True,
})
return item
def get_children(translation_project, directory):
"""Returns a list of children directories and stores for this
``directory``, and also the parent directory.
The elements of the list are dictionaries which keys are populated after
in the templates.
"""
parent = []
parent_dir = directory.parent
if not (parent_dir.is_language() or parent_dir.is_project()):
parent = [{'title': u'..', 'href': parent_dir}]
directories = [make_directory_item(child_dir)
for child_dir in directory.child_dirs.iterator()]
stores = [make_store_item(child_store)
for child_store in directory.child_stores.iterator()]
return parent + directories + stores
|
janusnic/21v-python | unit_20/matplotlib/pyplot_index_formatter.py | Python | mit | 556 | 0.005396 | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import matplotlib.ticke | r as ticker
r = mlab.csv2rec('data/imdb.csv')
r.sort()
r = r[-30:] # get t | he last 30 days
N = len(r)
ind = np.arange(N) # the evenly spaced plot indices
def format_date(x, pos=None):
thisind = np.clip(int(x+0.5), 0, N-1)
return r.date[thisind].strftime('%Y-%m-%d')
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(ind, r.adj_close, 'o-')
ax.xaxis.set_major_formatter(ticker.FuncFormatter(format_date))
fig.autofmt_xdate()
plt.show() |
vvw/gensim | gensim/test/test_corpora.py | Python | gpl-3.0 | 9,040 | 0.000996 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking corpus I/O formats (the corpora package).
"""
import logging
import os.path
import unittest
import tempfile
import itertools
from gensim.utils import to_unicode, smart_extension
from gensim.corpora import (bleicorpus, mmcorpus, lowcorpus, svmlightcorpus,
ucicorpus, malletcorpus, textcorpus, indexedcorpus)
# needed because sample data files are located in the same folder
module_path = os.path.dirname(__file__)
datapath = lambda fname: os.path.join( | module_path, 'test_data', fname)
def testfile():
# temporary data will be stored to this file
return os.path.join(tempfile.gettempdir(), 'gensim_corpus.tst')
class CorpusTestCase(unittest.TestCase):
TEST_CORPU | S = [[(1, 1.0)], [], [(0, 0.5), (2, 1.0)], []]
def run(self, result=None):
if type(self) is not CorpusTestCase:
super(CorpusTestCase, self).run(result)
def tearDown(self):
# remove all temporary test files
fname = testfile()
extensions = ['', '', '.bz2', '.gz', '.index', '.vocab']
for ext in itertools.permutations(extensions, 2):
try:
os.remove(fname + ext[0] + ext[1])
except OSError:
pass
def test_load(self):
fname = datapath('testcorpus.' + self.file_extension.lstrip('.'))
corpus = self.corpus_class(fname)
docs = list(corpus)
# the deerwester corpus always has nine documents
self.assertEqual(len(docs), 9)
def test_len(self):
fname = datapath('testcorpus.' + self.file_extension.lstrip('.'))
corpus = self.corpus_class(fname)
# make sure corpus.index works, too
corpus = self.corpus_class(fname)
self.assertEqual(len(corpus), 9)
# for subclasses of IndexedCorpus, we need to nuke this so we don't
# test length on the index, but just testcorpus contents
if hasattr(corpus, 'index'):
corpus.index = None
self.assertEqual(len(corpus), 9)
def test_empty_input(self):
with open(testfile(), 'w') as f:
f.write('')
with open(testfile() + '.vocab', 'w') as f:
f.write('')
corpus = self.corpus_class(testfile())
self.assertEqual(len(corpus), 0)
docs = list(corpus)
self.assertEqual(len(docs), 0)
def test_save(self):
corpus = self.TEST_CORPUS
# make sure the corpus can be saved
self.corpus_class.save_corpus(testfile(), corpus)
# and loaded back, resulting in exactly the same corpus
corpus2 = list(self.corpus_class(testfile()))
self.assertEqual(corpus, corpus2)
def test_serialize(self):
corpus = self.TEST_CORPUS
# make sure the corpus can be saved
self.corpus_class.serialize(testfile(), corpus)
# and loaded back, resulting in exactly the same corpus
corpus2 = self.corpus_class(testfile())
self.assertEqual(corpus, list(corpus2))
# make sure the indexing corpus[i] works
for i in range(len(corpus)):
self.assertEqual(corpus[i], corpus2[i])
def test_serialize_compressed(self):
corpus = self.TEST_CORPUS
for extension in ['.gz', '.bz2']:
fname = testfile() + extension
# make sure the corpus can be saved
self.corpus_class.serialize(fname, corpus)
# and loaded back, resulting in exactly the same corpus
corpus2 = self.corpus_class(fname)
self.assertEqual(corpus, list(corpus2))
# make sure the indexing `corpus[i]` syntax works
for i in range(len(corpus)):
self.assertEqual(corpus[i], corpus2[i])
def test_switch_id2word(self):
fname = datapath('testcorpus.' + self.file_extension.lstrip('.'))
corpus = self.corpus_class(fname)
if hasattr(corpus, 'id2word'):
firstdoc = next(iter(corpus))
testdoc = set((to_unicode(corpus.id2word[x]), y) for x, y in firstdoc)
self.assertEqual(testdoc, set([('computer', 1), ('human', 1), ('interface', 1)]))
d = corpus.id2word
d[0], d[1] = d[1], d[0]
corpus.id2word = d
firstdoc2 = next(iter(corpus))
testdoc2 = set((to_unicode(corpus.id2word[x]), y) for x, y in firstdoc2)
self.assertEqual(testdoc2, set([('computer', 1), ('human', 1), ('interface', 1)]))
def test_indexing(self):
fname = datapath('testcorpus.' + self.file_extension.lstrip('.'))
corpus = self.corpus_class(fname)
docs = list(corpus)
for idx, doc in enumerate(docs):
self.assertEqual(doc, corpus[idx])
self.assertEqual(docs, list(corpus[:]))
self.assertEqual(docs[0:], list(corpus[0:]))
self.assertEqual(docs[0:-1], list(corpus[0:-1]))
self.assertEqual(docs[2:4], list(corpus[2:4]))
self.assertEqual(docs[::2], list(corpus[::2]))
self.assertEqual(docs[::-1], list(corpus[::-1]))
# make sure sliced corpora can be iterated over multiple times
c = corpus[:]
self.assertEqual(docs, list(c))
self.assertEqual(docs, list(c))
self.assertEqual(len(docs), len(corpus))
self.assertEqual(len(docs), len(corpus[:]))
self.assertEqual(len(docs[::2]), len(corpus[::2]))
class TestMmCorpus(CorpusTestCase):
def setUp(self):
self.corpus_class = mmcorpus.MmCorpus
self.file_extension = '.mm'
def test_serialize_compressed(self):
# MmCorpus needs file write with seek => doesn't support compressed output (only input)
pass
class TestSvmLightCorpus(CorpusTestCase):
def setUp(self):
self.corpus_class = svmlightcorpus.SvmLightCorpus
self.file_extension = '.svmlight'
class TestBleiCorpus(CorpusTestCase):
def setUp(self):
self.corpus_class = bleicorpus.BleiCorpus
self.file_extension = '.blei'
def test_save_format_for_dtm(self):
corpus = [[(1, 1.0)], [], [(0, 5.0), (2, 1.0)], []]
test_file = testfile()
self.corpus_class.save_corpus(test_file, corpus)
with open(test_file) as f:
for line in f:
# unique_word_count index1:count1 index2:count2 ... indexn:counnt
tokens = line.split()
words_len = int(tokens[0])
if words_len > 0:
tokens = tokens[1:]
else:
tokens = []
self.assertEqual(words_len, len(tokens))
for token in tokens:
word, count = token.split(':')
self.assertEqual(count, str(int(count)))
class TestLowCorpus(CorpusTestCase):
TEST_CORPUS = [[(1, 1)], [], [(0, 2), (2, 1)], []]
def setUp(self):
self.corpus_class = lowcorpus.LowCorpus
self.file_extension = '.low'
class TestUciCorpus(CorpusTestCase):
TEST_CORPUS = [[(1, 1)], [], [(0, 2), (2, 1)], []]
def setUp(self):
self.corpus_class = ucicorpus.UciCorpus
self.file_extension = '.uci'
def test_serialize_compressed(self):
# UciCorpus needs file write with seek => doesn't support compressed output (only input)
pass
class TestMalletCorpus(CorpusTestCase):
TEST_CORPUS = [[(1, 1)], [], [(0, 2), (2, 1)], []]
def setUp(self):
self.corpus_class = malletcorpus.MalletCorpus
self.file_extension = '.mallet'
def test_load_with_metadata(self):
fname = datapath('testcorpus.' + self.file_extension.lstrip('.'))
corpus = self.corpus_class(fname)
corpus.metadata = True
self.assertEqual(len(corpus), 9)
docs = list(corpus)
self.assertEqual(len(docs), 9)
for i, docmeta in enumerate(docs):
doc, metadata = docmeta
self.assertEqual(metadata[0], str(i + 1))
self.assertEqual(meta |
danithaca/berrypicking | django/autocomplete/demo/views.py | Python | gpl-2.0 | 569 | 0.003515 | from django.contrib.auth.models import User
from django.db.models import Q
from django.shortcuts import render
def demo(request):
retur | n render(request, 'demo/demo.html')
def demo_autocomplete(request | , template_name='demo/autocomplete.html'):
q = request.GET.get('q', '')
context = {'q': q}
queries = {
'users': User.objects.filter(Q(username__icontains=q) | Q(first_name__icontains=q) | Q(last_name__icontains=q) | Q(email__icontains=q)).distinct()[:3]
}
context.update(queries)
return render(request, template_name, context) |
tmkdev/cwd | cwd_helpers.py | Python | gpl-2.0 | 1,076 | 0.001859 | from configuration import *
from models.cwdlogs import *
from models.dataaccess import *
from utils.alarmsender import *
from logging import *
from sqlalchemy.orm.exc import *
def delcurrent(db, name):
db.query(CurrentJobs).filter(CurrentJobs.name == name).delete()
db.commit()
def clearalarm(db,name):
try:
db.query(Alarms).filter(Alarms.name == name).one()
sendalarm(name, 'ok')
db.query(Alarms).filter(Alarms.name == name).delete()
db.commit()
except NoResultFound:
pass
def raisealarm(db, name, raisedevent):
try:
db.query(Alarms).filter(Alarms.name == name).one().\
update({"raisedevent": raisedevent})
except NoResultFound:
toemails = getalertemails(db, name)
sendalarm(name, raisedevent, toemails)
alarm = Alarms(name=name, eventtime=datetime.datetime.now(), raisedevent | =raisedevent)
db.add(alarm)
db.commit()
def checkauth(username, password):
if username == ADMINUSER and p | assword == ADMINPASS:
return True
return False
|
asmodehn/filefinder2 | tests/test_filefinder2/pkg/submodule.py | Python | mit | 143 | 0.006993 | #!/usr/bin/python
# -*- codin | g: utf-8 -*-
# Just Dummy class for testing
class TestClassInSubModu | le:
"""Test Class from source"""
pass |
motmot/flymovieformat | scripts/fmf_subtract_frame.py | Python | bsd-3-clause | 2,939 | 0.001021 | import pkg_resources
import motmot.FlyMovieFormat.FlyMovieFormat as FMF
import motmot.imops.imops as imops
import sys, os
from pylab import prctile
import numpy as np
import collections
from optparse import OptionParser
def doit(
input_fname, subtract_frame, start=None, stop=None, gain=1.0, offset=0.0,
):
output_fname = os.path.splitext(input_fname)[0] + ".sub.fmf"
in_fmf = FMF.FlyMovie(input_fname)
input_format = in_fmf.get_format()
input_is_color = imops.is_coding_color(input_format)
if not subtract_frame.endswith(".fmf"):
raise NotImplementedError("only fmf supported for --subtract-frame")
tmp_fmf = FMF.FlyMovie(subtract_frame)
if input_is_color:
tmp_frame, tmp_timestamp = tmp_fmf.get_next_frame()
subtract_frame = imops.to_rgb8(tmp_fmf.get_format(), tmp_frame)
subtract_frame = subtract_frame.astype(
np.float32
) # force upconversion to float
else:
tmp_frame, tmp_timestamp = tmp_fmf.get_next_frame()
subtract_frame = imops.to_mono8(tmp_fmf.get_format(), tmp_frame)
subtract_frame = subtract_frame.astype(
np.float32
) # force upconversion to float
if input_is_color:
output_format = "RGB8"
else:
output_format = "MONO8"
out_fmf = FMF.FlyMovieSaver(output_fname, version=3, format=output_format,)
try:
if stop is None:
stop = in_fmf.get_n_frames() - 1
if start is None:
start = 0
n_frames = stop - start + 1
n_samples = max(30, n_frames)
for fno in np.linspace(start, stop, n_samples):
fno = int(round(fno))
in_fmf.seek(fno)
frame, timestamp = in_fmf.get_next_frame()
if | input_is_color:
frame = imops | .to_rgb8(input_format, frame)
new_frame = frame - subtract_frame
else:
frame = np.atleast_3d(frame)
new_frame = frame - subtract_frame
new_frame = np.clip(new_frame * gain + offset, 0, 255)
new_frame = new_frame.astype(np.uint8)
out_fmf.add_frame(new_frame, timestamp)
out_fmf.close()
except:
os.unlink(output_fname)
raise
in_fmf.close()
def main():
parser = OptionParser(usage="%prog [options] filename.fmf", version="%prog 0.1")
parser.add_option("--start", type="int", help="first frame")
parser.add_option("--stop", type="int", help="last frame")
parser.add_option("--gain", type="float", default=1.0)
parser.add_option("--offset", type="float", default=127.0)
(options, args) = parser.parse_args()
filename = args[0]
subtract_frame = args[1]
doit(
filename,
subtract_frame,
start=options.start,
stop=options.stop,
gain=options.gain,
offset=options.offset,
)
if __name__ == "__main__":
main()
|
fyabc/MiniGames | HearthStone2/MyHearthStone/network/lan_client.py | Python | mit | 2,327 | 0.00043 | #! /usr/bin/python
# -*- coding: utf-8 -*-
import socket
import threading
from . import utils2 as utils
from ..utils.message import info, error
__author__ = 'fyabc'
class LanClient:
def __init__(self, user):
self.user = user
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect(self.user.address)
self.rfile = self.socket.makefile('rb', 0) |
self.wfile = self.socket.makefile('wb', 0)
self.input_thread = None
self.start()
s | elf.run()
def start(self):
# 1. Send user data to server.
self.send('user_data', nickname=self.user.nickname, deck_code=self.user.deck_code)
self.input_thread = self.InputThread(self.wfile)
t = threading.Thread(target=self.input_thread.run)
t.setDaemon(True)
t.start()
def run(self):
running = True
while running:
try:
msg = self.recv()
except ConnectionError:
break
result = self.parse_msg(msg)
if msg['type'] == 'terminated':
running = False
self.input_thread.done = True
def send(self, msg_type, **kwargs):
utils.send_msg(self.wfile, msg_type, **kwargs)
def send_text(self, text, error=False):
msg_type = 'error' if error else 'text'
self.send(msg_type, text=text)
def send_ok(self):
self.send('ok')
def recv(self):
return utils.recv_msg(self.rfile)
def parse_msg(self, msg):
msg_type = msg['type']
if msg_type == 'text':
info(msg['text'])
elif msg_type == 'error':
error(msg['text'])
elif msg_type == 'ok':
pass
elif msg_type == 'user_data':
pass
elif msg_type == 'game_status':
pass
else:
pass
class InputThread:
def __init__(self, wfile):
self.wfile = wfile
self.done = False
def run(self):
"""Echo standard input to the chat server until told to stop."""
# todo: add input method
while not self.done:
pass
def start_client(address, nickname, deck_code):
LanClient(utils.NetworkUser(address, nickname, deck_code))
|
ststaynov/fishGame | manage.py | Python | bsd-3-clause | 245 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault( | "DJANGO_SETTINGS_MODULE", "chat.settings")
from django.core.management import execute_from_command_line
execute_from_command_lin | e(sys.argv)
|
cgstudiomap/cgstudiomap | main/eggs/passlib-1.6.5-py2.7.egg/passlib/tests/test_utils.py | Python | agpl-3.0 | 35,168 | 0.006881 | """tests for passlib.util"""
#=============================================================================
# imports
#=============================================================================
from __future__ import with_statement
# core
from binascii import hexlify, unhexlify
import sys
import random
import warnings
# site
# pkg
# module
from passlib.utils.compat import b, bytes, bascii_to_str, irange, PY2, PY3, u, \
unicode, join_bytes, SUPPORTS_DIR_METHOD
from passlib.tests.utils import TestCase, catch_warnings
def hb(source):
return unhexlify(b(source))
#=============================================================================
# byte funcs
#=============================================================================
class MiscTest(TestCase):
"""tests various parts of utils module"""
# NOTE: could test xor_bytes(), but it's exercised well enough by pbkdf2 test
def test_compat(self):
"""test compat's lazymodule"""
from passlib.utils import compat
# "<module 'passlib.utils.compat' from 'passlib/utils/compat.pyc'>"
self.assertRegex(repr(compat),
r"^<module 'passlib.utils.compat' from '.*?'>$")
# test synthentic dir()
dir(compat)
if SUPPORTS_DIR_METHOD:
self.assertTrue('UnicodeIO' in dir(compat))
self.assertTrue('irange' in dir(compat))
def test_classproperty(self):
from passlib.utils import classproperty
class test(object):
xvar = 1
@classproperty
def xprop(cls):
return cls.xvar
self.assertEqual(test.xprop, 1)
prop = test.__dict__['xprop']
self.assertIs(prop.im_func, prop.__func__)
def test_deprecated_function(self):
from passlib.utils import deprecated_function
# NOTE: not comprehensive, just tests the basic behavior
@deprecated_function(deprecated="1.6", removed="1.8")
def test_func(*args):
"""test docstring"""
return args
self.assertTrue(".. deprecated::" in test_func.__doc__)
with self.assertWarningList(dict(category=DeprecationWarning,
message="the function passlib.tests.test_utils.test_func() "
"is deprecated as of Passlib 1.6, and will be "
"removed in Passlib 1.8."
)):
self.assertEqual(test_func(1,2), (1,2))
def test_memoized_property(self):
from passlib.utils import memoized_property
class dummy(object):
counter = 0
@memoized_property
def value(self):
value = self.counter
self.counter = value+1
return value
d = dummy()
self.assertEqual(d.value, 0)
self.assertEqual(d.value, 0)
self.assertEqual(d.counter, 1)
prop = dummy.value
self.assertIs(prop.im_func, prop.__func__)
def test_getrandbytes(self):
"""test getrandbytes()"""
from passlib.utils import getrandbytes, rng
def f(*a,**k):
return getrandbytes(rng, *a, **k)
self.assertEqual(len(f(0)), 0)
a = f(10)
b = f(10)
self.assertIsInstance(a, bytes)
self.assertEqual(len(a), 10)
self.assertEqual(len(b), 10)
self.assertNotEqual(a, b)
def test_getrandstr(self):
"""test getrandstr()"""
| from passlib.utils import getrandstr, rng
def f(*a,**k):
return getrandstr(rng, * | a, **k)
# count 0
self.assertEqual(f('abc',0), '')
# count <0
self.assertRaises(ValueError, f, 'abc', -1)
# letters 0
self.assertRaises(ValueError, f, '', 0)
# letters 1
self.assertEqual(f('a',5), 'aaaaa')
# letters
x = f(u('abc'), 16)
y = f(u('abc'), 16)
self.assertIsInstance(x, unicode)
self.assertNotEqual(x,y)
self.assertEqual(sorted(set(x)), [u('a'),u('b'),u('c')])
# bytes
x = f(b('abc'), 16)
y = f(b('abc'), 16)
self.assertIsInstance(x, bytes)
self.assertNotEqual(x,y)
# NOTE: decoding this due to py3 bytes
self.assertEqual(sorted(set(x.decode("ascii"))), [u('a'),u('b'),u('c')])
# generate_password
from passlib.utils import generate_password
self.assertEqual(len(generate_password(15)), 15)
def test_is_crypt_context(self):
"""test is_crypt_context()"""
from passlib.utils import is_crypt_context
from passlib.context import CryptContext
cc = CryptContext(["des_crypt"])
self.assertTrue(is_crypt_context(cc))
self.assertFalse(not is_crypt_context(cc))
    def test_genseed(self):
        """test genseed()"""
        import random
        from passlib.utils import genseed
        rng = random.Random(genseed())
        a = rng.randint(0, 100000)
        rng = random.Random(genseed())
        b = rng.randint(0, 100000)
        # two independently seeded RNGs should (almost certainly) diverge,
        # i.e. genseed() must not return a constant seed
        self.assertNotEqual(a,b)
        # smoke test: genseed() must also accept an existing RNG as its
        # entropy source (no assertion -- just must not raise)
        rng.seed(genseed(rng))
    def test_crypt(self):
        """test crypt.crypt() wrappers"""
        from passlib.utils import has_crypt, safe_crypt, test_crypt
        # test everything is disabled when the host lacks crypt.crypt():
        # both wrappers must fail soft, then skip the rest of the test
        if not has_crypt:
            self.assertEqual(safe_crypt("test", "aa"), None)
            self.assertFalse(test_crypt("test", "aaqPiZY5xR5l."))
            raise self.skipTest("crypt.crypt() not available")
        # XXX: this assumes *every* crypt() implementation supports des_crypt.
        # if this fails for some platform, this test will need modifying.
        # test return type
        self.assertIsInstance(safe_crypt(u("test"), u("aa")), unicode)
        # test ascii password; h1 is the expected des-crypt digest of
        # password "test" with salt "aa"
        h1 = u('aaqPiZY5xR5l.')
        self.assertEqual(safe_crypt(u('test'), u('aa')), h1)
        self.assertEqual(safe_crypt(b('test'), b('aa')), h1)
        # test utf-8 / unicode password (same char, given two ways)
        h2 = u('aahWwbrUsKZk.')
        self.assertEqual(safe_crypt(u('test\u1234'), 'aa'), h2)
        self.assertEqual(safe_crypt(b('test\xe1\x88\xb4'), 'aa'), h2)
        # test latin-1 password
        hash = safe_crypt(b('test\xff'), 'aa')
        if PY3: # py3 supports utf-8 bytes only.
            self.assertEqual(hash, None)
        else: # but py2 is fine.
            self.assertEqual(hash, u('aaOx.5nbTU/.M'))
        # test rejects null chars in password
        self.assertRaises(ValueError, safe_crypt, '\x00', 'aa')
        # check test_crypt(): correct hash verifies, 1-char corruption fails
        h1x = h1[:-1] + 'x'
        self.assertTrue(test_crypt("test", h1))
        self.assertFalse(test_crypt("test", h1x))
        # check crypt returning variant error indicators
        # some platforms return None on errors, others empty string,
        # The BSDs in some cases return ":"
        import passlib.utils as mod
        orig = mod._crypt
        try:
            # the lambda closes over ``fake``, so rebinding ``fake`` in the
            # loop changes what the stubbed backend returns
            fake = None
            mod._crypt = lambda secret, hash: fake
            for fake in [None, "", ":", ":0", "*0"]:
                self.assertEqual(safe_crypt("test", "aa"), None)
                self.assertFalse(test_crypt("test", h1))
            # a plausible-looking return value is passed through unchanged
            fake = 'xxx'
            self.assertEqual(safe_crypt("test", "aa"), "xxx")
        finally:
            # always restore the real crypt backend for later tests
            mod._crypt = orig
def test_consteq(self):
"""test consteq()"""
# NOTE: this test is kind of over the top, but that's only because
# this is used for the critical task of comparing hashes for equality.
from passlib.utils import consteq
# ensure error raises for wrong types
self.assertRaises(TypeError, consteq, u(''), b(''))
self.assertRaises(TypeError, consteq, u(''), 1)
self.assertRaises(TypeError, consteq, u(''), None)
self.assertRaises(TypeError, consteq, b(''), u(''))
self.assertRaises(TypeError, consteq, b(''), 1)
self.assertRaises(TypeError, consteq, b(''), None)
self.assertRaises(TypeError, consteq, None, u(''))
self.assertRaises(TypeError, consteq, None, b(''))
self.assertRaises(TypeError, consteq, 1, u(''))
|
GetmeUK/MongoFrames | tests/fixtures.py | Python | mit | 3,565 | 0.001122 | from datetime import datetime
from pymongo import MongoClient
import pytest
from mongoframes import *
__all__ = [
# Frames
'Dragon',
'Inventory',
'Lair',
'ComplexDragon',
'MonitoredDragon',
# Fixtures
'mongo_client',
'example_dataset_one',
'example_dataset_many'
]
|
# Classes
class Dragon(Frame):
    """
    A dragon.
    """
    # NOTE: repaired corrupted line ("_fields = { |" -> "_fields = {")
    _fields = {
        'name',
        'breed'
    }
    # 'breed' is marked private -- presumably excluded from default
    # serialization; confirm against the MongoFrames Frame docs
    _private_fields = {'breed'}
    def _get_dummy_prop(self):
        return self._dummy_prop
    def _set_dummy_prop(self, value):
        # NOTE: intentionally stores True rather than ``value`` (lets tests
        # detect that the setter ran); preserved as-is
        self._dummy_prop = True
    dummy_prop = property(_get_dummy_prop, _set_dummy_prop)
class Inventory(SubFrame):
    """
    An inventory of items kept within a lair.
    """
    # fields stored on this embedded (sub-frame) document
    _fields = {
        'gold',
        'skulls'
    }
    # 'gold' is marked private -- presumably excluded from default
    # serialization; confirm against the MongoFrames SubFrame docs
    _private_fields = {'gold'}
class Lair(Frame):
    """
    A lair in which a dragon resides.
    """
    _fields = {
        'name',
        # holds an embedded Inventory sub-document (see fixtures below)
        'inventory'
    }
class ComplexDragon(Dragon):
    """A Dragon extended with a birth date, a lair reference and trait data."""
    _fields = Dragon._fields | {
        'dob',
        'lair',
        'traits',
        'misc'
    }
    # Default projection: presumably dereferences 'lair' into its full Lair
    # document and expands the embedded Inventory sub-frame -- confirm
    # against the MongoFrames projection docs
    _default_projection = {
        'lair': {
            '$ref': Lair,
            'inventory': {'$sub': Inventory}
        }
    }
class MonitoredDragon(Dragon):
    """A Dragon carrying audit timestamp fields ('created'/'modified')."""
    # NOTE(review): nothing in this file populates these fields; presumably a
    # timestamp behaviour elsewhere does -- confirm before relying on them
    _fields = Dragon._fields | {
        'created',
        'modified'
    }
# Fixtures
@pytest.fixture(scope='function')
def mongo_client(request):
    """Connect to the test database"""
    # Connect to mongodb and create a test database.
    # NOTE: mutates the class-level ``Frame._client``, so every Frame
    # subclass talks to the test database for the duration of the test.
    Frame._client = MongoClient('mongodb://localhost:27017/mongoframes_test')
    def fin():
        # Remove the test database once the test finishes
        Frame._client.drop_database('mongoframes_test')
    request.addfinalizer(fin)
    return Frame._client
@pytest.fixture(scope='function')
def example_dataset_one(request):
    """Create an example set of data that can be used in testing"""
    # one lair with an embedded inventory
    inventory = Inventory(
        gold=1000,
        skulls=100
    )
    cave = Lair(
        name='Cave',
        inventory=inventory
    )
    cave.insert()
    # one dragon referencing the lair inserted above
    burt = ComplexDragon(
        name='Burt',
        dob=datetime(1979, 6, 11),
        breed='Cold-drake',
        lair=cave,
        traits=['irritable', 'narcissistic']
    )
    burt.insert()
@pytest.fixture(scope='function')
def example_dataset_many(request):
    """Create an example set of data that can be used in testing.

    Inserts three dragons (Burt, Fred, Albert), each with its own lair and
    embedded inventory.
    """
    # Burt
    cave = Lair(
        name='Cave',
        inventory=Inventory(
            gold=1000,
            skulls=100
        )
    )
    cave.insert()
    burt = ComplexDragon(
        name='Burt',
        dob=datetime(1979, 6, 11),
        breed='Cold-drake',
        lair=cave,
        traits=['irritable', 'narcissistic']
    )
    burt.insert()
    # Fred
    castle = Lair(
        name='Castle',
        inventory=Inventory(
            gold=2000,
            skulls=200
        )
    )
    castle.insert()
    fred = ComplexDragon(
        name='Fred',
        dob=datetime(1980, 7, 12),
        breed='Fire-drake',
        lair=castle,
        traits=['impulsive', 'loyal']
    )
    fred.insert()
    # Albert (the original comment here said "Fred" -- copy/paste slip)
    mountain = Lair(
        name='Mountain',
        inventory=Inventory(
            gold=3000,
            skulls=300
        )
    )
    mountain.insert()
    albert = ComplexDragon(
        name='Albert',
        dob=datetime(1981, 8, 13),
        breed='Stone dragon',
        lair=mountain,
        traits=['reclusive', 'cunning']
    )
    albert.insert()
CanalTP/kirin | kirin/__init__.py | Python | agpl-3.0 | 3,892 | 0.004882 | # coding=utf-8
# Copyright (c) 2001, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# [matrix] channel #navitia:matrix.org (https://app.element.io/#/room/#navitia:matrix.org)
# https://groups.google.com/d/forum/navitia
# www.navitia.io
import os
import sys
from flask import Flask
from flask_cors import CORS
import logging.config
# As Kirin is an API without authentication, we don't need to add CSRF protection
app = Flask(__name__)
CORS(
app,
resources={r"/*": {"origins": "*", "send_wildcard": "False", "methods": ["GET", "POST", "PUT", "DELETE"]}},
)
app.config.from_object("kirin.default_settings") # type: ignore
if "KIRIN_CONFIG_FILE" in os.environ:
app.config.from_envvar("KIRIN_CONFIG_FILE") # type: ignore
from kirin.helper import KirinRequest
app.request_class = KirinRequest
if app.config["USE_GEVENT"]:
# replace blocking method by a non blocking equivalent
# this enable us to use gevent for launching background task
# Note: there is a conflict between py.test and gevent
# http://stackoverflow.com/questions/8774958/keyerror-in-module-threading-after-a-successful-py-test-run
# so we need to remove threading from the import
if "threading" in sys.modules:
del sys.modules["threading"]
# end of conflict's patch
# MUST be imported before `requests`, ideally, import it as soon as possible
# https://www.gevent.org/api/gevent.monkey.html
from gevent import monkey
monkey.patch_all()
from kirin import exceptions
from kirin import new_relic
from flask_caching import Cache
# register the cache instance and binds it on to your app
app.cache = Cache(app, config={"CACHE_THRESHOLD": 5000})
from redis import Redis
redis_client = Redis(
host=app.config["REDIS_HOST"],
port=app.config["REDIS_PORT"],
db=app.config["REDIS_DB"],
password=app.config["REDIS_PASSWORD"],
)
# activate a command
import kirin.command.load_realtime
import kirin.queue_workers.piv.piv_worker
import kirin.command.purge
import kirin.queue_workers.siri_et_xml_tn.siri_et_xml_tn_worker
from flask_migrate import Migrate
from kirin.core import model
db = model.db
db.init_app(app)
migrate = Migrate(app, db)
# We need to log all kinds of patch, all patch must be done as soon as possible
logger = logging.getLogger(__name__)
if "threading" not in sys.modules:
logger.info("threading is deleted from sys.modules")
logger.info("Configs: %s", app.config)
from kirin.rabbitmq_handler import RabbitMQHandler
rmq_handler = RabbitMQHandler(app.config["RABBITMQ_CONNECTION_STRING"], app.config["EXCHANGE"])
import kirin.api
from kirin import utils
if "LOGGER" in app.config:
logging.config.dictConfig(app.config["LOGGER"])
else: # Default is std out
handler = logging.StreamHandler(stream=sys.stdout)
app.logger.addHandler(handler)
app.logger.setLevel("INFO")
|
fabiobatalha/analytics | analytics/views_ajax.py | Python | bsd-2-clause | 17,098 | 0.004971 | # coding: utf-8
from pyramid.view import view_config
from dogpile.cache import make_region
from analytics.control_manager import base_data_manager
from citedby.custom_query import journal_titles
cache_region = make_region(name='views_ajax_cache')
@view_config(route_name='bibliometrics_document_received_citations', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_document_received_citations(request):
    """Return the citations received by one document, selected by the
    ``code`` GET parameter."""
    # NOTE: removed a dead assignment (``data = request.data_manager``) whose
    # result was immediately overwritten -- assumed to be a plain attribute
    # read with no side effects; confirm against base_data_manager.
    code = request.GET.get('code', '')
    return request.stats.bibliometrics.document_received_citations(code)
@view_config(route_name='bibliometrics_journal_jcr_eigen_factor_chart', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_jcr_eigen_factor_chart(request):
    """Chart config for the selected journal's JCR eigen-factor series."""
    manager = request.data_manager
    series = request.stats.bibliometrics.jcr_eigen_factor(manager['selected_journal_code'])
    return request.chartsconfig.bibliometrics_jcr_eigen_factor(series)
@view_config(route_name='bibliometrics_journal_jcr_received_citations_chart', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_jcr_received_citations_chart(request):
    """Chart config for the selected journal's JCR received-citations series."""
    manager = request.data_manager
    series = request.stats.bibliometrics.jcr_received_citations(manager['selected_journal_code'])
    return request.chartsconfig.bibliometrics_jcr_received_citations(series)
@view_config(route_name='bibliometrics_journal_jcr_average_impact_factor_percentile_chart', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_jcr_average_impact_factor_percentile_chart(request):
    """Chart config for the journal's JCR average impact-factor percentile."""
    manager = request.data_manager
    series = request.stats.bibliometrics.jcr_average_impact_factor_percentile(manager['selected_journal_code'])
    return request.chartsconfig.bibliometrics_jcr_average_impact_factor_percentile(series)
@view_config(route_name='bibliometrics_journal_jcr_impact_factor_chart', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_jcr_impact_factor_chart(request):
    """Chart config for the selected journal's JCR impact-factor series."""
    manager = request.data_manager
    series = request.stats.bibliometrics.jcr_impact_factor(manager['selected_journal_code'])
    return request.chartsconfig.bibliometrics_jcr_impact_factor(series)
@view_config(route_name='bibliometrics_journal_google_h5m5_chart', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_google_h5m5_chart(request):
    """Chart config for the selected journal's Google Scholar h5/m5 series."""
    manager = request.data_manager
    series = request.stats.bibliometrics.google_h5m5(manager['selected_journal_code'])
    return request.chartsconfig.bibliometrics_google_h5m5(series)
@view_config(route_name='bibliometrics_journal_cited_and_citing_years_heat', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_cited_and_citing_years_heat(request):
    """Heat-map chart of cited vs. citing years for the selected journal."""
    data = request.data_manager
    # caller may pass extra title variants as a '||'-separated GET parameter
    titles = request.GET.get('titles', None)
    titles = titles.split('||') if titles else []
    if data['selected_journal_code']:
        # enrich with the journal's official, abbreviated, and any
        # alternative ("should"-match) titles, skipping duplicates
        journal = request.stats.articlemeta.journal(code=data['selected_journal_code'])
        titles.append(journal.title)
        titles.append(journal.abbreviated_title)
        titles.extend(x['title'] for x in journal_titles.load(data['selected_journal_code']).get('should', []) if x['title'] not in titles)
    data = request.stats.bibliometrics.cited_and_citing_years_heat(
        data['selected_journal_code'],
        titles
    )
    return request.chartsconfig.bibliometrics_cited_and_citing_years_heat(data)
@view_config(route_name='bibliometrics_journal_impact_factor_chart', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_impact_factor_chart(request):
    """Impact-factor chart for the selected journal within a collection."""
    data = request.data_manager
    # caller may pass extra title variants as a '||'-separated GET parameter
    titles = request.GET.get('titles', None)
    titles = titles.split('||') if titles else []
    if data['selected_journal_code']:
        # enrich with official, abbreviated, and alternative titles (deduped)
        journal = request.stats.articlemeta.journal(code=data['selected_journal_code'])
        titles.append(journal.title)
        titles.append(journal.abbreviated_title)
        titles.extend(x['title'] for x in journal_titles.load(data['selected_journal_code']).get('should', []) if x['title'] not in titles)
    data = request.stats.impact_factor_chart(data['selected_journal_code'], data['selected_collection_code'], titles, py_range=data['py_range'])
    return request.chartsconfig.bibliometrics_impact_factor(data)
@view_config(route_name='bibliometrics_journal_received_self_and_granted_citation_chart', request_method='GET', renderer='jsonp')
@base_data_manager
def bibliometrics_journal_received_self_and_granted_citation_chart(request):
    """Chart of received, self, and granted citations for the journal."""
    data = request.data_manager
    # caller may pass extra title variants as a '||'-separated GET parameter
    titles = request.GET.get('titles', None)
    titles = titles.split('||') if titles else []
    if data['selected_journal_code']:
        # enrich with official, abbreviated, and alternative titles (deduped)
        journal = request.stats.articlemeta.journal(code=data['selected_journal_code'])
        titles.append(journal.title)
        titles.append(journal.abbreviated_title)
        titles.extend(x['title'] for x in journal_titles.load(data['selected_journal_code']).get('should', []) if x['title'] not in titles)
    data = request.stats.received_self_and_granted_citation_chart(data['selected_journal_code'], data['selected_collection_code'], titles, py_range=data['py_range'])
    return request.chartsconfig.bibliometrics_journal_received_self_and_granted_citation_chart(data)
@view_config(route_name='publication_article_references', request_method='GET', renderer='jsonp')
@base_data_manager
def publication_article_references(request):
    """Chart config for the distribution of reference counts per article."""
    data = request.data_manager
    # NOTE: repaired corrupted tokens from the source dump
    # ("request.stats.pub | lication" and "'citations' | ,").
    chart_data = request.stats.publication.general('article', 'citations', data['selected_code'], data['selected_collection_code'], py_range=data['py_range'], sa_scope=data['sa_scope'], la_scope=data['la_scope'], size=40, sort_term='asc')
    return request.chartsconfig.publication_article_references(chart_data)
@view_config(route_name='publication_article_authors', request_method='GET', renderer='jsonp')
@base_data_manager
def publication_article_authors(request):
    """Chart config for the distribution of author counts per article."""
    manager = request.data_manager
    chart = request.stats.publication.general('article', 'authors', manager['selected_code'], manager['selected_collection_code'], py_range=manager['py_range'], sa_scope=manager['sa_scope'], la_scope=manager['la_scope'], size=0, sort_term='asc')
    return request.chartsconfig.publication_article_authors(chart)
@view_config(route_name='publication_article_affiliations_map', request_method='GET', renderer='jsonp')
@base_data_manager
def publication_article_affiliations_map(request):
    """World-map chart config of article affiliation countries."""
    manager = request.data_manager
    chart = request.stats.publication.general('article', 'aff_countries', manager['selected_code'], manager['selected_collection_code'], py_range=manager['py_range'], sa_scope=manager['sa_scope'], la_scope=manager['la_scope'])
    return request.chartsconfig.publication_article_affiliations_map(chart)
@view_config(route_name='publication_article_affiliations', request_method='GET', renderer='jsonp')
@base_data_manager
def publication_article_affiliations(request):
    """Chart config for the top-20 article affiliation countries."""
    manager = request.data_manager
    chart = request.stats.publication.general('article', 'aff_countries', manager['selected_code'], manager['selected_collection_code'], py_range=manager['py_range'], sa_scope=manager['sa_scope'], la_scope=manager['la_scope'], size=20)
    return request.chartsconfig.publication_article_affiliations(chart)
@view_config(route_name='publication_article_affiliations_publication_year', request_method='GET', renderer='jsonp')
@base_data_manager
def publication_article_affiliations_publication_year(request):
    """Chart config of affiliation countries broken down by publication year."""
    manager = request.data_manager
    chart = request.stats.publication.affiliations_by_publication_year(manager['selected_code'], manager['selected_collection_code'], manager['py_range'], manager['sa_scope'], manager['la_scope'])
    return request.chartsconfig.publication_article_affiliations_by_publication_year(chart)
@view_config(route_name='publication_article_year', request_method='GET', renderer='jsonp')
@base_data_manager
def publication_article_year(request):
data = request.data_manager
data_chart = request.stats.publication.general('article', 'publication_year', data['sele |
pviotti/osm-viz | map_points.py | Python | gpl-3.0 | 1,183 | 0.01268 | #!/usr/bin/env python
# Script for processing with map reduce the Open Street Map datasets.
# Counts the number of GPS hits in a discretized and scaled coordinates space.
# Example of input row: -778591613,1666898345 [as described here: http://blog.osmfoundation.org/2012/04/01/bulk-gps-point-data/ ]
# Example of output row: 1000-2579 282 [<latitude>-<longitude> \t <density value>]
import sys
SCALING = 10 # scaling factor, to decrease map resolution
MAX_LAT = 180 * SCALING
MAX_LON = 360 * SCALING
PDIV = 10000000 / SCALING

def mapper():
    """Map step: read "<lat>,<lon>" pairs (degrees * 1e7) from stdin and emit
    one "LongValueSum:<lat>-<lon>\\t1" record per GPS hit, with coordinates
    shifted to non-negative values (lat 0-1800, lon 0-3600) and scaled down
    by SCALING. Malformed lines are silently skipped.
    """
    for line in sys.stdin:
        line = line.strip()
        if len(line) < 6:  # too short to be a valid "<lat>,<lon>" pair
            continue
        coords = line.split(',')
        if len(coords) != 2:
            continue
        try:
            lat = int((90*SCALING) + round(float(coords[0])/PDIV)) # 0-1800
            lon = int((180*SCALING) + round(float(coords[1])/PDIV)) # 0-3600
        except ValueError:
            # was a bare ``except:`` which would also swallow SystemExit and
            # KeyboardInterrupt; only number parsing can fail here
            continue
        if (lat <= MAX_LAT) and (lon <= MAX_LON):
            sys.stdout.write('LongValueSum:%s\t%d\n' % (str(lat) + "-" + str(lon), 1))

if __name__ == '__main__':
    mapper()
|
sidhart/antlr4 | runtime/Python2/src/antlr4/error/ErrorListener.py | Python | bsd-3-clause | 4,146 | 0.003618 | #
# [The "BSD license"]
# Copyright (c) 2012 Terence Parr
# Copyright (c) 2012 Sam Harwell
# Copyright (c) 2014 Eric Vergnaud
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Provides an empty default implementation of {@link ANTLRErrorListener}. The
# default implementation of each method does nothing, but can be overridden as
# necessary.
from __future__ import print_function
import sys
class ErrorListener(object):
    """Empty default implementation of the ANTLR error-listener interface.

    Every callback does nothing; subclasses override only the ones they need.
    """
    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        pass
    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
        pass
    def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs):
        pass
    def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
        pass
class ConsoleErrorListener(ErrorListener):
    """Error listener that reports syntax errors on stderr.

    Messages use the format::

        line <line>:<column> <msg>
    """

    # Shared default instance; assigned right after the class body below.
    INSTANCE = None

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        sys.stderr.write("line %s:%s %s\n" % (line, column, msg))

ConsoleErrorListener.INSTANCE = ConsoleErrorListener()
class ProxyErrorListener(ErrorListener):
    """Fans every listener callback out to each listener in ``delegates``."""
    def __init__(self, delegates):
        super(ProxyErrorListener, self).__init__()
        if delegates is None:
            # NOTE(review): ReferenceError is an odd choice here (ValueError
            # would be conventional), but callers may catch it -- kept as-is.
            raise ReferenceError("delegates")
        self.delegates = delegates
    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        for delegate in self.delegates:
            delegate.syntaxError(recognizer, offendingSymbol, line, column, msg, e)
    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
        for delegate in self.delegates:
            delegate.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
    def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs):
        for delegate in self.delegates:
            delegate.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
    def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
        for delegate in self.delegates:
            delegate.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
|
akbertram/appengine-pipeline | src/pipeline/pipeline.py | Python | apache-2.0 | 112,693 | 0.007001 | #!/usr/bin/python2.5
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google App Engine Pipeline API for complex, asynchronous workflows."""
__all__ = [
# Public API.
'Error', 'PipelineSetupError', 'PipelineExistsError',
'PipelineRuntimeError', 'SlotNotFilledError', 'SlotNotDeclaredError',
'UnexpectedPipelineError', 'PipelineStatusError', 'Slot', 'Pipeline',
'PipelineFuture', 'After', 'InOrder', 'Retry', 'Abort', 'get_status_tree',
'create_handlers_map', 'set_enforce_auth',
]
import datetime
import itertools
import logging
import os
import re
import sys
import threading
import time
import traceback
import urllib
import uuid
from google.appengine.api import mail
from google.appengine.api import files
from google.appengine.api import users
from google.appengine.api import taskqueue
from google.appengine.ext import db
from google.appengine.ext import webapp
# Relative imports
import models
import simplejson
import util as mr_util
# For convenience
_PipelineRecord = models._PipelineRecord
_SlotRecord = models._SlotRecord
_BarrierRecord = models._BarrierRecord
_StatusRecord = models._StatusRecord
# Overall TODOs:
# - Add a human readable name for start()
# - Consider using sha1 of the UUID for user-supplied pipeline keys to ensure
#   that the keys are definitely not sequential or guessable (Python's uuid1
# method generates roughly sequential IDs).
# - Ability to list all root pipelines that are live on simple page.
# Potential TODOs:
# - Add support for ANY N barriers.
# - Add a global 'flags' value passed in to start() that all pipelines have
# access to; makes it easy to pass along Channel API IDs and such.
# - Allow Pipelines to declare they are "short" and optimize the evaluate()
# function to run as many of them in quick succession.
# - Add support in all Pipelines for hold/release where up-stream
# barriers will fire but do nothing because the Pipeline is not ready.
################################################################################
# Root of the exception hierarchy for this module.
class Error(Exception):
  """Base class for exceptions in this module."""
# Errors raised before a pipeline starts executing.
class PipelineSetupError(Error):
  """Base class for exceptions that happen before Pipeline execution."""
class PipelineExistsError(PipelineSetupError):
  """A new Pipeline with an assigned idempotence_key cannot be overwritten."""
# Errors raised while a pipeline is executing.
class PipelineRuntimeError(Error):
  """Base class for exceptions that happen during Pipeline execution."""
class SlotNotFilledError(PipelineRuntimeError):
  """A slot that should have been filled already was not yet filled."""
class SlotNotDeclaredError(PipelineRuntimeError):
  """A slot that was filled or passed along was not previously declared."""
class UnexpectedPipelineError(PipelineRuntimeError):
  """An assertion failed, potentially leaving the pipeline unable to proceed."""
# Control-flow exceptions raised deliberately from user pipeline code.
class PipelineUserError(Error):
  """Exceptions raised indirectly by developers to cause certain behaviors."""
class Retry(PipelineUserError):
  """The currently running pipeline should be retried at a later time."""
class Abort(PipelineUserError):
  """The currently running pipeline should be aborted up to the root."""
# Errors from the status-reporting helpers.
class PipelineStatusError(Error):
  """Exceptions raised when trying to collect pipeline status."""
################################################################################
# Batch sizes for barrier notification / abort fan-out (usage not visible in
# this chunk -- confirm against the evaluate/abort task handlers).
_MAX_BARRIERS_TO_NOTIFY = 10
_MAX_ABORTS_TO_BEGIN = 10
# Test-mode switches: when _TEST_MODE is True, slots/pipelines are simulated
# in memory instead of the datastore (see Slot.__init__ below).
_TEST_MODE = False
_TEST_ROOT_PIPELINE_KEY = None
# Defaults for the retry/backoff policy of failed pipelines.
_DEFAULT_BACKOFF_SECONDS = 15
_DEFAULT_BACKOFF_FACTOR = 2
_DEFAULT_MAX_ATTEMPTS = 3
# Slack added around retry times -- presumably to tolerate clock skew between
# task queue and datastore; confirm where it is consumed.
_RETRY_WIGGLE_TIMEDELTA = datetime.timedelta(seconds=20)
_DEBUG = False
# Upper bound on serialized JSON payload size (datastore entity limit margin).
_MAX_JSON_SIZE = 900000
_ENFORCE_AUTH = True
################################################################################
class Slot(object):
  """An output that is filled by a Pipeline as it executes."""

  def __init__(self, name=None, slot_key=None, strict=False):
    """Initializer.

    Args:
      name: The name of this slot.
      slot_key: The db.Key for this slot's _SlotRecord if it's already been
        allocated by an up-stream pipeline.
      slot_key: The db.Key for this slot's _SlotRecord if it's already been
        allocated by an up-stream pipeline.
      strict: If this Slot was created as an output of a strictly defined
        pipeline.

    Raises:
      UnexpectedPipelineError: If no name was supplied.
    """
    if name is None:
      raise UnexpectedPipelineError('Slot with key "%s" missing a name.' %
                                    slot_key)
    if slot_key is None:
      # No datastore record exists yet; allocate a fresh, unique key for it.
      # In test mode the record is simulated and never actually exists.
      slot_key = db.Key.from_path(_SlotRecord.kind(), uuid.uuid1().hex)
      self._exists = _TEST_MODE
    else:
      self._exists = True
    self._touched = False
    self._strict = strict
    self.name = name
    self.key = slot_key
    self.filled = False
    self._filler_pipeline_key = None
    self._fill_datetime = None
    self._value = None

  def _ensure_filled(self):
    """Raises SlotNotFilledError unless this slot has been filled.

    Extracted helper: the same guard (with an identical message) was
    previously duplicated across the value/filler/fill_datetime properties.
    """
    if not self.filled:
      raise SlotNotFilledError('Slot with name "%s", key "%s" not yet filled.'
                               % (self.name, self.key))

  @property
  def value(self):
    """Returns the current value of this slot.

    Returns:
      The value of the slot (a serializable Python type).

    Raises:
      SlotNotFilledError if the value hasn't been filled yet.
    """
    self._ensure_filled()
    return self._value

  @property
  def filler(self):
    """Returns the pipeline ID that filled this slot's value.

    Returns:
      A string that is the pipeline ID.

    Raises:
      SlotNotFilledError if the value hasn't been filled yet.
    """
    self._ensure_filled()
    return self._filler_pipeline_key.name()

  @property
  def fill_datetime(self):
    """Returns when the slot was filled.

    Returns:
      A datetime.datetime.

    Raises:
      SlotNotFilledError if the value hasn't been filled yet.
    """
    self._ensure_filled()
    return self._fill_datetime

  def _set_value(self, slot_record):
    """Sets the value of this slot based on its corresponding _SlotRecord.

    Does nothing if the slot has not yet been filled.

    Args:
      slot_record: The _SlotRecord containing this Slot's value.
    """
    if slot_record.status == _SlotRecord.FILLED:
      self.filled = True
      self._filler_pipeline_key = _SlotRecord.filler.get_value_for_datastore(
          slot_record)
      self._fill_datetime = slot_record.fill_time
      self._value = slot_record.value

  def _set_value_test(self, filler_pipeline_key, value):
    """Sets the value of this slot for use in testing.

    Args:
      filler_pipeline_key: The db.Key of the _PipelineRecord that filled
        this slot.
      value: The serializable value set for this slot.
    """
    self.filled = True
    self._filler_pipeline_key = filler_pipeline_key
    self._fill_datetime = datetime.datetime.utcnow()
    # Convert to JSON and back again, to simulate the behavior of production.
    self._value = simplejson.loads(simplejson.dumps(value))

  def __repr__(self):
    """Returns a string representation of this slot."""
    if self.filled:
      return repr(self._value)
    else:
      return 'Slot(name="%s", slot_key="%s")' % (self.name, self.key)
class PipelineFuture(object):
"""A future for accessing the outputs of a Pipeline."""
# NOTE: Do not, ever, add a names() method to this class. Callers cannot do
# introspection on their context of being called. Even though the runtime
# environment of the Pipeline can allow for that to happen, such behavior
# would prevent synchronous simulation and verification, whic is an
# unacceptable tradeoff.
def __init__(s |
incuna/incuna-groups | groups/migrations/0016_attachedfile.py | Python | bsd-2-clause | 997 | 0.004012 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import django.utils.timezone
class Migration(migrations.Migration):
    """Adds the AttachedFile model: a user-uploaded file attached to a
    BaseComment."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('groups', '0015_unrequire_m2ms'),
    ]
    operations = [
        migrations.CreateModel(
            name='AttachedFile',
            fields=[
                ('id', models.AutoField(primary_key=True, verbose_name='ID', serialize=False, auto_created=True)),
                ('date_created', models.DateTimeField(default=django.utils.timezone.now)),
                # NOTE: repaired corrupted field name ("'fi | le'" -> 'file')
                ('file', models.FileField(upload_to='groups/attachments')),
                ('attached_to', models.ForeignKey(to='groups.BaseComment', null=True, blank=True, related_name='attachments')),
                # NOTE: repaired corrupted kwarg ("re | lated_name" -> related_name)
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, related_name='attachments')),
            ],
        ),
    ]
|
jjhelmus/artview | artview/components/__init__.py | Python | bsd-3-clause | 1,065 | 0.000939 | """
===========================================
Main Components (:mod:`artview.components`)
==================== | =======================
.. currentmodule:: artview.components
ARTview offers some basic Components for visualization
of weather radar data using Py-ART and
ARTview functions.
.. autosummary::
:toctree: generated/
RadarDisplay
GridDisplay
Menu
LevelButtonWindow
FieldButtonWindow
LinkPlugins
SelectRegion
PlotDisplay
"""
import pyart
from pkg_resources import parse_version
# NOTE: repaired corrupted import ("Radar | Display" -> RadarDisplay).
from .plot_radar import RadarDisplay
# GridDisplay requires pyart >= 1.6.0; older installs get the legacy version.
if parse_version(pyart.__version__) >= parse_version('1.6.0'):
    from .plot_grid import GridDisplay
else:
    from .plot_grid_legacy import GridDisplay
from .plot_points import PointsDisplay
from .menu import Menu
from .level import LevelButtonWindow
from .field import FieldButtonWindow
from .component_control import LinkPlugins
from .select_region import SelectRegion as SelectRegion_dev
from .select_region_old import SelectRegion
from .plot_simple import PlotDisplay
# Drop the version-check helpers so they don't leak as package attributes.
del pyart
del parse_version
vholer/zenpacklib | tests/data/zenpacks/ZenPacks.zenoss.ZPLTest1/ZenPacks/zenoss/ZPLTest1/__init__.py | Python | gpl-2.0 | 540 | 0 | ##############################################################################
#
# Copyright | (C) Zenoss, Inc. 2015, all rights reserved.
#
# This content is made available according to terms specified in
# License.zenoss under the directory where your Zenoss product is installed.
#
######################################################## | ######################
from . import zenpacklib
import os
if 'ZPL_YAML_FILENAME' in os.environ:
CFG = zenpacklib.load_yaml(os.environ['ZPL_YAML_FILENAME'])
else:
CFG = zenpacklib.load_yaml()
|
8v060htwyc/api-kickstart | examples/python/http_calls.py | Python | apache-2.0 | 5,289 | 0.017395 | # Python edgegrid module
""" Copyright 2015 Akamai Technologies, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import requests, logging, json, sys
from random import randint
from akamai.edgegrid import EdgeGridAuth
from config import EdgeGridConfig
from urlparse import urljoin
import urllib
import os
if sys.version_info[0] != 2 or sys.version_info[1] < 7:
print("This script requires Python version 2.7")
sys.exit(1)
logger = logging.getLogger(__name__)
class EdgeGridHttpCaller():
def __init__(self, session, debug, baseurl):
self.debug = debug
self.session = session
self.baseurl = baseurl
return None
def getResult(self, endpoint, parameters=None):
if parameters:
parameter_string = urllib.urlencode(parameters)
path = ''.join([endpoint + '?',parameter_string])
else:
path = endpoint
endpoint_result = self.session.get(urljoin(self.baseurl,path))
if self.debug: print ">>>\n" + json.dumps(endpoint_result.json(), indent=2) + "\n<<<\n"
self.httpErrors(endpoint_result.status_code, path, endpoint_result.json())
return endpoint_result.json()
def httpErrors(self, status_code, endpoint, result):
if status_code == 403:
error_msg = "ERROR: Call to %s failed with a 403 result\n" % endpoint
error_msg += "ERROR: This indicates a problem with authorization.\n"
error_msg += "ERROR: Please ensure that the credentials you created for this script\n"
error_msg += "ERROR: have the necessary permissions in the Luna portal.\n"
error_msg += "ERROR: Problem details: %s\n" % result["detail"]
exit(error_msg)
if status_code in [400, 401]:
error_msg = "ERROR: Call to %s failed with a %s result\n" % (endpoint, status_code)
error_msg += "ERROR: This indicates a problem with authentication or headers.\n"
error_m | sg += "ERROR: Please ensure that the .edgerc file is formatted correctly.\n"
error_msg += "ERROR: If you still have issues, please use gen_ed | gerc.py to generate the credentials\n"
error_msg += "ERROR: Problem details: %s\n" % result["detail"]
exit(error_msg)
if status_code in [404]:
error_msg = "ERROR: Call to %s failed with a %s result\n" % (endpoint, status_code)
error_msg += "ERROR: This means that the page does not exist as requested.\n"
error_msg += "ERROR: Please ensure that the URL you're calling is correctly formatted\n"
error_msg += "ERROR: or look at other examples to make sure yours matches.\n"
error_msg += "ERROR: Problem details: %s\n" % result["detail"]
exit(error_msg)
error_string = None
if "errorString" in result:
if result["errorString"]:
error_string = result["errorString"]
else:
for key in result:
if type(key) is not str:
continue
if type(result[key]["errorString"]) is str:
error_string = result[key]["errorString"]
if error_string:
error_msg = "ERROR: Call caused a server fault.\n"
error_msg += "ERROR: Please check the problem details for more information:\n"
error_msg += "ERROR: Problem details: %s\n" % error_string
exit(error_msg)
def postResult(self, endpoint, body, parameters=None):
headers = {'content-type': 'application/json'}
if parameters:
parameter_string = urllib.urlencode(parameters)
path = ''.join([endpoint + '?',parameter_string])
else:
path = endpoint
endpoint_result = self.session.post(urljoin(self.baseurl,path), data=body, headers=headers)
self.httpErrors(endpoint_result.status_code, path, endpoint_result.json())
if self.debug: print ">>>\n" + json.dumps(endpoint_result.json(), indent=2) + "\n<<<\n"
return endpoint_result.json()
def putResult(endpoint, body, parameters=None):
headers = {'content-type': 'application/json'}
if parameters:
parameter_string = urllib.urlencode(parameters)
path = ''.join([endpoint + '?',parameter_string])
else:
path = endpoint
endpoint_result = session.put(urljoin(self.baseurl,path), data=body, headers=headers)
if debug: print ">>>\n" + json.dumps(endpoint_result.json(), indent=2) + "\n<<<\n"
return endpoint_result.json()
|
AppEnlight/demo-application | src/appenlight_demo/__init__.py | Python | bsd-3-clause | 939 | 0.001065 | import redis
from pyramid.authentication import AuthTktAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.config import Configurator
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
authorization_policy = ACLAuthorizationPolicy()
authentication_policy = AuthTktAuthenticationPolicy('not_so_secret')
config = Configurator(settings=settings,
authentication_policy=authentication_policy,
authorization_policy=authorization_policy,
root_factory='appenlight_demo.security.RootFactory',
)
c | onfig.include('pyramid_jinja2')
config.include('.models')
config.include('.routes')
config.registry.redis_conn = redis.StrictRedis.from_url(
settings['redis.url'])
| config.scan()
return config.make_wsgi_app()
|
ahmedaljazzar/edx-platform | openedx/core/djangoapps/credit/urls.py | Python | agpl-3.0 | 908 | 0.004405 | """
URLs for the credit app.
"""
from django.conf.urls import include, url
from openedx.core.djangoapps.credit import models, routers, views
PROVIDER_ID_PATTERN = r'(? | P<provider_id>{})'.format(models.CREDIT_PROVIDER_ID_REGEX)
PROVIDER_URLS = [
url(r'^request/$', views.CreditProviderRequestCreateView.as_view(), name='create_request'),
url(r'^callback/?$', views.CreditProviderCallbackView.as_view(), name='provider_callback'),
]
V1_URLS = [
url(r'^providers/{}/'.format(PROVIDER_ID_PATTERN), include(PROVIDER_URLS)),
url(r'^eligibility/$', views.CreditEligibilit | yView.as_view(), name='eligibility_details'),
]
router = routers.SimpleRouter() # pylint: disable=invalid-name
router.register(r'courses', views.CreditCourseViewSet)
router.register(r'providers', views.CreditProviderViewSet)
V1_URLS += router.urls
app_name = 'credit'
urlpatterns = [
url(r'^v1/', include(V1_URLS)),
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.